
Example 41 with MinorType

Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class IfExpression, method getMajorType:

@Override
public MajorType getMajorType() {
    if (outputType != null) {
        return outputType;
    }
    MajorType elseType = elseExpression.getMajorType();
    MajorType ifType = ifCondition.expression.getMajorType();
    if (elseType.getMinorType() == MinorType.UNION) {
        // Merge the subtype sets of both branches into a single UNION type.
        Set<MinorType> subtypes = Sets.newHashSet();
        subtypes.addAll(elseType.getSubTypeList());
        subtypes.addAll(ifType.getSubTypeList());
        MajorType.Builder builder = MajorType.newBuilder()
            .setMinorType(MinorType.UNION)
            .setMode(DataMode.OPTIONAL);
        for (MinorType subtype : subtypes) {
            builder.addSubType(subtype);
        }
        return builder.build();
    }
    // Otherwise use the IF branch's minor type and widen the mode:
    // if either branch is nullable, the result must be nullable.
    MajorType.Builder builder = MajorType.newBuilder().setMinorType(ifType.getMinorType());
    builder.setMode(elseType.getMode() == DataMode.OPTIONAL || ifType.getMode() == DataMode.OPTIONAL ? DataMode.OPTIONAL : elseType.getMode());
    builder = Types.calculateTypePrecisionAndScale(ifType, elseType, builder);
    return builder.build();
}
Also used: MajorType (org.apache.drill.common.types.TypeProtos.MajorType), MinorType (org.apache.drill.common.types.TypeProtos.MinorType)
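
The mode-widening rule at the end of getMajorType is worth calling out: if either branch of the IF is nullable, the whole expression must be nullable. A minimal sketch, not from the Drill sources (the class and method names here are illustrative), using only the builder calls that appear above:

import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;

public class ModeWideningSketch {
    // Mirrors the mode computation in getMajorType above.
    static DataMode widen(MajorType ifType, MajorType elseType) {
        return ifType.getMode() == DataMode.OPTIONAL || elseType.getMode() == DataMode.OPTIONAL
            ? DataMode.OPTIONAL
            : elseType.getMode();
    }

    public static void main(String[] args) {
        MajorType required = MajorType.newBuilder()
            .setMinorType(MinorType.INT).setMode(DataMode.REQUIRED).build();
        MajorType optional = MajorType.newBuilder()
            .setMinorType(MinorType.INT).setMode(DataMode.OPTIONAL).build();
        System.out.println(widen(required, optional)); // prints OPTIONAL
    }
}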

Example 42 with MinorType

Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class JdbcBatchReader, method buildSchema:

private TupleMetadata buildSchema() throws SQLException {
    SchemaBuilder builder = new SchemaBuilder();
    ResultSetMetaData meta = resultSet.getMetaData();
    jdbcColumns = new ArrayList<>();
    int columnsCount = meta.getColumnCount();
    if (columns.size() != columnsCount) {
        throw UserException.validationError()
            .message("Expected columns count differs from the returned one.\n"
                + "Expected columns: %s\n"
                + "Returned columns count: %s", columns, columnsCount)
            .addContext("Sql", sql)
            .addContext(errorContext)
            .build(logger);
    }
    for (int i = 1; i <= columnsCount; i++) {
        String name = columns.get(i - 1).getRootSegmentPath();
        // column index in ResultSetMetaData starts from 1
        int jdbcType = meta.getColumnType(i);
        int width = Math.min(meta.getPrecision(i), DRILL_REL_DATATYPE_SYSTEM.getMaxNumericPrecision());
        // Note: if neither the precision nor the scale is defined in the query, Drill
        // defaults both to 38, which causes an overflow exception. We reduce the scale
        // by one here to avoid this; the better solution would be for the user to
        // provide the precision and scale.
        int scale = Math.min(meta.getScale(i), DRILL_REL_DATATYPE_SYSTEM.getMaxNumericScale() - 1);
        MinorType minorType = JDBC_TYPE_MAPPINGS.get(jdbcType);
        if (minorType == null) {
            logger.warn("Ignoring column that is unsupported.", UserException.unsupportedError().message("A column you queried has a data type that is not currently supported by the JDBC storage plugin. " + "The column's name was %s and its JDBC data type was %s. ", name, nameFromType(jdbcType)).addContext("Sql", sql).addContext("Column Name", name).addContext(errorContext).build(logger));
            continue;
        }
        jdbcColumns.add(new JdbcColumn(name, minorType, i, scale, width));
        // Precision and scale are passed for all readers whether they are needed or not.
        builder.addNullable(name, minorType, width, scale);
    }
    return builder.buildSchema();
}
Also used: ResultSetMetaData (java.sql.ResultSetMetaData), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), MinorType (org.apache.drill.common.types.TypeProtos.MinorType)
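
buildSchema relies on a static JDBC_TYPE_MAPPINGS table to translate java.sql.Types codes into Drill MinorTypes; unmapped types come back null, and the column is logged and skipped. A hypothetical sketch of that kind of table (the real map in the plugin is larger; the class name and chosen entries here are illustrative):

import java.sql.Types;
import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.types.TypeProtos.MinorType;

public class JdbcTypeMapSketch {
    static final Map<Integer, MinorType> TYPE_MAPPINGS = new HashMap<>();
    static {
        TYPE_MAPPINGS.put(Types.INTEGER, MinorType.INT);
        TYPE_MAPPINGS.put(Types.BIGINT, MinorType.BIGINT);
        TYPE_MAPPINGS.put(Types.VARCHAR, MinorType.VARCHAR);
        TYPE_MAPPINGS.put(Types.DOUBLE, MinorType.FLOAT8);
        TYPE_MAPPINGS.put(Types.TIMESTAMP, MinorType.TIMESTAMP);
    }

    public static void main(String[] args) {
        System.out.println(TYPE_MAPPINGS.get(Types.VARCHAR)); // VARCHAR
        System.out.println(TYPE_MAPPINGS.get(Types.ARRAY));   // null -> column skipped
    }
}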

Example 43 with MinorType

Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class MapUtility, method writeToMapFromReader:

/*
   * Function to read a value from the field reader, detect the type, construct the appropriate value holder
   * and use the value holder to write to the Map.
   */
// TODO : This should be templatized and generated using freemarker
public static void writeToMapFromReader(FieldReader fieldReader, BaseWriter.MapWriter mapWriter) {
    try {
        MajorType valueMajorType = fieldReader.getType();
        MinorType valueMinorType = valueMajorType.getMinorType();
        // REPEATED values must be copied through a list writer.
        boolean repeated = valueMajorType.getMode() == TypeProtos.DataMode.REPEATED;
        switch(valueMinorType) {
            case TINYINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).tinyInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.tinyInt(MappifyUtility.fieldValue));
                }
                break;
            case SMALLINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).smallInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.smallInt(MappifyUtility.fieldValue));
                }
                break;
            case BIGINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bigInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.bigInt(MappifyUtility.fieldValue));
                }
                break;
            case INT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).integer());
                } else {
                    fieldReader.copyAsValue(mapWriter.integer(MappifyUtility.fieldValue));
                }
                break;
            case UINT1:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt1());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt1(MappifyUtility.fieldValue));
                }
                break;
            case UINT2:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt2());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt2(MappifyUtility.fieldValue));
                }
                break;
            case UINT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt4());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt4(MappifyUtility.fieldValue));
                }
                break;
            case UINT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt8());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt8(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL9:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal9());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal9(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL18:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal18());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal18(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL28SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal28Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal28Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL38SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal38Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal38Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DATE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).date());
                } else {
                    fieldReader.copyAsValue(mapWriter.date(MappifyUtility.fieldValue));
                }
                break;
            case TIME:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).time());
                } else {
                    fieldReader.copyAsValue(mapWriter.time(MappifyUtility.fieldValue));
                }
                break;
            case TIMESTAMP:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).timeStamp());
                } else {
                    fieldReader.copyAsValue(mapWriter.timeStamp(MappifyUtility.fieldValue));
                }
                break;
            case INTERVAL:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).interval());
                } else {
                    fieldReader.copyAsValue(mapWriter.interval(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALDAY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalDay());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalDay(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALYEAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalYear());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalYear(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float4());
                } else {
                    fieldReader.copyAsValue(mapWriter.float4(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float8());
                } else {
                    fieldReader.copyAsValue(mapWriter.float8(MappifyUtility.fieldValue));
                }
                break;
            case BIT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bit());
                } else {
                    fieldReader.copyAsValue(mapWriter.bit(MappifyUtility.fieldValue));
                }
                break;
            case VARCHAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varChar());
                } else {
                    fieldReader.copyAsValue(mapWriter.varChar(MappifyUtility.fieldValue));
                }
                break;
            case VARBINARY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varBinary());
                } else {
                    fieldReader.copyAsValue(mapWriter.varBinary(MappifyUtility.fieldValue));
                }
                break;
            case MAP:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).map());
                } else {
                    fieldReader.copyAsValue(mapWriter.map(MappifyUtility.fieldValue));
                }
                break;
            case LIST:
                fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).list());
                break;
            default:
                throw new DrillRuntimeException(String.format("kvgen does not support input of type: %s", valueMinorType));
        }
    } catch (ClassCastException e) {
        final MaterializedField field = fieldReader.getField();
        throw new DrillRuntimeException(String.format(TYPE_MISMATCH_ERROR, field.getPath(), field.getType()));
    }
}
Also used: MajorType (org.apache.drill.common.types.TypeProtos.MajorType), MinorType (org.apache.drill.common.types.TypeProtos.MinorType), MaterializedField (org.apache.drill.exec.record.MaterializedField), DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException)
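
The switch above is fully mechanical: one same-named writer method per MinorType, with REPEATED values routed through a list writer first, which is why the TODO suggests generating it from a template. A self-contained plain-Java sketch of the same table-driven dispatch (no Drill dependencies; all names here are illustrative):

import java.util.EnumMap;
import java.util.Map;

public class DispatchSketch {
    enum Kind { INT, VARCHAR }

    interface Copy { void run(boolean repeated); }

    static final Map<Kind, Copy> COPIERS = new EnumMap<>(Kind.class);
    static {
        // Each entry stands in for one case of the switch above.
        COPIERS.put(Kind.INT, repeated ->
            System.out.println(repeated ? "list().integer()" : "integer()"));
        COPIERS.put(Kind.VARCHAR, repeated ->
            System.out.println(repeated ? "list().varChar()" : "varChar()"));
    }

    public static void main(String[] args) {
        COPIERS.get(Kind.VARCHAR).run(false); // varChar()
        COPIERS.get(Kind.INT).run(true);      // list().integer()
    }
}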

Example 44 with MinorType

Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class JdbcRecordReader, method setup:

@Override
public void setup(OperatorContext operatorContext, OutputMutator output) throws ExecutionSetupException {
    try {
        this.operatorContext = operatorContext;
        connection = source.getConnection();
        statement = connection.createStatement();
        resultSet = statement.executeQuery(sql);
        final ResultSetMetaData meta = resultSet.getMetaData();
        final int columns = meta.getColumnCount();
        ImmutableList.Builder<ValueVector> vectorBuilder = ImmutableList.builder();
        ImmutableList.Builder<Copier<?>> copierBuilder = ImmutableList.builder();
        for (int i = 1; i <= columns; i++) {
            final String name = meta.getColumnLabel(i);
            final int jdbcType = meta.getColumnType(i);
            final int width = meta.getPrecision(i);
            final int scale = meta.getScale(i);
            MinorType minorType = JDBC_TYPE_MAPPINGS.get(jdbcType);
            if (minorType == null) {
                logger.warn("Ignoring column that is unsupported.", UserException.unsupportedError().message("A column you queried has a data type that is not currently supported by the JDBC storage plugin. " + "The column's name was %s and its JDBC data type was %s. ", name, nameFromType(jdbcType)).addContext("sql", sql).addContext("column Name", name).addContext("plugin", storagePluginName).build(logger));
                continue;
            }
            final MajorType type = Types.optional(minorType);
            final MaterializedField field = MaterializedField.create(name, type);
            final Class<? extends ValueVector> clazz = (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(minorType, type.getMode());
            ValueVector vector = output.addField(field, clazz);
            vectorBuilder.add(vector);
            copierBuilder.add(getCopier(jdbcType, i, resultSet, vector));
        }
        vectors = vectorBuilder.build();
        copiers = copierBuilder.build();
    } catch (SQLException | SchemaChangeException e) {
        throw UserException.dataReadError(e)
            .message("The JDBC storage plugin failed while trying to set up the SQL query. ")
            .addContext("sql", sql)
            .addContext("plugin", storagePluginName)
            .build(logger);
    }
}
Also used: SQLException (java.sql.SQLException), ImmutableList (com.google.common.collect.ImmutableList), MajorType (org.apache.drill.common.types.TypeProtos.MajorType), MaterializedField (org.apache.drill.exec.record.MaterializedField), ResultSetMetaData (java.sql.ResultSetMetaData), ValueVector (org.apache.drill.exec.vector.ValueVector), SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException), MinorType (org.apache.drill.common.types.TypeProtos.MinorType)
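
The core of setup is declaring a nullable Drill field for each JDBC column before the vector is allocated. A minimal sketch, using only the calls that appear in the source above (the class name and the printed output are illustrative):

import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.MaterializedField;

public class FieldSketch {
    public static void main(String[] args) {
        MajorType type = Types.optional(MinorType.VARCHAR); // nullable VARCHAR
        MaterializedField field = MaterializedField.create("name", type);
        System.out.println(field); // e.g. name(VARCHAR:OPTIONAL)
    }
}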

Example 45 with MinorType

Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class ParquetGroupScan, method populatePruningVector:

public void populatePruningVector(ValueVector v, int index, SchemaPath column, String file) {
    String f = Path.getPathWithoutSchemeAndAuthority(new Path(file)).toString();
    MinorType type = getTypeForColumn(column).getMinorType();
    switch(type) {
        case INT:
            {
                NullableIntVector intVector = (NullableIntVector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                intVector.getMutator().setSafe(index, value);
                return;
            }
        case SMALLINT:
            {
                NullableSmallIntVector smallIntVector = (NullableSmallIntVector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                smallIntVector.getMutator().setSafe(index, value.shortValue());
                return;
            }
        case TINYINT:
            {
                NullableTinyIntVector tinyIntVector = (NullableTinyIntVector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                tinyIntVector.getMutator().setSafe(index, value.byteValue());
                return;
            }
        case UINT1:
            {
                NullableUInt1Vector intVector = (NullableUInt1Vector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                intVector.getMutator().setSafe(index, value.byteValue());
                return;
            }
        case UINT2:
            {
                NullableUInt2Vector intVector = (NullableUInt2Vector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                intVector.getMutator().setSafe(index, (char) value.shortValue());
                return;
            }
        case UINT4:
            {
                NullableUInt4Vector intVector = (NullableUInt4Vector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                intVector.getMutator().setSafe(index, value);
                return;
            }
        case BIGINT:
            {
                NullableBigIntVector bigIntVector = (NullableBigIntVector) v;
                Long value = (Long) partitionValueMap.get(f).get(column);
                bigIntVector.getMutator().setSafe(index, value);
                return;
            }
        case FLOAT4:
            {
                NullableFloat4Vector float4Vector = (NullableFloat4Vector) v;
                Float value = (Float) partitionValueMap.get(f).get(column);
                float4Vector.getMutator().setSafe(index, value);
                return;
            }
        case FLOAT8:
            {
                NullableFloat8Vector float8Vector = (NullableFloat8Vector) v;
                Double value = (Double) partitionValueMap.get(f).get(column);
                float8Vector.getMutator().setSafe(index, value);
                return;
            }
        case VARBINARY:
            {
                NullableVarBinaryVector varBinaryVector = (NullableVarBinaryVector) v;
                Object s = partitionValueMap.get(f).get(column);
                byte[] bytes;
                if (s instanceof Binary) {
                    bytes = ((Binary) s).getBytes();
                } else if (s instanceof String) {
                    bytes = ((String) s).getBytes();
                } else if (s instanceof byte[]) {
                    bytes = (byte[]) s;
                } else {
                    throw new UnsupportedOperationException("Unable to create column data for type: " + type);
                }
                varBinaryVector.getMutator().setSafe(index, bytes, 0, bytes.length);
                return;
            }
        case DECIMAL18:
            {
                NullableDecimal18Vector decimalVector = (NullableDecimal18Vector) v;
                Long value = (Long) partitionValueMap.get(f).get(column);
                decimalVector.getMutator().setSafe(index, value);
                return;
            }
        case DATE:
            {
                NullableDateVector dateVector = (NullableDateVector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                dateVector.getMutator().setSafe(index, value * (long) DateTimeConstants.MILLIS_PER_DAY);
                return;
            }
        case TIME:
            {
                NullableTimeVector timeVector = (NullableTimeVector) v;
                Integer value = (Integer) partitionValueMap.get(f).get(column);
                timeVector.getMutator().setSafe(index, value);
                return;
            }
        case TIMESTAMP:
            {
                NullableTimeStampVector timeStampVector = (NullableTimeStampVector) v;
                Long value = (Long) partitionValueMap.get(f).get(column);
                timeStampVector.getMutator().setSafe(index, value);
                return;
            }
        case VARCHAR:
            {
                NullableVarCharVector varCharVector = (NullableVarCharVector) v;
                Object s = partitionValueMap.get(f).get(column);
                byte[] bytes;
                if (s instanceof String) {
                    // if the metadata was read from a JSON cache file it may be a string type
                    bytes = ((String) s).getBytes();
                } else if (s instanceof Binary) {
                    bytes = ((Binary) s).getBytes();
                } else if (s instanceof byte[]) {
                    bytes = (byte[]) s;
                } else {
                    throw new UnsupportedOperationException("Unable to create column data for type: " + type);
                }
                varCharVector.getMutator().setSafe(index, bytes, 0, bytes.length);
                return;
            }
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used: NullableTinyIntVector (org.apache.drill.exec.vector.NullableTinyIntVector), NullableFloat8Vector (org.apache.drill.exec.vector.NullableFloat8Vector), NullableVarBinaryVector (org.apache.drill.exec.vector.NullableVarBinaryVector), NullableUInt1Vector (org.apache.drill.exec.vector.NullableUInt1Vector), NullableIntVector (org.apache.drill.exec.vector.NullableIntVector), NullableFloat4Vector (org.apache.drill.exec.vector.NullableFloat4Vector), NullableUInt4Vector (org.apache.drill.exec.vector.NullableUInt4Vector), MinorType (org.apache.drill.common.types.TypeProtos.MinorType), NullableDecimal18Vector (org.apache.drill.exec.vector.NullableDecimal18Vector), Path (org.apache.hadoop.fs.Path), SchemaPath (org.apache.drill.common.expression.SchemaPath), ReadEntryWithPath (org.apache.drill.exec.store.dfs.ReadEntryWithPath), NullableDateVector (org.apache.drill.exec.vector.NullableDateVector), NullableTimeStampVector (org.apache.drill.exec.vector.NullableTimeStampVector), NullableSmallIntVector (org.apache.drill.exec.vector.NullableSmallIntVector), NullableUInt2Vector (org.apache.drill.exec.vector.NullableUInt2Vector), NullableVarCharVector (org.apache.drill.exec.vector.NullableVarCharVector), NullableBigIntVector (org.apache.drill.exec.vector.NullableBigIntVector), Binary (org.apache.parquet.io.api.Binary), NullableTimeVector (org.apache.drill.exec.vector.NullableTimeVector)
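
One subtlety in the DATE case above: Parquet stores dates as int days since the epoch, while Drill's date vector stores milliseconds, so the value is multiplied by MILLIS_PER_DAY, with the cast to long guarding against int overflow. A minimal sketch of that arithmetic (plain Java; the class name and sample value are illustrative):

public class DateConversionSketch {
    static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000; // 86,400,000

    public static void main(String[] args) {
        int daysSinceEpoch = 19_000;                    // a Parquet DATE value
        long millis = daysSinceEpoch * MILLIS_PER_DAY;  // widened to long, no overflow
        System.out.println(millis);                     // 1641600000000
    }
}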

Aggregations

MinorType (org.apache.drill.common.types.TypeProtos.MinorType): 86
MajorType (org.apache.drill.common.types.TypeProtos.MajorType): 32
MaterializedField (org.apache.drill.exec.record.MaterializedField): 17
ValueVector (org.apache.drill.exec.vector.ValueVector): 11
DataMode (org.apache.drill.common.types.TypeProtos.DataMode): 10
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 8
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 7
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 6
Test (org.junit.Test): 6
ImmutableList (com.google.common.collect.ImmutableList): 5
SchemaPath (org.apache.drill.common.expression.SchemaPath): 5
ValueHolder (org.apache.drill.exec.expr.holders.ValueHolder): 5
IOException (java.io.IOException): 4
UserException (org.apache.drill.common.exceptions.UserException): 4
OriginalType (org.apache.parquet.schema.OriginalType): 4
PrimitiveType (org.apache.parquet.schema.PrimitiveType): 4
SQLException (java.sql.SQLException): 3
DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException): 3
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException): 3
ExtendableRowSet (org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet): 3