Search in sources :

Example 71 with MinorType

use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class TestFillEmpties, method doFillEmptiesScalar.

/**
 * Writes a test value to every fifth row of a single scalar column, then
 * verifies how the skipped rows were back-filled: OPTIONAL columns fill
 * with nulls, variable-width columns fill with zero-length values, and
 * all other columns fill with zero bytes read back as the type's notion
 * of zero.
 *
 * @param majorType the Drill type (minor type plus data mode) under test
 */
private void doFillEmptiesScalar(MajorType majorType) {
    TupleMetadata schema = new SchemaBuilder().add("a", majorType).buildSchema();
    ExtendableRowSet rs = fixture.rowSet(schema);
    RowSetWriter writer = rs.writer();
    ScalarWriter colWriter = writer.scalar(0);
    ValueType valueType = colWriter.valueType();
    boolean nullable = majorType.getMode() == DataMode.OPTIONAL;
    for (int row = 0; row < ROW_COUNT; row++) {
        // Only every fifth row gets an explicit value; the rest are
        // left empty and must be back-filled by the writer.
        if (row % 5 == 0) {
            colWriter.setObject(RowSetUtilities.testDataFromInt(valueType, majorType, row));
        }
        writer.save();
    }
    SingleRowSet result = writer.done();
    RowSetReader reader = result.reader();
    ScalarReader colReader = reader.scalar(0);
    MinorType type = majorType.getMinorType();
    boolean isVariable = Types.isVarWidthType(type);
    for (int row = 0; row < ROW_COUNT; row++) {
        assertTrue(reader.next());
        boolean wasWritten = row % 5 == 0;
        if (!wasWritten && nullable) {
            // Nullable types fill empty rows with nulls.
            assertTrue(colReader.isNull());
        } else if (!wasWritten && isVariable) {
            // Variable-width types fill empty rows with a zero-length value.
            assertEquals(0, colReader.getBytes().length);
        } else {
            // Written rows, plus empty rows of fixed-width required types,
            // which fill with zero bytes interpreted as some form of zero.
            Object actual = colReader.getObject();
            Object expected = RowSetUtilities.testDataFromInt(valueType, majorType, wasWritten ? row : 0);
            RowSetUtilities.assertEqualValues(majorType.toString().replace('\n', ' ') + "[" + row + "]", valueType, expected, actual);
        }
    }
    result.clear();
}
Also used : SingleRowSet(org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet) ValueType(org.apache.drill.exec.vector.accessor.ValueType) ScalarReader(org.apache.drill.exec.vector.accessor.ScalarReader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ExtendableRowSet(org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet)

Example 72 with MinorType

use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class TestFillEmpties, method testDefaultValue.

/**
 * Verifies that every supported vector type accepts a default value.
 * The default is set directly on the writer rather than serialized to a
 * string, since string form is awkward for some types when using the
 * generic "test value from int" tool.
 */
@Test
public void testDefaultValue() {
    doTestDefaultValue(Types.required(MinorType.VARCHAR));
    for (MinorType type : MinorType.values()) {
        if (!isSupported(type)) {
            continue;
        }
        // Decimal types need an explicit precision and scale; everything
        // else can use the plain REQUIRED form of the type.
        boolean isDecimal = type == MinorType.DECIMAL9
            || type == MinorType.DECIMAL18
            || type == MinorType.DECIMAL28SPARSE
            || type == MinorType.DECIMAL38SPARSE
            || type == MinorType.VARDECIMAL;
        if (isDecimal) {
            doTestDefaultValue(MajorType.newBuilder()
                .setMinorType(type)
                .setMode(DataMode.REQUIRED)
                .setPrecision(9)
                .setScale(2)
                .build());
        } else {
            doTestDefaultValue(Types.required(type));
        }
    }
}
Also used : MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)

Example 73 with MinorType

use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class HDF5BatchReader, method buildSchemaFor2DimensionalDataset.

/**
 * Builds a Drill schema from a dataset with two or more dimensions by
 * adding one DataWriter per column to the dataWriters list. HDF5
 * multi-dimensional datasets here map only to INT, BIGINT, FLOAT4 and
 * FLOAT8, so this method is less inclusive than its 1D counterpart.
 *
 * @param dataset
 *          The dataset which Drill will use to build a schema
 */
private void buildSchemaFor2DimensionalDataset(Dataset dataset) {
    MinorType dataType = HDF5Utils.getDataType(dataset.getDataType());
    if (dataType == null) {
        // Unknown or unmapped HDF5 type: skip the dataset entirely.
        logger.warn("Couldn't add {}", dataset.getJavaType().getName());
        return;
    }
    long columnCount = dimensions[1];
    for (int col = 0; col < columnCount; col++) {
        // One writer per column; the field name encodes the type prefix
        // and the column index.
        if (dataType == MinorType.INT) {
            dataWriters.add(new HDF5IntDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, INT_COLUMN_PREFIX + col, col));
        } else if (dataType == MinorType.BIGINT) {
            dataWriters.add(new HDF5LongDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, LONG_COLUMN_PREFIX + col, col));
        } else if (dataType == MinorType.FLOAT8) {
            dataWriters.add(new HDF5DoubleDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, DOUBLE_COLUMN_PREFIX + col, col));
        } else if (dataType == MinorType.FLOAT4) {
            dataWriters.add(new HDF5FloatDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, FLOAT_COLUMN_PREFIX + col, col));
        } else {
            throw new UnsupportedOperationException(dataType.name());
        }
    }
}
Also used : HDF5DoubleDataWriter(org.apache.drill.exec.store.hdf5.writers.HDF5DoubleDataWriter) HDF5IntDataWriter(org.apache.drill.exec.store.hdf5.writers.HDF5IntDataWriter) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) HDF5FloatDataWriter(org.apache.drill.exec.store.hdf5.writers.HDF5FloatDataWriter) HDF5LongDataWriter(org.apache.drill.exec.store.hdf5.writers.HDF5LongDataWriter)

Example 74 with MinorType

use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class HDF5BatchReader, method buildSchemaFor1DimensionalDataset.

/**
 * Called when the default path is set and the dataset has a single
 * dimension: creates one dataWriter of the matching Drill type.
 * Datasets with an unknown HDF5 type are logged and skipped.
 *
 * @param dataset The HDF5 dataset
 */
private void buildSchemaFor1DimensionalDataset(Dataset dataset) {
    MinorType dataType = HDF5Utils.getDataType(dataset.getDataType());
    if (dataType == null) {
        // Unknown or unmapped HDF5 type: nothing to write.
        logger.warn("Couldn't add {}", dataset.getJavaType().getName());
        return;
    }
    dataWriters.add(buildWriter(dataType));
}
Also used : MinorType(org.apache.drill.common.types.TypeProtos.MinorType)

Example 75 with MinorType

use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.

The class KuduRecordReader, method initCols.

/**
 * Maps each Kudu column to a Drill value vector and records the mapping
 * in projectedCols. Columns whose Kudu type has no Drill equivalent are
 * logged and skipped.
 *
 * @param schema the Kudu table schema to project
 * @throws SchemaChangeException if the output mutator rejects a field
 */
private void initCols(Schema schema) throws SchemaChangeException {
    ImmutableList.Builder<ProjectedColumnInfo> builder = ImmutableList.builder();
    for (int i = 0; i < schema.getColumnCount(); i++) {
        ColumnSchema col = schema.getColumnByIndex(i);
        String name = col.getName();
        Type kuduType = col.getType();
        // Remember the most recent column for error reporting elsewhere.
        lastColumnName = name;
        lastColumnType = kuduType;
        MinorType minorType = TYPES.get(kuduType);
        if (minorType == null) {
            // No Drill mapping for this Kudu type: warn and skip the column.
            logger.warn("Ignoring column that is unsupported.", UserException.unsupportedError().message("A column you queried has a data type that is not currently supported by the Kudu storage plugin. " + "The column's name was %s and its Kudu data type was %s. ", name, kuduType.toString()).addContext("column Name", name).addContext("plugin", "kudu").build(logger));
            continue;
        }
        // Nullable Kudu columns become OPTIONAL; the rest REQUIRED.
        MajorType majorType = col.isNullable() ? Types.optional(minorType) : Types.required(minorType);
        MaterializedField field = MaterializedField.create(name, majorType);
        Class<? extends ValueVector> clazz = TypeHelper.getValueVectorClass(minorType, majorType.getMode());
        ValueVector vector = output.addField(field, clazz);
        vector.allocateNew();
        ProjectedColumnInfo pci = new ProjectedColumnInfo();
        pci.vv = vector;
        pci.kuduColumn = col;
        pci.index = i;
        builder.add(pci);
    }
    projectedCols = builder.build();
}
Also used : ValueVector(org.apache.drill.exec.vector.ValueVector) Type(org.apache.kudu.Type) MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) ImmutableList(org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList) MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) ColumnSchema(org.apache.kudu.ColumnSchema) MaterializedField(org.apache.drill.exec.record.MaterializedField)

Aggregations

MinorType (org.apache.drill.common.types.TypeProtos.MinorType)86 MajorType (org.apache.drill.common.types.TypeProtos.MajorType)32 MaterializedField (org.apache.drill.exec.record.MaterializedField)17 ValueVector (org.apache.drill.exec.vector.ValueVector)11 DataMode (org.apache.drill.common.types.TypeProtos.DataMode)10 SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder)8 TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata)7 SubOperatorTest (org.apache.drill.test.SubOperatorTest)6 Test (org.junit.Test)6 ImmutableList (com.google.common.collect.ImmutableList)5 SchemaPath (org.apache.drill.common.expression.SchemaPath)5 ValueHolder (org.apache.drill.exec.expr.holders.ValueHolder)5 IOException (java.io.IOException)4 UserException (org.apache.drill.common.exceptions.UserException)4 OriginalType (org.apache.parquet.schema.OriginalType)4 PrimitiveType (org.apache.parquet.schema.PrimitiveType)4 SQLException (java.sql.SQLException)3 DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException)3 SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException)3 ExtendableRowSet (org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet)3