Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
From the class TestFillEmpties, method doFillEmptiesScalar:
private void doFillEmptiesScalar(MajorType majorType) {
  TupleMetadata schema = new SchemaBuilder()
      .add("a", majorType)
      .buildSchema();
  ExtendableRowSet rs = fixture.rowSet(schema);
  RowSetWriter writer = rs.writer();
  ScalarWriter colWriter = writer.scalar(0);
  ValueType valueType = colWriter.valueType();
  boolean nullable = majorType.getMode() == DataMode.OPTIONAL;

  // Write a value to every fifth row; leave the rest unset so the
  // writer must fill them.
  for (int i = 0; i < ROW_COUNT; i++) {
    if (i % 5 == 0) {
      colWriter.setObject(RowSetUtilities.testDataFromInt(valueType, majorType, i));
    }
    writer.save();
  }
  SingleRowSet result = writer.done();

  // Read the batch back and verify what the fill-empties logic produced.
  RowSetReader reader = result.reader();
  ScalarReader colReader = reader.scalar(0);
  MinorType type = majorType.getMinorType();
  boolean isVariable = Types.isVarWidthType(type);
  for (int i = 0; i < ROW_COUNT; i++) {
    assertTrue(reader.next());
    if (i % 5 != 0) {
      if (nullable) {
        // Nullable types fill with nulls.
        assertTrue(colReader.isNull());
        continue;
      }
      if (isVariable) {
        // Variable-width types fill with a zero-length value.
        assertEquals(0, colReader.getBytes().length);
        continue;
      }
    }
    // All other types fill with zero bytes, interpreted as some form
    // of zero for each type.
    Object actual = colReader.getObject();
    Object expected = RowSetUtilities.testDataFromInt(valueType, majorType,
        i % 5 == 0 ? i : 0);
    RowSetUtilities.assertEqualValues(
        majorType.toString().replace('\n', ' ') + "[" + i + "]",
        valueType, expected, actual);
  }
  result.clear();
}
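For reference, a caller might exercise this helper once per cardinality. Here is a minimal hypothetical test (the test method name is illustrative; Types.required and Types.optional are standard Drill helpers):

@Test
public void testFillEmptiesVarChar() {
  // Required VARCHAR fills skipped rows with zero-length values.
  doFillEmptiesScalar(Types.required(MinorType.VARCHAR));
  // Optional (nullable) VARCHAR fills skipped rows with NULL.
  doFillEmptiesScalar(Types.optional(MinorType.VARCHAR));
}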
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
From the class TestFillEmpties, method testDefaultValue:
/**
 * Test each vector type to ensure it supports setting a default value.
 * Sets the default directly on the writer to avoid the need to serialize
 * the default value to a string, which is awkward for some types when
 * using the generic "test value from int" tool.
 */
@Test
public void testDefaultValue() {
  doTestDefaultValue(Types.required(MinorType.VARCHAR));
  for (MinorType type : MinorType.values()) {
    if (!isSupported(type)) {
      continue;
    }
    switch (type) {
      case DECIMAL18:
      case DECIMAL28SPARSE:
      case DECIMAL9:
      case DECIMAL38SPARSE:
      case VARDECIMAL:
        // Decimal types require an explicit precision and scale.
        MajorType majorType = MajorType.newBuilder()
            .setMinorType(type)
            .setMode(DataMode.REQUIRED)
            .setPrecision(9)
            .setScale(2)
            .build();
        doTestDefaultValue(majorType);
        break;
      default:
        doTestDefaultValue(Types.required(type));
    }
  }
}
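The doTestDefaultValue helper is not shown in this excerpt. A minimal sketch of the pattern it implies, mirroring the setup of doFillEmptiesScalar above; the setDefaultValue call on the column writer is the assumed mechanism for "setting the default directly on the writer":

private void doTestDefaultValue(MajorType majorType) {
  TupleMetadata schema = new SchemaBuilder().add("a", majorType).buildSchema();
  ExtendableRowSet rs = fixture.rowSet(schema);
  RowSetWriter writer = rs.writer();
  ScalarWriter colWriter = writer.scalar(0);
  ValueType valueType = colWriter.valueType();
  // Pick a non-zero default so filled rows differ from plain zero-fill.
  Object defaultValue = RowSetUtilities.testDataFromInt(valueType, majorType, 20);
  colWriter.setDefaultValue(defaultValue); // assumed accessor
  for (int i = 0; i < ROW_COUNT; i++) {
    if (i % 5 == 0) {
      colWriter.setObject(RowSetUtilities.testDataFromInt(valueType, majorType, i));
    }
    writer.save(); // unset rows should be filled with the default
  }
  SingleRowSet result = writer.done();
  // ... read back and assert every skipped row equals defaultValue ...
  result.clear();
}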
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
From the class HDF5BatchReader, method buildSchemaFor2DimensionalDataset:
/**
 * Builds a Drill schema from a dataset with two or more dimensions. Only
 * INT, LONG, DOUBLE and FLOAT are supported for datasets with two or more
 * dimensions, so this function is not as inclusive as the 1D function. It
 * builds the schema by adding DataWriters to the dataWriters array.
 *
 * @param dataset the dataset from which Drill will build a schema
 */
private void buildSchemaFor2DimensionalDataset(Dataset dataset) {
  MinorType currentDataType = HDF5Utils.getDataType(dataset.getDataType());
  // Case for null or unknown data types:
  if (currentDataType == null) {
    logger.warn("Couldn't add {}", dataset.getJavaType().getName());
    return;
  }
  long cols = dimensions[1];
  String tempFieldName;
  for (int i = 0; i < cols; i++) {
    switch (currentDataType) {
      case INT:
        tempFieldName = INT_COLUMN_PREFIX + i;
        dataWriters.add(new HDF5IntDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, tempFieldName, i));
        break;
      case BIGINT:
        tempFieldName = LONG_COLUMN_PREFIX + i;
        dataWriters.add(new HDF5LongDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, tempFieldName, i));
        break;
      case FLOAT8:
        tempFieldName = DOUBLE_COLUMN_PREFIX + i;
        dataWriters.add(new HDF5DoubleDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, tempFieldName, i));
        break;
      case FLOAT4:
        tempFieldName = FLOAT_COLUMN_PREFIX + i;
        dataWriters.add(new HDF5FloatDataWriter(hdfFile, writerSpec, readerConfig.defaultPath, tempFieldName, i));
        break;
      default:
        throw new UnsupportedOperationException(currentDataType.name());
    }
  }
}
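The column-name prefix constants referenced above are defined elsewhere in HDF5BatchReader; plausible definitions (the exact string values are assumptions):

private static final String INT_COLUMN_PREFIX = "int_col_";
private static final String LONG_COLUMN_PREFIX = "long_col_";
private static final String DOUBLE_COLUMN_PREFIX = "double_col_";
private static final String FLOAT_COLUMN_PREFIX = "float_col_";

Each 2D dataset thus surfaces as a set of numbered columns such as int_col_0, int_col_1, and so on, one per column of the matrix.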
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
From the class HDF5BatchReader, method buildSchemaFor1DimensionalDataset:
/**
 * Called when the default path is set and the dataset has a single
 * dimension. Adds a single dataWriter of the correct data type to the
 * dataWriters array.
 *
 * @param dataset the HDF5 dataset
 */
private void buildSchemaFor1DimensionalDataset(Dataset dataset) {
  MinorType currentDataType = HDF5Utils.getDataType(dataset.getDataType());
  // Case for null or unknown data types:
  if (currentDataType == null) {
    logger.warn("Couldn't add {}", dataset.getJavaType().getName());
    return;
  }
  dataWriters.add(buildWriter(currentDataType));
}
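The buildWriter helper is not included in this excerpt. A hypothetical sketch covering the same four types as the 2D case (the 1D constructor signatures are assumed, and the real method likely handles additional types such as VARCHAR and TIMESTAMP):

private HDF5DataWriter buildWriter(MinorType dataType) {
  switch (dataType) {
    case INT:
      return new HDF5IntDataWriter(hdfFile, writerSpec, readerConfig.defaultPath);
    case BIGINT:
      return new HDF5LongDataWriter(hdfFile, writerSpec, readerConfig.defaultPath);
    case FLOAT8:
      return new HDF5DoubleDataWriter(hdfFile, writerSpec, readerConfig.defaultPath);
    case FLOAT4:
      return new HDF5FloatDataWriter(hdfFile, writerSpec, readerConfig.defaultPath);
    default:
      throw new UnsupportedOperationException(dataType.name());
  }
}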
Use of org.apache.drill.common.types.TypeProtos.MinorType in project drill by apache.
From the class KuduRecordReader, method initCols:
private void initCols(Schema schema) throws SchemaChangeException {
  ImmutableList.Builder<ProjectedColumnInfo> pciBuilder = ImmutableList.builder();
  for (int i = 0; i < schema.getColumnCount(); i++) {
    ColumnSchema col = schema.getColumnByIndex(i);
    final String name = col.getName();
    final Type kuduType = col.getType();
    lastColumnName = name;
    lastColumnType = kuduType;
    MinorType minorType = TYPES.get(kuduType);
    if (minorType == null) {
      logger.warn("Ignoring column that is unsupported.",
          UserException.unsupportedError()
              .message("A column you queried has a data type that is not currently supported by the Kudu storage plugin. "
                  + "The column's name was %s and its Kudu data type was %s. ", name, kuduType.toString())
              .addContext("column Name", name)
              .addContext("plugin", "kudu")
              .build(logger));
      continue;
    }
    MajorType majorType;
    if (col.isNullable()) {
      majorType = Types.optional(minorType);
    } else {
      majorType = Types.required(minorType);
    }
    MaterializedField field = MaterializedField.create(name, majorType);
    final Class<? extends ValueVector> clazz =
        TypeHelper.getValueVectorClass(minorType, majorType.getMode());
    ValueVector vector = output.addField(field, clazz);
    vector.allocateNew();
    ProjectedColumnInfo pci = new ProjectedColumnInfo();
    pci.vv = vector;
    pci.kuduColumn = col;
    pci.index = i;
    pciBuilder.add(pci);
  }
  projectedCols = pciBuilder.build();
}
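The TYPES map consulted above translates Kudu column types into Drill minor types. A plausible definition, assuming a static ImmutableMap (the exact entries are an assumption based on the types the two systems share):

private static final ImmutableMap<Type, MinorType> TYPES = ImmutableMap.<Type, MinorType>builder()
    .put(Type.BINARY, MinorType.VARBINARY)
    .put(Type.BOOL, MinorType.BIT)
    .put(Type.DOUBLE, MinorType.FLOAT8)
    .put(Type.FLOAT, MinorType.FLOAT4)
    .put(Type.INT8, MinorType.INT)
    .put(Type.INT16, MinorType.INT)
    .put(Type.INT32, MinorType.INT)
    .put(Type.INT64, MinorType.BIGINT)
    .put(Type.STRING, MinorType.VARCHAR)
    .put(Type.UNIXTIME_MICROS, MinorType.TIMESTAMP)
    .build();

Any Kudu type absent from this map yields null, which triggers the unsupported-column warning above and skips the column.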