Use of org.apache.drill.exec.vector.accessor.ObjectReader in project drill by apache.
From the class TestVariantAccessors, method testListOfUnions.
/**
* Test a union list.
*/
@Test
public void testListOfUnions() {
  final TupleMetadata schema = new SchemaBuilder()
      .addList("list")
        .addType(MinorType.INT)
        .addType(MinorType.VARCHAR)
        .resumeSchema()
      .buildSchema();
  final ExtendableRowSet rowSet = fixture.rowSet(schema);
  final RowSetWriter writer = rowSet.writer();
  {
    final ObjectWriter listObj = writer.column(0);
    assertEquals(ObjectType.ARRAY, listObj.type());
    final ArrayWriter listArray = listObj.array();
    final ObjectWriter itemObj = listArray.entry();
    assertEquals(ObjectType.VARIANT, itemObj.type());
    final VariantWriter variant = itemObj.variant();
    final ScalarWriter intWriter = variant.scalar(MinorType.INT);
    final ScalarWriter strWriter = variant.scalar(MinorType.VARCHAR);

    // First row: (1, "two", 3)
    variant.setType(MinorType.INT);
    intWriter.setInt(1);
    listArray.save();
    variant.setType(MinorType.VARCHAR);
    strWriter.setString("two");
    listArray.save();
    variant.setType(MinorType.INT);
    intWriter.setInt(3);
    listArray.save();
    writer.save();

    // Second row: null
    writer.save();

    // Third row: 4, null, "six", null int, null string
    variant.setType(MinorType.INT);
    intWriter.setInt(4);
    listArray.save();
    variant.setNull();
    listArray.save();
    variant.setType(MinorType.VARCHAR);
    strWriter.setString("six");
    listArray.save();
    variant.setType(MinorType.INT);
    intWriter.setNull();
    listArray.save();
    variant.setType(MinorType.VARCHAR);
    strWriter.setNull();
    listArray.save();
    writer.save();

    // Fourth row: empty array.
    listArray.setNull(false);
    writer.save();

    // Fifth row: 9
    variant.setType(MinorType.INT);
    intWriter.setInt(9);
    listArray.save();
    writer.save();
  }
  final SingleRowSet result = writer.done();
  assertEquals(5, result.rowCount());
  {
    final RowSetReader reader = result.reader();
    final ObjectReader listObj = reader.column(0);
    assertEquals(ObjectType.ARRAY, listObj.type());
    final ArrayReader listArray = listObj.array();
    assertEquals(ObjectType.VARIANT, listArray.entry().type());
    final VariantReader variant = listArray.variant();
    final ScalarReader intReader = variant.scalar(MinorType.INT);
    final ScalarReader strReader = variant.scalar(MinorType.VARCHAR);

    // First row: (1, "two", 3)
    assertTrue(reader.next());
    assertFalse(listArray.isNull());
    assertEquals(3, listArray.size());
    assertTrue(listArray.next());
    assertEquals(MinorType.INT, variant.dataType());
    assertFalse(intReader.isNull());
    assertTrue(strReader.isNull());
    assertEquals(1, intReader.getInt());
    assertEquals(1, variant.scalar().getInt());
    assertTrue(listArray.next());
    assertEquals(MinorType.VARCHAR, variant.dataType());
    assertTrue(intReader.isNull());
    assertFalse(strReader.isNull());
    assertEquals("two", strReader.getString());
    assertEquals("two", variant.scalar().getString());
    assertTrue(listArray.next());
    assertEquals(MinorType.INT, variant.dataType());
    assertEquals(3, intReader.getInt());
    assertFalse(listArray.next());

    // Second row: null
    assertTrue(reader.next());
    assertTrue(listArray.isNull());
    assertEquals(0, listArray.size());

    // Third row: 4, null, "six", null int, null string
    assertTrue(reader.next());
    assertEquals(5, listArray.size());
    assertTrue(listArray.next());
    assertEquals(4, intReader.getInt());
    assertTrue(listArray.next());
    assertTrue(variant.isNull());
    assertTrue(listArray.next());
    assertEquals("six", strReader.getString());
    assertTrue(listArray.next());
    assertEquals(MinorType.INT, variant.dataType());
    assertTrue(intReader.isNull());
    assertTrue(listArray.next());
    assertEquals(MinorType.VARCHAR, variant.dataType());
    assertTrue(strReader.isNull());
    assertFalse(listArray.next());

    // Fourth row: empty array.
    assertTrue(reader.next());
    assertFalse(listArray.isNull());
    assertEquals(0, listArray.size());
    assertFalse(listArray.next());

    // Fifth row: 9
    assertTrue(reader.next());
    assertEquals(1, listArray.size());
    assertTrue(listArray.next());
    assertEquals(9, intReader.getInt());
    assertFalse(listArray.next());
    assertFalse(reader.next());
  }
  result.clear();
}
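A possible companion check, not part of the original test: the same result set can also be walked generically through the ObjectReader API instead of the type-specific scalar readers used above. The helper name printUnionList is hypothetical; it assumes the SingleRowSet returned by writer.done() is still live (i.e. it runs before result.clear()) and that java.util.List and java.util.ArrayList are imported.
// Hypothetical helper (not in TestVariantAccessors): dumps each union-list row
// using only ObjectReader.isNull() and ObjectReader.getObject(), so no
// per-type ScalarReader is needed.
private void printUnionList(SingleRowSet result) {
  final RowSetReader reader = result.reader();
  final ArrayReader listArray = reader.column(0).array();
  while (reader.next()) {
    if (listArray.isNull()) {
      System.out.println("null list");
      continue;
    }
    final List<Object> values = new ArrayList<>();
    while (listArray.next()) {
      final ObjectReader entry = listArray.entry();
      values.add(entry.isNull() ? null : entry.getObject());
    }
    System.out.println(values);
  }
}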
Use of org.apache.drill.exec.vector.accessor.ObjectReader in project drill by apache.
From the class TestAnalyze, method verifyAnalyzeOutput.
// Helper function to verify output of ANALYZE statement
private void verifyAnalyzeOutput(String query, String message) throws Exception {
  DirectRowSet rowSet = queryBuilder().sql(query).rowSet();
  try {
    assertEquals(1, rowSet.rowCount());
    RowSetReader reader = rowSet.reader();
    assertEquals(2, reader.columnCount());
    while (reader.next()) {
      ObjectReader column = reader.column(1);
      assertEquals(message, column.isNull() ? null : column.getObject().toString());
    }
  } finally {
    rowSet.clear();
  }
}
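A hypothetical caller, for illustration only: the table name and the expected summary message below are placeholders, not values taken from the actual TestAnalyze fixtures.
// Hypothetical usage of the helper above; the table and message strings are
// placeholders.
@Test
public void testAnalyzeSummaryMessage() throws Exception {
  verifyAnalyzeOutput(
      "ANALYZE TABLE dfs.tmp.`example_table` COMPUTE STATISTICS",
      "<expected summary message>");
}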
Use of org.apache.drill.exec.vector.accessor.ObjectReader in project drill by apache.
From the class MetadataControllerBatch, method getMetadataStatistics.
private List<StatisticsHolder<?>> getMetadataStatistics(TupleReader reader, TupleMetadata columnMetadata) {
  List<StatisticsHolder<?>> metadataStatistics = new ArrayList<>();
  String rgs = columnNamesOptions.rowGroupStart();
  String rgl = columnNamesOptions.rowGroupLength();
  for (ColumnMetadata column : columnMetadata) {
    String columnName = column.name();
    ObjectReader objectReader = reader.column(columnName);
    if (AnalyzeColumnUtils.isMetadataStatisticsField(columnName)) {
      metadataStatistics.add(new StatisticsHolder<>(objectReader.getObject(),
          AnalyzeColumnUtils.getStatisticsKind(columnName)));
    } else if (!objectReader.isNull()) {
      if (columnName.equals(rgs)) {
        metadataStatistics.add(new StatisticsHolder<>(Long.parseLong(objectReader.scalar().getString()),
            new BaseStatisticsKind<>(ExactStatisticsConstants.START, true)));
      } else if (columnName.equals(rgl)) {
        metadataStatistics.add(new StatisticsHolder<>(Long.parseLong(objectReader.scalar().getString()),
            new BaseStatisticsKind<>(ExactStatisticsConstants.LENGTH, true)));
      }
    }
  }
  return metadataStatistics;
}
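For reference, a minimal sketch with assumed values of what the row-group "start" branch above produces: a StatisticsHolder pairing the parsed long with an exact statistics kind.
// Minimal sketch (assumed value of "128"), mirroring the rowGroupStart branch.
StatisticsHolder<?> start = new StatisticsHolder<>(
    Long.parseLong("128"),
    new BaseStatisticsKind<>(ExactStatisticsConstants.START, true));
// Downstream code reads the pair back the same way getMetadataUnits() does:
// via getStatisticsValue() and getStatisticsKind().
Object startOffset = start.getStatisticsValue();  // 128L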
Use of org.apache.drill.exec.vector.accessor.ObjectReader in project drill by apache.
From the class MetadataControllerBatch, method getMetadataUnits.
private List<TableMetadataUnit> getMetadataUnits(TupleReader reader, int nestingLevel) {
  List<TableMetadataUnit> metadataUnits = new ArrayList<>();
  TupleMetadata columnMetadata = reader.tupleSchema();
  ObjectReader metadataColumnReader = reader.column(MetastoreAnalyzeConstants.METADATA_TYPE);
  Preconditions.checkNotNull(metadataColumnReader, "metadataType column wasn't found");
  ObjectReader underlyingMetadataReader = reader.column(MetastoreAnalyzeConstants.COLLECTED_MAP_FIELD);
  if (underlyingMetadataReader != null) {
    if (!underlyingMetadataReader.schema().isArray()) {
      throw new IllegalStateException("Incoming vector with name `collected_map` should be repeated map");
    }
    // Current row contains information about underlying metadata.
    ArrayReader array = underlyingMetadataReader.array();
    while (array.next()) {
      metadataUnits.addAll(getMetadataUnits(array.tuple(), nestingLevel + 1));
    }
  }
  List<StatisticsHolder<?>> metadataStatistics = getMetadataStatistics(reader, columnMetadata);
  Long rowCount = (Long) metadataStatistics.stream()
      .filter(statisticsHolder -> statisticsHolder.getStatisticsKind() == TableStatisticsKind.ROW_COUNT)
      .findAny()
      .map(StatisticsHolder::getStatisticsValue)
      .orElse(null);
  Map<SchemaPath, ColumnStatistics<?>> columnStatistics = getColumnStatistics(reader, columnMetadata, rowCount);
  MetadataType metadataType = MetadataType.valueOf(metadataColumnReader.scalar().getString());
  BaseMetadata metadata;
  switch (metadataType) {
    case TABLE:
      metadata = getTableMetadata(reader, metadataStatistics, columnStatistics);
      break;
    case SEGMENT:
      metadata = getSegmentMetadata(reader, metadataStatistics, columnStatistics, nestingLevel);
      break;
    case PARTITION:
      metadata = getPartitionMetadata(reader, metadataStatistics, columnStatistics, nestingLevel);
      break;
    case FILE:
      metadata = getFileMetadata(reader, metadataStatistics, columnStatistics, nestingLevel);
      break;
    case ROW_GROUP:
      metadata = getRowGroupMetadata(reader, metadataStatistics, columnStatistics, nestingLevel);
      break;
    default:
      throw new UnsupportedOperationException("Unsupported metadata type: " + metadataType);
  }
  metadataUnits.add(metadata.toMetadataUnit());
  return metadataUnits;
}
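For context, a hedged sketch of how such a method could be driven from an incoming batch. The container variable and the use of DirectRowSet here are illustrative assumptions, not the actual MetadataControllerBatch driver code; the point is only that a RowSetReader is a TupleReader and can be passed straight to getMetadataUnits().
// Illustrative only: container stands in for the incoming batch's
// VectorContainer. Each top-level row is processed at nesting level 0; nested
// collected_map entries are handled by the recursive call inside
// getMetadataUnits().
List<TableMetadataUnit> allUnits = new ArrayList<>();
RowSetReader reader = DirectRowSet.fromContainer(container).reader();
while (reader.next()) {
  allUnits.addAll(getMetadataUnits(reader, 0));
}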