use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestDirectConverter method testDecimalFromString.
@Test
public void testDecimalFromString() {
TupleMetadata outputSchema = new SchemaBuilder()
    .add("id", MinorType.INT)
    .add("dec", MinorType.VARDECIMAL, 4, 2)
    .buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
    .add("id", MinorType.INT)
    .add("dec", MinorType.VARCHAR)
    .buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
RowSet actual = testFixture
    .addRow(1, "0")
    .addRow(2, "-0")
    .addRow(3, "0.12")
    .addRow(4, "1.23")
    .addRow(5, "12.34")
    .addRow(6, "23.456")
    .addRow(7, "-99.99")
    .build();
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
    .addRow(1, dec("0"))
    .addRow(2, dec("-0"))
    .addRow(3, dec("0.12"))
    .addRow(4, dec("1.23"))
    .addRow(5, dec("12.34"))
    .addRow(6, dec("23.46"))
    .addRow(7, dec("-99.99"))
    .build();
RowSetUtilities.verify(expected, actual);
}
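Note that the expected row for the input "23.456" is dec("23.46"): the output column is VARDECIMAL with precision 4 and scale 2, so the converted value keeps only two fractional digits (the dec() helper presumably wraps the literal in a java.math.BigDecimal). A minimal, standalone sketch of that scaling with plain JDK classes; HALF_UP is chosen here only for illustration, and for this particular value HALF_EVEN yields the same result:

import java.math.BigDecimal;
import java.math.RoundingMode;

public class DecimalScaleDemo {
  public static void main(String[] args) {
    // Scale 2 keeps two fractional digits, so "23.456" must be rounded.
    BigDecimal in = new BigDecimal("23.456");
    System.out.println(in.setScale(2, RoundingMode.HALF_UP));   // 23.46
    System.out.println(in.setScale(2, RoundingMode.HALF_EVEN)); // 23.46 for this value too
  }
}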
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestDirectConverter method testNumberToStringConversion.
/**
* Test the standard number-to-string conversion using an ad-hoc conversion
* from the input type (the type used by the row set builder) to the output
* (vector) type.
*/
@Test
public void testNumberToStringConversion() {
// Create the schema
TupleMetadata outputSchema = new SchemaBuilder()
    .add("ti", MinorType.VARCHAR)
    .add("si", MinorType.VARCHAR)
    .add("int", MinorType.VARCHAR)
    .add("bi", MinorType.VARCHAR)
    .add("fl", MinorType.VARCHAR)
    .add("db", MinorType.VARCHAR)
    .buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
    .add("ti", MinorType.TINYINT)
    .add("si", MinorType.SMALLINT)
    .add("int", MinorType.INT)
    .add("bi", MinorType.BIGINT)
    .add("fl", MinorType.FLOAT4)
    .add("db", MinorType.FLOAT8)
    .buildSchema();
// The setObject() method won't do the Float to Double conversion,
// so values below are provided as doubles in the float case.
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
RowSet actual = testFixture
    .addRow(11, 12, 13, 14L, 15.5D, 16.25D)
    .addRow(127, 32757, Integer.MAX_VALUE, Long.MAX_VALUE, 10E6D, 10E200D)
    .build();
// Build the expected vector without a type converter.
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
    .addRow("11", "12", "13", "14", "15.5", "16.25")
    .addRow("127", "32757", Integer.toString(Integer.MAX_VALUE), Long.toString(Long.MAX_VALUE), "1.0E7", "1.0E201")
    .build();
// Compare
RowSetUtilities.verify(expected, actual);
}
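The expected strings "1.0E7" and "1.0E201" match Java's default double rendering of the literals 10E6D and 10E200D, which switches to scientific notation with a single leading digit for magnitudes of 10^7 and above. A quick standalone check of that formatting (plain JDK, class name chosen here for illustration):

public class DoubleToStringDemo {
  public static void main(String[] args) {
    System.out.println(Double.toString(10E6D));   // 1.0E7
    System.out.println(Double.toString(10E200D)); // 1.0E201
    System.out.println(Double.toString(15.5D));   // 15.5
    System.out.println(Double.toString(16.25D));  // 16.25
  }
}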
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestScanBatchWriters method sanityTest.
@Test
public void sanityTest() throws Exception {
Scan scanConfig = new AbstractSubScan("bob") {
@Override
public String getOperatorType() {
return "";
}
};
OperatorContext opContext = fixture.newOperatorContext(scanConfig);
// Setup: normally done by ScanBatch
VectorContainer container = new VectorContainer(fixture.allocator());
OutputMutator output = new ScanBatch.Mutator(opContext, fixture.allocator(), container);
DrillBuf buffer = opContext.getManagedBuffer();
try (VectorContainerWriter writer = new VectorContainerWriter(output)) {
// Per-batch
writer.allocate();
writer.reset();
BaseWriter.MapWriter map = writer.rootAsMap();
// Write one record (10, "Fred", [100, 110, 120] )
map.integer("a").writeInt(10);
byte[] bytes = "Fred".getBytes("UTF-8");
buffer.setBytes(0, bytes, 0, bytes.length);
map.varChar("b").writeVarChar(0, bytes.length, buffer);
try (ListWriter list = map.list("c")) {
list.startList();
list.integer().writeInt(100);
list.integer().writeInt(110);
list.integer().writeInt(120);
list.endList();
// Write another record: (20, "Wilma", [])
writer.setPosition(1);
map.integer("a").writeInt(20);
bytes = "Wilma".getBytes("UTF-8");
buffer.setBytes(0, bytes, 0, bytes.length);
map.varChar("b").writeVarChar(0, bytes.length, buffer);
writer.setValueCount(2);
// Wrap-up done by ScanBatch
container.setRecordCount(2);
container.buildSchema(SelectionVectorMode.NONE);
RowSet rowSet = fixture.wrap(container);
// Expected
TupleMetadata schema = new SchemaBuilder()
    .addNullable("a", MinorType.INT)
    .addNullable("b", MinorType.VARCHAR)
    .addArray("c", MinorType.INT)
    .buildSchema();
RowSet expected = fixture.rowSetBuilder(schema)
    .addRow(10, "Fred", new int[] { 100, 110, 120 })
    .addRow(20, "Wilma", null)
    .build();
new RowSetComparison(expected).verifyAndClearAll(rowSet);
}
} finally {
opContext.close();
}
}
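A side note on the string handling above: getBytes("UTF-8") throws the checked UnsupportedEncodingException, which is one reason the test declares throws Exception. A small sketch (plain JDK, names here chosen for illustration) of the charset-constant alternative that avoids the checked exception:

import java.nio.charset.StandardCharsets;

public class Utf8BytesDemo {
  public static void main(String[] args) {
    // getBytes(Charset) cannot throw UnsupportedEncodingException,
    // unlike getBytes("UTF-8"), so no throws clause is needed here.
    byte[] fred = "Fred".getBytes(StandardCharsets.UTF_8);
    byte[] wilma = "Wilma".getBytes(StandardCharsets.UTF_8);
    System.out.println(fred.length + ", " + wilma.length); // 4, 5
  }
}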
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestSequenceFileReader method testLimitPushdown.
@Test
public void testLimitPushdown() throws Exception {
String sql = "select * from cp.`sequencefiles/simple.seq` limit 1 offset 1";
QueryBuilder builder = client.queryBuilder().sql(sql);
RowSet sets = builder.rowSet();
assertEquals(1, sets.rowCount());
sets.clear();
}
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestSequenceFileReader method testExplicitQuery.
@Test
public void testExplicitQuery() throws Exception {
String sql = "select convert_from(binary_key, 'UTF8') as binary_key from cp.`sequencefiles/simple.seq`";
QueryBuilder builder = client.queryBuilder().sql(sql);
RowSet sets = builder.rowSet();
TupleMetadata schema = new SchemaBuilder()
    .addNullable(SequenceFileBatchReader.KEY_SCHEMA, MinorType.VARCHAR)
    .buildSchema();
RowSet expected = new RowSetBuilder(client.allocator(), schema)
    .addRow(byteWritableString("key0"))
    .addRow(byteWritableString("key1"))
    .build();
assertEquals(2, sets.rowCount());
new RowSetComparison(expected).verifyAndClearAll(sets);
}
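The expected key values go through a byteWritableString(...) helper whose body is not shown in this snippet. A hypothetical sketch of what such a helper could look like, assuming the keys in simple.seq are stored as Hadoop BytesWritable values, so the expected string is the writable-serialized form (a 4-byte length prefix followed by the key bytes) of each key:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BytesWritable;

public class ByteWritableStringSketch {
  // Hypothetical stand-in for the test's byteWritableString() helper:
  // serialize the key bytes as a BytesWritable and return that serialized
  // form as a String.
  static String byteWritableString(String input) throws Exception {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bout);
    new BytesWritable(input.getBytes(StandardCharsets.UTF_8)).write(out);
    return new String(bout.toByteArray(), StandardCharsets.UTF_8);
  }

  public static void main(String[] args) throws Exception {
    // 8 characters: the 4-byte length prefix plus "key0".
    System.out.println(byteWritableString("key0").length());
  }
}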