Example usage of org.apache.drill.test.rowSet.RowSet in project drill (fork by axbaretto): class ExampleTest, method secondTest.
/**
 * <p>
 * Example that builds a cluster fixture via the fixture builder, which
 * allows setting configuration (boot-time) options, session options,
 * system options and more.
 * </p>
 * <p>
 * Test files may be written to the {@link BaseDirTestWatcher#getRootDir()}
 * directory and then queried from within the test.
 * </p>
 * <p>
 * Also demonstrates printing the plan JSON and running a query silently,
 * collecting only the row count, batch count and run time.
 * </p>
 * @throws Exception if anything goes wrong
 */
@Test
public void secondTest() throws Exception {
  try (RootAllocator allocator = new RootAllocator(100_000_000)) {
    // Write a two-row JSON table into the watched test directory.
    final File employeeFile =
        dirTestWatcher.getRootDir().toPath().resolve("employee.json").toFile();
    final BatchSchema employeeSchema = new SchemaBuilder()
        .add("id", Types.required(TypeProtos.MinorType.VARCHAR))
        .add("name", Types.required(TypeProtos.MinorType.VARCHAR))
        .build();
    final RowSet employees = new RowSetBuilder(allocator, employeeSchema)
        .addRow("1", "kiwi")
        .addRow("2", "watermelon")
        .build();
    new JsonFileBuilder(employees).build(employeeFile);
    employees.clear();

    // Boot a cluster with the slice-target boot option set to 10.
    final ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
        .configProperty(ExecConstants.SLICE_TARGET, 10);
    try (ClusterFixture cluster = builder.build();
         ClientFixture client = cluster.clientFixture()) {
      final String sql = "SELECT * FROM `dfs`.`test/employee.json`";
      // Show the JSON form of the query plan.
      System.out.println(client.queryBuilder().sql(sql).explainJson());
      // Run silently; only summary statistics come back.
      final QuerySummary summary = client.queryBuilder().sql(sql).run();
      System.out.println(String.format("Read %d rows", summary.recordCount()));
      // Usually we want to test something. Here, just test that we got
      // the 2 records.
      assertEquals(2, summary.recordCount());
    }
  }
}
Example usage of org.apache.drill.test.rowSet.RowSet in project drill (by apache): class QueryBuilder, method singletonLong.
/**
 * Run the query that is expected to return (at least) one row
 * with the only (or first) column returning a long value.
 * The long value cannot be null.
 *
 * @return the value of the first column of the first row
 * @throws RpcException if anything goes wrong
 * @throws IllegalStateException if the query returned no rows
 */
public long singletonLong() throws RpcException {
  final RowSet rowSet = rowSet();
  if (rowSet == null) {
    throw new IllegalStateException("No rows returned");
  }
  try {
    final RowSetReader reader = rowSet.reader();
    // A non-null row set may still contain zero rows. The original
    // ignored next()'s return value and read past the end; fail
    // explicitly instead.
    if (!reader.next()) {
      throw new IllegalStateException("No rows returned");
    }
    return reader.column(0).getLong();
  } finally {
    // Release the row set's buffers even when the read above throws.
    rowSet.clear();
  }
}
Example usage of org.apache.drill.test.rowSet.RowSet in project drill (by apache): class TestCsv, method testInvalidCsvHeaders.
@Test
public void testInvalidCsvHeaders() throws IOException {
  // Write a file whose header line is malformed (invalidHeaders fixture)
  // and assert the column names the reader produces for it.
  final String fileName = "case3.csv";
  buildFile(fileName, invalidHeaders);
  final BatchSchema expectedSchema = new SchemaBuilder()
      .add("column_1", MinorType.VARCHAR)
      .add("column_2", MinorType.VARCHAR)
      .add("col_9b", MinorType.VARCHAR)
      .add("c", MinorType.VARCHAR)
      .add("c_2", MinorType.VARCHAR)
      .add("c_2_2", MinorType.VARCHAR)
      .build();
  final RowSet actual = client.queryBuilder().sql(makeStatement(fileName)).rowSet();
  assertEquals(expectedSchema, actual.batchSchema());
  final RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .add("10", "foo", "bar", "fourth", "fifth", "sixth")
      .build();
  // Verifies values and releases both row sets.
  new RowSetComparison(expected).verifyAndClear(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSet in project drill (by apache): class TestCsv, method testValidCsvHeaders.
@Test
public void testValidCsvHeaders() throws IOException {
  // Write a file with a well-formed header line (validHeaders fixture);
  // the header maps directly to column names a, b, c.
  final String fileName = "case2.csv";
  buildFile(fileName, validHeaders);
  final BatchSchema expectedSchema = new SchemaBuilder()
      .add("a", MinorType.VARCHAR)
      .add("b", MinorType.VARCHAR)
      .add("c", MinorType.VARCHAR)
      .build();
  final RowSet actual = client.queryBuilder().sql(makeStatement(fileName)).rowSet();
  assertEquals(expectedSchema, actual.batchSchema());
  final RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .add("10", "foo", "bar")
      .build();
  // Verifies values and releases both row sets.
  new RowSetComparison(expected).verifyAndClear(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSet in project drill (fork by axbaretto): class TestResultSetLoaderMaps, method testMapEvolution.
/**
 * Create schema with a map, then add columns to the map
 * after delivering the first batch. The new columns should appear
 * in the second-batch output.
 */
@Test
public void testMapEvolution() {
// Initial schema: INT "a" plus a map "m" holding a single VARCHAR "b".
TupleMetadata schema = new SchemaBuilder().add("a", MinorType.INT).addMap("m").add("b", MinorType.VARCHAR).resumeSchema().buildSchema();
ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder().setSchema(schema).build();
ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
// Version 3 with three columns defined (a, m, m.b) — presumably one bump each.
assertEquals(3, rsLoader.schemaVersion());
RowSetLoader rootWriter = rsLoader.writer();
rsLoader.startBatch();
rootWriter.addRow(10, objArray("fred")).addRow(20, objArray("barney"));
RowSet actual = fixture.wrap(rsLoader.harvest());
// No columns were added during the batch, so the version is unchanged.
assertEquals(3, rsLoader.schemaVersion());
assertEquals(2, actual.rowCount());
// Validate first batch
SingleRowSet expected = fixture.rowSetBuilder(schema).addRow(10, objArray("fred")).addRow(20, objArray("barney")).build();
new RowSetComparison(expected).verifyAndClearAll(actual);
// Add three columns in the second batch. One before
// the batch starts, one before the first row, and one after
// the first row.
TupleWriter mapWriter = rootWriter.tuple("m");
mapWriter.addColumn(SchemaBuilder.columnSchema("c", MinorType.INT, DataMode.REQUIRED));
rsLoader.startBatch();
mapWriter.addColumn(SchemaBuilder.columnSchema("d", MinorType.BIGINT, DataMode.REQUIRED));
rootWriter.addRow(30, objArray("wilma", 130, 130_000L));
mapWriter.addColumn(SchemaBuilder.columnSchema("e", MinorType.VARCHAR, DataMode.REQUIRED));
rootWriter.addRow(40, objArray("betty", 140, 140_000L, "bam-bam"));
actual = fixture.wrap(rsLoader.harvest());
// Three new map columns bump the version from 3 to 6.
assertEquals(6, rsLoader.schemaVersion());
assertEquals(2, actual.rowCount());
// Validate second batch. Row 30 was written before column "e" existed,
// so "e" back-fills as an empty string in the expected output.
TupleMetadata expectedSchema = new SchemaBuilder().add("a", MinorType.INT).addMap("m").add("b", MinorType.VARCHAR).add("c", MinorType.INT).add("d", MinorType.BIGINT).add("e", MinorType.VARCHAR).resumeSchema().buildSchema();
expected = fixture.rowSetBuilder(expectedSchema).addRow(30, objArray("wilma", 130, 130_000L, "")).addRow(40, objArray("betty", 140, 140_000L, "bam-bam")).build();
new RowSetComparison(expected).verifyAndClearAll(actual);
rsLoader.close();
}
Aggregations