Example usage of org.apache.drill.exec.physical.rowSet.RowSetBuilder in the Apache Drill project, taken from class TestHDF5Format, method test4DFlattenScalarQuery.
@Test
public void test4DFlattenScalarQuery() throws Exception {
  // Query two flattened integer columns from the 4D dataset, filtering on the first column.
  String sql = "SELECT int_col_0, int_col_1 FROM table(dfs.`hdf5/scalar.h5` (type => 'hdf5', defaultPath => '/nd/4D')) WHERE int_col_0 <= 2";
  RowSet actual = client.queryBuilder().sql(sql).rowSet();

  // Both projected columns are nullable INTs.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("int_col_0", MinorType.INT, DataMode.OPTIONAL)
      .add("int_col_1", MinorType.INT, DataMode.OPTIONAL)
      .buildSchema();

  // First expected value is Integer.MIN_VALUE.
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow(-2147483648, 5)
      .addRow(1, 6)
      .build();

  // Row order is not asserted; this also releases both row sets.
  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.exec.physical.rowSet.RowSetBuilder in the Apache Drill project, taken from class TestHDF5Format, method test3DFlattenScalarQuery.
@Test
public void test3DFlattenScalarQuery() throws Exception {
  // Query two flattened integer columns from the 3D dataset, with no filter applied.
  String sql = "SELECT int_col_0, int_col_1 FROM table(dfs.`hdf5/scalar.h5` (type => 'hdf5', defaultPath => '/nd/3D'))";
  RowSet actual = client.queryBuilder().sql(sql).rowSet();

  // Both projected columns are nullable INTs.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("int_col_0", MinorType.INT, DataMode.OPTIONAL)
      .add("int_col_1", MinorType.INT, DataMode.OPTIONAL)
      .buildSchema();

  // First expected value is Integer.MIN_VALUE.
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow(-2147483648, 5)
      .addRow(1, 6)
      .addRow(2, 7)
      .addRow(3, 8)
      .build();

  // Row order is not asserted; this also releases both row sets.
  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.exec.physical.rowSet.RowSetBuilder in the Apache Drill project, taken from class TestHDF5Format, method testCompoundExplicitQuery2.
@Test
public void testCompoundExplicitQuery2() throws Exception {
  // Project a single member of a compound dataset with a filter on that member.
  String sql = "SELECT `field_1` FROM table(dfs.`hdf5/non-scalar.h5` (type => 'hdf5', defaultPath => '/compound')) WHERE field_1 < 5";
  RowSet results = client.queryBuilder().sql(sql).rowSet();

  // Use the imported MinorType/DataMode simple names for consistency with the
  // other tests in this class (previously fully qualified via TypeProtos).
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("field_1", MinorType.INT, DataMode.OPTIONAL)
      .buildSchema();

  // Filter keeps exactly the five rows with field_1 in [0, 4].
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow(0)
      .addRow(1)
      .addRow(2)
      .addRow(3)
      .addRow(4)
      .build();

  // Row order is not asserted; this also releases both row sets.
  new RowSetComparison(expected).unorderedVerifyAndClearAll(results);
}
Example usage of org.apache.drill.exec.physical.rowSet.RowSetBuilder in the Apache Drill project, taken from class TestHDF5Format, method testCompoundStarQuery.
@Test
public void testCompoundStarQuery() throws Exception {
  // Wildcard projection over a compound dataset, filtered on the first member.
  String sql = "SELECT * FROM table(dfs.`hdf5/non-scalar.h5` (type => 'hdf5', defaultPath => '/compound')) WHERE field_1 < 5";
  RowSet actual = client.queryBuilder().sql(sql).rowSet();

  // All three compound members should surface as nullable columns.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("field_1", MinorType.INT, DataMode.OPTIONAL)
      .add("field_2", MinorType.FLOAT8, DataMode.OPTIONAL)
      .add("field_3", MinorType.VARCHAR, DataMode.OPTIONAL)
      .buildSchema();

  RowSetBuilder expectedRows = new RowSetBuilder(client.allocator(), expectedSchema);
  for (int i = 0; i < 5; i++) {
    expectedRows.addRow(i, (double) i, String.valueOf(i));
  }
  RowSet expected = expectedRows.build();

  // Row order is not asserted; this also releases both row sets.
  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.exec.physical.rowSet.RowSetBuilder in the Apache Drill project, taken from class TestHDF5Format, method testExplicitQueryWithCompressedFile.
@Test
public void testExplicitQueryWithCompressedFile() throws Exception {
  // Wrap the HDF5 file in a zip archive to verify the format plugin reads
  // through Drill's compressed-file handling.
  generateCompressedFile("hdf5/dset.h5", "zip", "hdf5/dset.h5.zip");

  String sql = "SELECT path, data_type, file_name FROM dfs.`hdf5/dset.h5.zip`";
  RowSet results = client.queryBuilder().sql(sql).rowSet();

  // Use the imported MinorType/DataMode simple names for consistency with the
  // other tests in this class (previously fully qualified via TypeProtos).
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("path", MinorType.VARCHAR, DataMode.OPTIONAL)
      .add("data_type", MinorType.VARCHAR, DataMode.OPTIONAL)
      .add("file_name", MinorType.VARCHAR, DataMode.OPTIONAL)
      .buildSchema();

  // file_name should report the outer archive name, not the inner .h5 file.
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow("/dset", "DATASET", "dset.h5.zip")
      .build();

  // Row order is not asserted; this also releases both row sets.
  new RowSetComparison(expected).unorderedVerifyAndClearAll(results);
}
Aggregations