Example usage of org.apache.drill.test.rowSet.RowSetComparison in the Apache Drill project.
Source: class TestHDF5Format, method testExplicitQueryWithCompressedFile.
@Test
public void testExplicitQueryWithCompressedFile() throws Exception {
  // Wrap the HDF5 fixture in a zip archive, then query the dataset metadata
  // columns through the dfs plugin to confirm transparent decompression.
  generateCompressedFile("hdf5/dset.h5", "zip", "hdf5/dset.h5.zip");
  String query = "SELECT path, data_type, file_name FROM dfs.`hdf5/dset.h5.zip`";
  RowSet actual = client.queryBuilder().sql(query).rowSet();

  // All three metadata columns come back as nullable VARCHAR.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("path", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
      .add("data_type", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
      .add("file_name", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
      .buildSchema();

  // Note: file_name reports the compressed archive name, not the inner file.
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow("/dset", "DATASET", "dset.h5.zip")
      .build();

  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSetComparison in the Apache Drill project.
Source: class TestHDF5Format, method testCompoundExplicitQuery.
@Test
public void testCompoundExplicitQuery() throws Exception {
  // Project two fields of a compound HDF5 dataset via the table() syntax,
  // filtering on field_1 to keep only the first five records.
  String query = "SELECT `field_1`, `field_3` FROM table(dfs.`hdf5/non-scalar.h5` (type => 'hdf5', defaultPath => '/compound')) WHERE field_1 < 5";
  RowSet actual = client.queryBuilder().sql(query).rowSet();

  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("field_1", MinorType.INT, DataMode.OPTIONAL)
      .add("field_3", MinorType.VARCHAR, DataMode.OPTIONAL)
      .buildSchema();

  // field_3 mirrors field_1's value as a string in the test fixture.
  RowSetBuilder builder = new RowSetBuilder(client.allocator(), expectedSchema);
  for (int i = 0; i < 5; i++) {
    builder.addRow(i, String.valueOf(i));
  }
  RowSet expected = builder.build();

  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSetComparison in the Apache Drill project.
Source: class TestHDF5Format, method testStringScalarQuery.
@Test
public void testStringScalarQuery() throws Exception {
  // Flatten the /datatype/s10 string dataset into one VARCHAR row per element.
  String query = "SELECT flatten(s10) AS string_col\n"
      + "FROM dfs.`hdf5/scalar.h5`\n"
      + "WHERE path='/datatype/s10'";
  RowSet actual = client.queryBuilder().sql(query).rowSet();

  // flatten() yields a non-nullable (REQUIRED) column.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("string_col", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.REQUIRED)
      .buildSchema();

  // Fixture holds a padded first value, eight empty strings, and a full value.
  RowSetBuilder builder = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow("a ");
  for (int i = 0; i < 8; i++) {
    builder.addRow("");
  }
  RowSet expected = builder.addRow("abcdefghij").build();

  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSetComparison in the Apache Drill project.
Source: class TestHDF5Format, method testFloat32ScalarQuery.
@Test
public void testFloat32ScalarQuery() throws Exception {
  // Flatten the /datatype/float32 dataset into one FLOAT4 row per element.
  String query = "SELECT flatten(float32) AS float_col\n"
      + "FROM dfs.`hdf5/scalar.h5`\n"
      + "WHERE path='/datatype/float32'";
  RowSet actual = client.queryBuilder().sql(query).rowSet();

  // flatten() produces a non-nullable (REQUIRED) column.
  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("float_col", TypeProtos.MinorType.FLOAT4, TypeProtos.DataMode.REQUIRED)
      .buildSchema();

  // Endpoints are the extreme finite float32 values; the middle rows are 1..8.
  RowSetBuilder builder = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow(-3.4028234663852886E38);
  for (double v = 1.0; v <= 8.0; v += 1.0) {
    builder.addRow(v);
  }
  RowSet expected = builder.addRow(3.4028234663852886E38).build();

  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Example usage of org.apache.drill.test.rowSet.RowSetComparison in the Apache Drill project.
Source: class TestHDF5Format, method testFlattenStringScalarQuery.
@Test
public void testFlattenStringScalarQuery() throws Exception {
  // With defaultPath pointing at /datatype/s10, SELECT * exposes the dataset
  // directly as a nullable VARCHAR column named after the dataset.
  String query = "SELECT * FROM table(dfs.`hdf5/scalar.h5` (type => 'hdf5', defaultPath => '/datatype/s10'))";
  RowSet actual = client.queryBuilder().sql(query).rowSet();

  TupleMetadata expectedSchema = new SchemaBuilder()
      .add("s10", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
      .buildSchema();

  // Fixture holds a padded first value, eight empty strings, and a full value.
  RowSetBuilder builder = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow("a ");
  for (int i = 0; i < 8; i++) {
    builder.addRow("");
  }
  RowSet expected = builder.addRow("abcdefghij").build();

  new RowSetComparison(expected).unorderedVerifyAndClearAll(actual);
}
Aggregations