use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestCsvWithSchema method testMissingColsReqDefault.
/**
* Verify the behavior of missing columns in NOT NULL mode with
* a default value.
*/
@Test
public void testMissingColsReqDefault() throws Exception {
  String tableName = "missingColsDefault";
  String tablePath = buildTable(tableName, trivalContents);
  try {
    enableSchemaSupport();
    String sql = "create or replace schema ("
        + "col_int integer not null default '10', "
        + "col_bigint bigint not null default '10', "
        + "col_double double not null default '10.5', "
        + "col_float float not null default '10.5f', "
        + "col_var varchar not null default 'foo', "
        + "col_boolean boolean not null default '1', "
        + "col_interval interval not null default 'P10D', "
        + "col_time time not null default '12:34:56', "
        + "col_date date not null default '2019-03-28', "
        + "col_timestamp timestamp not null format 'yyyy-MM-dd HH:mm:ss' default '2019-03-28 12:34:56'"
        + ") for table %s";
    run(sql, tablePath);
    // Note the leading space before ORDER BY; without it the concatenated
    // SQL would read "...missingColsDefaultORDER BY id" and fail to parse.
    sql = "SELECT * FROM " + tablePath + " ORDER BY id";
    RowSet actual = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("col_int", MinorType.INT)
        .add("col_bigint", MinorType.BIGINT)
        .add("col_double", MinorType.FLOAT8)
        .add("col_float", MinorType.FLOAT4)
        .add("col_var", MinorType.VARCHAR)
        .add("col_boolean", MinorType.BIT)
        .add("col_interval", MinorType.INTERVAL)
        .add("col_time", MinorType.TIME)
        .add("col_date", MinorType.DATE)
        .add("col_timestamp", MinorType.TIMESTAMP)
        .add("id", MinorType.VARCHAR)
        .buildSchema();
    LocalTime lt = LocalTime.of(12, 34, 56);
    LocalDate ld = LocalDate.of(2019, 3, 28);
    Instant ts = LocalDateTime.of(ld, lt).toInstant(ZoneOffset.UTC);
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(10, 10L, 10.5, 10.5f, "foo", true,
            new Period(0).plusDays(10), lt, ld, ts, "1")
        .build();
    RowSetUtilities.verify(expected, actual);
  } finally {
    resetSchemaSupport();
  }
}
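The expected row set pins down what the trivalContents fixture must look like: the file physically contains only an id column with one row, so every schema column is missing and takes its declared NOT NULL default. A minimal sketch of a consistent input (the real fixture is defined elsewhere in the test class):

// Hypothetical input consistent with the expected output above; the real
// trivalContents fixture lives elsewhere in TestCsvWithSchema.
private static final String[] trivalContentsSketch = {
  "id",   // header row: the only column physically present in the file
  "1"     // one data row; all other columns come from schema defaults
};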
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestCsvWithSchema method testMultiFileSchemaMissingCol.
/**
* Test the case that a file does not contain a required column (in this case,
* id in the third file.) There are two choices: 1) fail the query, or
* 2) muddle through as best we can. The scan framework chooses to
* muddle through by assuming a default value of 0 for the missing int
* column.
* <p>
* Inserts an ORDER BY to force a single batch in a known order. Assumes
* the other ORDER BY tests pass.
* <p>
* This test shows that having consistent types is sufficient for the sort
* operator to work; the DAG will include a project operator that reorders
* the columns when produced by readers in different orders. (Column ordering
* is more an abstract concept anyway in a columnar system such as Drill.)
*/
@Test
public void testMultiFileSchemaMissingCol() throws Exception {
  RowSet expected = null;
  try {
    enableSchemaSupport();
    enableMultiScan();
    String tablePath = buildTable("schemaMissingCols", raggedMulti1Contents,
        reordered2Contents, multi3Contents);
    run(SCHEMA_SQL, tablePath);
    // Wildcard expands to the union of the schema and the table. In this
    // case all table columns appear in the schema (though not all schema
    // columns appear in the table.)
    String sql = "SELECT id, `name`, `date`, gender, comment FROM " + tablePath
        + " ORDER BY id";
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("id", MinorType.INT)
        .add("name", MinorType.VARCHAR)
        .addNullable("date", MinorType.DATE)
        .add("gender", MinorType.VARCHAR)
        .add("comment", MinorType.VARCHAR)
        .buildSchema();
    expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(0, "dino", LocalDate.of(2018, 9, 1), "NA", "ABC")
        .addRow(1, "wilma", LocalDate.of(2019, 1, 18), "female", "ABC")
        .addRow(2, "fred", LocalDate.of(2019, 1, 19), "male", "ABC")
        .addRow(3, "barney", LocalDate.of(2001, 1, 16), "NA", "ABC")
        .addRow(4, "betty", LocalDate.of(2019, 5, 4), "NA", "ABC")
        .build();
    // Run the query repeatedly: with multiple scan fragments the reader
    // order varies from run to run. The expected row set is reused across
    // iterations, so compare with RowSetComparison, which clears only the
    // actual results.
    for (int i = 0; i < 10; i++) {
      RowSet results = client.queryBuilder().sql(sql).rowSet();
      new RowSetComparison(expected).verifyAndClear(results);
    }
  } finally {
    // Guard against an exception thrown before "expected" is built.
    if (expected != null) {
      expected.clear();
    }
    resetSchemaSupport();
    resetMultiScan();
  }
}
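The first expected row makes the "muddle through" choice concrete: dino comes from the file that lacks the id column, so the scan framework fills the NOT NULL INT with zero and that row sorts first. A rough sketch of the effective fill rule, with illustrative names only (this is not the actual scan-framework code):

// Illustrative only: the effective value the scan framework supplies when
// a file lacks a column the query needs. Not the actual framework code.
static Object missingColumnFill(MinorType type, boolean nullable) {
  if (nullable) {
    return null;    // nullable missing columns become NULL
  }
  switch (type) {
    case INT:
      return 0;     // required INT columns fill with zero, as seen above
    case BIGINT:
      return 0L;
    case VARCHAR:
      return "";    // required strings fill with the empty string
    default:
      throw new UnsupportedOperationException(
          "sketch covers only the types used in this test");
  }
}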
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestCsvWithoutHeaders method testColumnsAndMetadata.
@Test
public void testColumnsAndMetadata() throws IOException {
  String sql = "SELECT columns, filename FROM `dfs.data`.`%s`";
  RowSet actual = client.queryBuilder().sql(sql, TEST_FILE_NAME).rowSet();
  TupleMetadata expectedSchema = new SchemaBuilder()
      .addArray("columns", MinorType.VARCHAR)
      .add("filename", MinorType.VARCHAR)
      .buildSchema();
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addRow(strArray("10", "foo", "bar"), TEST_FILE_NAME)
      .addRow(strArray("20", "fred", "wilma"), TEST_FILE_NAME)
      .build();
  RowSetUtilities.verify(expected, actual);
}
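The strArray() calls above read compactly because the helper (presumably the varargs convenience from RowSetUtilities in Drill's test support code) is just a pass-through; a minimal equivalent:

// Minimal equivalent of the strArray() helper used above (assumption: the
// real one is provided by RowSetUtilities): a varargs pass-through, so an
// array-valued cell can be written inline in addRow().
public static String[] strArray(String... values) {
  return values;
}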
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestCsvWithoutHeaders method testWildcard.
/**
* Verify that the wildcard expands to the `columns` array.
*/
@Test
public void testWildcard() throws IOException {
  String sql = "SELECT * FROM `dfs.data`.`%s`";
  RowSet actual = client.queryBuilder().sql(sql, TEST_FILE_NAME).rowSet();
  TupleMetadata expectedSchema = new SchemaBuilder()
      .addArray("columns", MinorType.VARCHAR)
      .buildSchema();
  RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
      .addSingleCol(strArray("10", "foo", "bar"))
      .addSingleCol(strArray("20", "fred", "wilma"))
      .build();
  RowSetUtilities.verify(expected, actual);
}
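Both tests above expect the same two rows, which implies the contents of TEST_FILE_NAME; the real file is created in the test class setup, so the fixture below is an inferred sketch:

// Hypothetical fixture, inferred from the expected rows in both tests;
// the real TEST_FILE_NAME file is built elsewhere in TestCsvWithoutHeaders.
private static final String[] sampleContents = {
  "10,foo,bar",
  "20,fred,wilma"
};
// Without a header line, the CSV reader exposes each record as a single
// VARCHAR array column named `columns`.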
use of org.apache.drill.exec.physical.rowSet.RowSet in project drill by apache.
the class TestCsvWithoutHeaders method testHugeColumn.
@Test
public void testHugeColumn() throws IOException {
  String fileName = buildBigColFile(false);
  String sql = "SELECT * FROM `dfs.data`.`%s`";
  RowSet actual = client.queryBuilder().sql(sql, fileName).rowSet();
  assertEquals(10, actual.rowCount());
  RowSetReader reader = actual.reader();
  ArrayReader arrayReader = reader.array(0);
  while (reader.next()) {
    int i = reader.logicalIndex();
    // First field: the 1-based row number.
    arrayReader.next();
    assertEquals(Integer.toString(i + 1), arrayReader.scalar().getString());
    // Second field: a BIG_COL_SIZE-character string of rotating letters.
    arrayReader.next();
    String big = arrayReader.scalar().getString();
    assertEquals(BIG_COL_SIZE, big.length());
    for (int j = 0; j < BIG_COL_SIZE; j++) {
      assertEquals((char) ((j + i) % 26 + 'A'), big.charAt(j));
    }
    // Third field: ten times the 1-based row number.
    arrayReader.next();
    assertEquals(Integer.toString((i + 1) * 10), arrayReader.scalar().getString());
  }
  actual.clear();
}
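The verification loop pins down the shape of the file: row i holds the row number, a BIG_COL_SIZE-character run of rotating letters, and ten times the row number. A plausible sketch of what buildBigColFile(false) generates (the real helper is defined in the test class; the name, signature, and file name below are illustrative):

// Illustrative generator consistent with the assertions above; the real
// buildBigColFile() helper is defined in the test base class.
// Requires java.io.File, java.io.FileWriter, java.io.IOException,
// java.io.PrintWriter.
private String buildBigColFileSketch(File testDir) throws IOException {
  File file = new File(testDir, "hugeCol.csv");
  try (PrintWriter out = new PrintWriter(new FileWriter(file))) {
    for (int i = 0; i < 10; i++) {
      StringBuilder big = new StringBuilder(BIG_COL_SIZE);
      for (int j = 0; j < BIG_COL_SIZE; j++) {
        big.append((char) ((j + i) % 26 + 'A'));
      }
      out.println((i + 1) + "," + big + "," + (i + 1) * 10);
    }
  }
  return file.getName();
}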