Example 11 with TestBuilder

use of org.apache.drill.test.TestBuilder in project drill by axbaretto.

From the class TestCorruptParquetDateCorrection, method testCorruptValueDetectionDuringPruning.

@Test
public void testCorruptValueDetectionDuringPruning() throws Exception {
    try {
        for (String selection : new String[] { "*", "date_col" }) {
            for (Path table : new Path[] { CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH }) {
                // for sanity, try reading all partitions without a filter
                TestBuilder builder = testBuilder().sqlQuery("select %s from dfs.`%s`", selection, table).unOrdered().baselineColumns("date_col");
                addDateBaselineValues(builder);
                builder.go();
                String query = format("select %s from dfs.`%s`" + " where date_col = date '1970-01-01'", selection, table);
                // verify that pruning is actually taking place
                testPlanMatchingPatterns(query, new String[] { "numFiles=1" }, null);
                // read with a filter on the partition column
                testBuilder().sqlQuery(query).unOrdered().baselineColumns("date_col").baselineValues(new DateTime(1970, 1, 1, 0, 0)).go();
            }
        }
    } finally {
        test("alter session reset all");
    }
}
Also used : Path(java.nio.file.Path) TestBuilder(org.apache.drill.test.TestBuilder) DateTime(org.joda.time.DateTime) ParquetTest(org.apache.drill.categories.ParquetTest) Test(org.junit.Test) UnlikelyTest(org.apache.drill.categories.UnlikelyTest)
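
The addDateBaselineValues helper referenced above is defined elsewhere in TestCorruptParquetDateCorrection and is not shown on this page. A minimal sketch of its likely shape, assuming the fixture tables contain a fixed set of known dates (the DateTime values below are illustrative only, not the real baselines), is:

private void addDateBaselineValues(TestBuilder builder) {
    // One baselineValues(...) call per expected row. The dates below are placeholders;
    // the real baseline dates are defined by the test's Parquet fixtures.
    builder.baselineValues(new DateTime(1970, 1, 1, 0, 0))
           .baselineValues(new DateTime(1970, 1, 2, 0, 0))
           .baselineValues(new DateTime(2015, 1, 1, 0, 0));
}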

Example 12 with TestBuilder

use of org.apache.drill.test.TestBuilder in project drill by axbaretto.

From the class TestCorruptParquetDateCorrection, method readMixedCorruptedAndCorrectDates.

/**
 * Read a directory with parquet files where some have corrupted dates, see DRILL-4203.
 * @throws Exception
 */
private void readMixedCorruptedAndCorrectDates() throws Exception {
    // ensure that selecting the date column explicitly, or as part of a star query, still results
    // in checking the file metadata (and, when needed, the statistics for bad values)
    // to set the flag that the values are corrupt
    for (String selection : new String[] { "*", "date_col" }) {
        TestBuilder builder = testBuilder().sqlQuery("select %s from dfs.`%s`", selection, MIXED_CORRUPTED_AND_CORRECT_DATES_PATH).unOrdered().baselineColumns("date_col");
        for (int i = 0; i < 4; i++) {
            addDateBaselineValues(builder);
        }
        builder.go();
    }
}
Also used : TestBuilder(org.apache.drill.test.TestBuilder)
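
Stripped of the loop, the verification pattern used in this example reduces to the skeleton below; the table path and the expected value are placeholders, not part of the original test.

// Minimal TestBuilder skeleton (hypothetical path and baseline value):
testBuilder()
    .sqlQuery("select date_col from dfs.`%s`", "some/table/path")  // query under test
    .unOrdered()                                                    // row order does not matter
    .baselineColumns("date_col")                                    // expected column names
    .baselineValues(new DateTime(1970, 1, 1, 0, 0))                 // one call per expected row
    .go();                                                          // run the query and verify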

Example 13 with TestBuilder

use of org.apache.drill.test.TestBuilder in project drill by axbaretto.

From the class TestExternalSort, method testNumericTypes.

/**
 * Test union type support in sort using numeric types: BIGINT and FLOAT8.
 * Drill does not support union types fully; the sort was adapted to handle them.
 * This test simply verifies that the sort handles these types, even though
 * Drill as a whole does not fully support them.
 *
 * @param testLegacy
 *          true to test the old (pre-1.11) sort, false to test the new (1.11
 *          and later) sort
 * @throws Exception
 */
private void testNumericTypes(boolean testLegacy) throws Exception {
    final int record_count = 10000;
    final String tableDirName = "numericTypes";
    {
        final BatchSchema schema = new SchemaBuilder().add("a", Types.required(TypeProtos.MinorType.INT)).build();
        final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
        for (int i = 0; i <= record_count; i += 2) {
            rowSetBuilder.addRow(i);
        }
        final RowSet rowSet = rowSetBuilder.build();
        final File tableFile = createTableFile(tableDirName, "a.json");
        new JsonFileBuilder(rowSet).build(tableFile);
        rowSet.clear();
    }
    {
        final BatchSchema schema = new SchemaBuilder().add("a", Types.required(TypeProtos.MinorType.FLOAT4)).build();
        final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
        for (int i = 1; i <= record_count; i += 2) {
            rowSetBuilder.addRow((float) i);
        }
        final RowSet rowSet = rowSetBuilder.build();
        final File tableFile = createTableFile(tableDirName, "b.json");
        new JsonFileBuilder(rowSet).setCustomFormatter("a", "%.2f").build(tableFile);
        rowSet.clear();
    }
    TestBuilder builder = testBuilder().sqlQuery("select * from dfs.`%s` order by a desc", tableDirName).optionSettingQueriesForTestQuery(getOptions(testLegacy)).ordered().baselineColumns("a");
    for (int i = record_count; i >= 0; ) {
        builder.baselineValues((long) i--);
        if (i >= 0) {
            builder.baselineValues((double) i--);
        }
    }
    builder.go();
}
Also used : RowSetBuilder(org.apache.drill.test.rowSet.RowSetBuilder) BatchSchema(org.apache.drill.exec.record.BatchSchema) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) RowSet(org.apache.drill.test.rowSet.RowSet) JsonFileBuilder(org.apache.drill.test.rowSet.file.JsonFileBuilder) File(java.io.File) TestBuilder(org.apache.drill.test.TestBuilder)
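
The getOptions(testLegacy) helper is defined elsewhere in TestExternalSort and not shown above. A plausible sketch, assuming the legacy (pre-1.11) sort is selected through the exec.sort.disable_managed session option (an assumption, not confirmed by this excerpt), is:

// Hypothetical sketch of getOptions(): builds the session-option queries passed to
// optionSettingQueriesForTestQuery(). The exec.sort.disable_managed option name is an assumption.
private String getOptions(boolean testLegacy) {
    return "alter session set `exec.enable_union_type` = true; "
        + "alter session set `exec.sort.disable_managed` = " + testLegacy;
}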

Example 14 with TestBuilder

use of org.apache.drill.test.TestBuilder in project drill by axbaretto.

From the class TestFlatten, method drill1673.

// repeated list within a repeated map
@Test
@Category(UnlikelyTest.class)
public void drill1673() throws Exception {
    String jsonRecords = BaseTestQuery.getFile("store/json/1673.json");
    int numCopies = 25000;
    new TestConstantFolding.SmallFileCreator(pathDir).setRecord(jsonRecords).createFiles(1, numCopies, "json");
    TestBuilder builder = testBuilder().sqlQuery("select t.fixed_column as fixed_column, " + "flatten(t.list_column) as list_col " + "from dfs.`%s/bigfile/bigfile.json` as t", TEST_DIR).baselineColumns("fixed_column", "list_col").unOrdered();
    Object map1 = mapOf("id1", "1", "name", "zhu", "num", listOf(listOf(1l, 2l, 3l)));
    Object map2 = mapOf("id1", "2", "name", "hao", "num", listOf(listOf(4l, 5l, 6l)));
    for (int i = 0; i < numCopies; i++) {
        builder.baselineValues("abc", map1);
        builder.baselineValues("abc", map2);
    }
    builder.go();
}
Also used : TestBuilder(org.apache.drill.test.TestBuilder) Category(org.junit.experimental.categories.Category) OperatorTest(org.apache.drill.categories.OperatorTest) Test(org.junit.Test) UnlikelyTest(org.apache.drill.categories.UnlikelyTest)
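
mapOf and listOf are static helpers on TestBuilder (statically imported in the test) for building complex baseline values. A standalone usage with placeholder data looks like this:

// Assumes static imports of TestBuilder.mapOf and TestBuilder.listOf.
Object expected = mapOf(
        "id1", "1",                            // alternating key/value pairs become map entries
        "name", "zhu",
        "num", listOf(listOf(1L, 2L, 3L)));    // nested listOf models a repeated list
builder.baselineValues("abc", expected);       // one expected row: a varchar column and a map column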

Example 15 with TestBuilder

use of org.apache.drill.test.TestBuilder in project drill by axbaretto.

From the class TestMergeJoinWithSchemaChanges, method testMissingAndNewColumns.

@Ignore("DRILL-5612")
@Test
public void testMissingAndNewColumns() throws Exception {
    // missing column kl
    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(leftDir, "l1.json")));
    for (int i = 0; i < 50; ++i) {
        writer.write(String.format("{ \"kl1\" : %d , \"vl1\": %d }\n", i, i));
    }
    writer.close();
    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l2.json")));
    for (int i = 50; i < 100; ++i) {
        writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
    }
    writer.close();
    writer = new BufferedWriter(new FileWriter(new File(leftDir, "l3.json")));
    for (int i = 100; i < 150; ++i) {
        writer.write(String.format("{ \"kl2\" : %d , \"vl2\": %d }\n", i, i));
    }
    writer.close();
    // right missing column kr
    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r1.json")));
    for (int i = 0; i < 50; ++i) {
        writer.write(String.format("{ \"kr1\" : %f , \"vr1\": %f }\n", (float) i, (float) i));
    }
    writer.close();
    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r2.json")));
    for (int i = 50; i < 100; ++i) {
        writer.write(String.format("{ \"kr\" : %f , \"vr\": %f }\n", (float) i, (float) i));
    }
    writer.close();
    writer = new BufferedWriter(new FileWriter(new File(rightDir, "r3.json")));
    for (int i = 100; i < 150; ++i) {
        writer.write(String.format("{ \"kr2\" : %f , \"vr2\": %f }\n", (float) i, (float) i));
    }
    writer.close();
    // INNER JOIN
    String query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr", LEFT_DIR, "inner", RIGHT_DIR);
    TestBuilder builder = testBuilder()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
        .unOrdered()
        .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
    for (long i = 50; i < 100; ++i) {
        builder.baselineValues(i, i, (double) i, (double) i, null, null, null, null, null, null, null, null);
    }
    builder.go();
    // LEFT JOIN
    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr", LEFT_DIR, "left", RIGHT_DIR);
    builder = testBuilder()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
        .unOrdered()
        .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
    for (long i = 0; i < 50; ++i) {
        builder.baselineValues(null, null, null, null, i, i, null, null, null, null, null, null);
    }
    for (long i = 50; i < 100; ++i) {
        builder.baselineValues(i, i, (double) i, (double) i, null, null, null, null, null, null, null, null);
    }
    for (long i = 100; i < 150; ++i) {
        builder.baselineValues(null, null, null, null, null, null, i, i, null, null, null, null);
    }
    builder.go();
    // RIGHT JOIN
    query = String.format("select * from dfs.`%s` L %s join dfs.`%s` R on L.kl=R.kr", LEFT_DIR, "right", RIGHT_DIR);
    builder = testBuilder()
        .sqlQuery(query)
        .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
        .unOrdered()
        .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
    for (long i = 0; i < 50; ++i) {
        builder.baselineValues(null, null, null, null, null, null, null, null, (double) i, (double) i, null, null);
    }
    for (long i = 50; i < 100; ++i) {
        builder.baselineValues(i, i, (double) i, (double) i, null, null, null, null, null, null, null, null);
    }
    for (long i = 100; i < 150; ++i) {
        builder.baselineValues(null, null, null, null, null, null, null, null, null, null, (double) i, (double) i);
    }
    builder.go();
}
Also used : FileWriter(java.io.FileWriter) File(java.io.File) TestBuilder(org.apache.drill.test.TestBuilder) BufferedWriter(java.io.BufferedWriter) Ignore(org.junit.Ignore) OperatorTest(org.apache.drill.categories.OperatorTest) Test(org.junit.Test)
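
The six writer blocks above repeat one pattern with different column names and ranges. A hypothetical helper (not part of the original test class) that captures that pattern is sketched below; it could replace each block with a single call such as writeJsonRows(leftDir, "l1.json", "kl1", "vl1", 0, 50, false).

// Hypothetical helper: writes rows of the form { "<key>" : i , "<value>": i } as JSON lines,
// mirroring the blocks above. Integer and float variants match the left and right tables.
private static void writeJsonRows(File dir, String fileName, String keyCol, String valCol,
        int from, int to, boolean asFloat) throws IOException {
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dir, fileName)))) {
        for (int i = from; i < to; ++i) {
            if (asFloat) {
                writer.write(String.format("{ \"%s\" : %f , \"%s\": %f }\n", keyCol, (float) i, valCol, (float) i));
            } else {
                writer.write(String.format("{ \"%s\" : %d , \"%s\": %d }\n", keyCol, i, valCol, i));
            }
        }
    }
}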

Aggregations

TestBuilder (org.apache.drill.test.TestBuilder) 94
Test (org.junit.Test) 74
File (java.io.File) 27
OperatorTest (org.apache.drill.categories.OperatorTest) 27
ClusterTest (org.apache.drill.test.ClusterTest) 20
BufferedWriter (java.io.BufferedWriter) 18
FileWriter (java.io.FileWriter) 18
UnlikelyTest (org.apache.drill.categories.UnlikelyTest) 16
ParquetTest (org.apache.drill.categories.ParquetTest) 10
TestBuilder.mapOfObject (org.apache.drill.test.TestBuilder.mapOfObject) 6
Path (java.nio.file.Path) 5
JsonFileBuilder (org.apache.drill.test.rowSet.file.JsonFileBuilder) 5
Category (org.junit.experimental.categories.Category) 5
SlowTest (org.apache.drill.categories.SlowTest) 4
SqlTest (org.apache.drill.categories.SqlTest) 4
JsonStringHashMap (org.apache.drill.exec.util.JsonStringHashMap) 4
IOException (java.io.IOException) 3
UserRemoteException (org.apache.drill.common.exceptions.UserRemoteException) 3
RowSet (org.apache.drill.exec.physical.rowSet.RowSet) 3
RowSetBuilder (org.apache.drill.exec.physical.rowSet.RowSetBuilder) 3