
Example 1 with DummyFileWork

Use of org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork in project drill by apache.

From the class TestFileScanLifecycle, method testWildcard.

@Test
public void testWildcard() {
    FileScanLifecycleBuilder builder = new FileScanLifecycleBuilder();
    builder.rootDir(MOCK_ROOT_PATH);
    builder.maxPartitionDepth(3);
    builder.fileSplits(Collections.singletonList(new DummyFileWork(MOCK_FILE_PATH)));
    builder.useLegacyWildcardExpansion(true);
    builder.readerFactory(new FileReaderFactory() {

        @Override
        public ManagedReader newReader(FileSchemaNegotiator negotiator) {
            return new MockEarlySchemaReader(negotiator, 1);
        }
    });
    ScanLifecycle scan = buildScan(builder);
    assertSame(ProjectionType.ALL, scan.schemaTracker().projectionType());
    RowBatchReader reader = scan.nextReader();
    assertTrue(reader.open());
    assertTrue(reader.next());
    TupleMetadata expectedSchema = new SchemaBuilder()
        .addAll(SCHEMA)
        .add(FileScanUtils.partitionColName(0), PARTITION_COL_TYPE)
        .add(FileScanUtils.partitionColName(1), PARTITION_COL_TYPE)
        .add(FileScanUtils.partitionColName(2), PARTITION_COL_TYPE)
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(10, "fred", MOCK_DIR0, MOCK_DIR1, null)
        .addRow(20, "wilma", MOCK_DIR0, MOCK_DIR1, null)
        .build();
    RowSetUtilities.verify(expected, fixture.wrap(reader.output()));
    assertFalse(reader.next());
    reader.close();
    scan.close();
}
Also used : RowBatchReader(org.apache.drill.exec.physical.impl.scan.RowBatchReader) DummyFileWork(org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) ManagedReader(org.apache.drill.exec.physical.impl.scan.v3.ManagedReader) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) RowSet(org.apache.drill.exec.physical.rowSet.RowSet) ScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle) BaseTestScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle) Test(org.junit.Test) EvfTest(org.apache.drill.categories.EvfTest)
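
Every example on this page hands the builder a DummyFileWork describing a single file split. The class itself is not reproduced here; as a minimal sketch, assuming DummyFileWork simply stubs out Drill's FileWork contract (getPath(), getStart(), getLength()), it could look roughly like this. The field names and zero defaults are illustrative, not copied from BaseFileScanTest.

// Illustrative sketch of a FileWork stub; the real DummyFileWork lives in
// BaseFileScanTest and may differ in detail.
public static class DummyFileWork implements FileWork {
    private final Path path;

    public DummyFileWork(Path path) {
        this.path = path;
    }

    @Override
    public Path getPath() { return path; }   // the file this split covers

    @Override
    public long getStart() { return 0; }     // whole-file split: start at 0

    @Override
    public long getLength() { return 0; }    // length is irrelevant to these tests
}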

Example 2 with DummyFileWork

Use of org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork in project drill by apache.

From the class TestFileScanLifecycle, method testPartitionColumnTwoDigits.

/**
 * Test the obscure case that the partition column contains two digits:
 * dir11. Also tests the obscure case that the output only has partition
 * columns.
 */
@Test
public void testPartitionColumnTwoDigits() {
    Path filePath = new Path("file:/w/0/1/2/3/4/5/6/7/8/9/10/d11/z.csv");
    FileScanLifecycleBuilder builder = new FileScanLifecycleBuilder();
    builder.rootDir(MOCK_ROOT_PATH);
    builder.maxPartitionDepth(11);
    builder.projection(RowSetTestUtils.projectList("a", "b", FileScanUtils.partitionColName(11)));
    builder.fileSplits(Collections.singletonList(new DummyFileWork(filePath)));
    builder.useLegacyWildcardExpansion(true);
    builder.readerFactory(new FileReaderFactory() {

        @Override
        public ManagedReader newReader(FileSchemaNegotiator negotiator) {
            return new MockEarlySchemaReader(negotiator, 1);
        }
    });
    ScanLifecycle scan = buildScan(builder);
    RowBatchReader reader = scan.nextReader();
    assertTrue(reader.open());
    assertTrue(reader.next());
    TupleMetadata expectedSchema = new SchemaBuilder()
        .addAll(SCHEMA)
        .add(FileScanUtils.partitionColName(11), PARTITION_COL_TYPE)
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(10, "fred", "d11")
        .addRow(20, "wilma", "d11")
        .build();
    RowSetUtilities.verify(expected, fixture.wrap(reader.output()));
    assertFalse(reader.next());
    reader.close();
    scan.close();
}
Also used : SchemaPath(org.apache.drill.common.expression.SchemaPath) Path(org.apache.hadoop.fs.Path) RowBatchReader(org.apache.drill.exec.physical.impl.scan.RowBatchReader) ManagedReader(org.apache.drill.exec.physical.impl.scan.v3.ManagedReader) RowSet(org.apache.drill.exec.physical.rowSet.RowSet) DummyFileWork(org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle) BaseTestScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle) Test(org.junit.Test) EvfTest(org.apache.drill.categories.EvfTest)
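
The point of this example is that the partition column parser must read "dir11" as partition level 11 rather than stopping after the first digit. Assuming Drill's usual dirN naming convention for partition columns, FileScanUtils.partitionColName() presumably just concatenates a prefix and the level; the sketch below shows that convention and the two-digit parsing concern it implies. The helper names and the PARTITION_PREFIX constant are illustrative, not the actual FileScanUtils code.

// Sketch of the assumed dirN naming convention; not the real implementation.
static final String PARTITION_PREFIX = "dir";

static String partitionColName(int level) {
    return PARTITION_PREFIX + level;         // dir0, dir1, ..., dir11
}

// A projected name such as "dir11" must be parsed back using its full
// numeric suffix, not just the first digit:
static int partitionLevel(String colName) {
    return Integer.parseInt(colName.substring(PARTITION_PREFIX.length()));
}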

Example 3 with DummyFileWork

Use of org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork in project drill by apache.

From the class TestFileScanLifecycle, method testAllColumns.

@Test
public void testAllColumns() {
    FileScanLifecycleBuilder builder = new FileScanLifecycleBuilder();
    builder.rootDir(MOCK_ROOT_PATH);
    builder.maxPartitionDepth(3);
    builder.projection(FileScanUtils.projectAllWithMetadata(3));
    builder.fileSplits(Collections.singletonList(new DummyFileWork(MOCK_FILE_PATH)));
    builder.useLegacyWildcardExpansion(true);
    builder.readerFactory(new FileReaderFactory() {

        @Override
        public ManagedReader newReader(FileSchemaNegotiator negotiator) {
            return new MockEarlySchemaReader(negotiator, 1);
        }
    });
    ScanLifecycle scan = buildScan(builder);
    assertSame(ProjectionType.ALL, scan.schemaTracker().projectionType());
    RowBatchReader reader = scan.nextReader();
    assertTrue(reader.open());
    assertTrue(reader.next());
    TupleMetadata expectedSchema = new SchemaBuilder()
        .addAll(SCHEMA)
        .add(FileScanUtils.FULLY_QUALIFIED_NAME_COL, IMPLICIT_COL_TYPE)
        .add(FileScanUtils.FILE_PATH_COL, IMPLICIT_COL_TYPE)
        .add(FileScanUtils.FILE_NAME_COL, IMPLICIT_COL_TYPE)
        .add(FileScanUtils.SUFFIX_COL, IMPLICIT_COL_TYPE)
        .add(FileScanUtils.partitionColName(0), PARTITION_COL_TYPE)
        .add(FileScanUtils.partitionColName(1), PARTITION_COL_TYPE)
        .add(FileScanUtils.partitionColName(2), PARTITION_COL_TYPE)
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(10, "fred", MOCK_FILE_FQN, MOCK_FILE_DIR_PATH, MOCK_FILE_NAME, MOCK_SUFFIX, MOCK_DIR0, MOCK_DIR1, null)
        .addRow(20, "wilma", MOCK_FILE_FQN, MOCK_FILE_DIR_PATH, MOCK_FILE_NAME, MOCK_SUFFIX, MOCK_DIR0, MOCK_DIR1, null)
        .build();
    RowSetUtilities.verify(expected, fixture.wrap(reader.output()));
    assertFalse(reader.next());
    reader.close();
    scan.close();
}
Also used : RowBatchReader(org.apache.drill.exec.physical.impl.scan.RowBatchReader) DummyFileWork(org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) ManagedReader(org.apache.drill.exec.physical.impl.scan.v3.ManagedReader) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) RowSet(org.apache.drill.exec.physical.rowSet.RowSet) ScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle) BaseTestScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle) Test(org.junit.Test) EvfTest(org.apache.drill.categories.EvfTest)
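
FileScanUtils.projectAllWithMetadata(3) requests the wildcard plus the file metadata columns and three partition levels, which is why the expected schema appends the fully qualified name, file path, file name, suffix, and dir0 through dir2 after the table columns. Below is a hedged sketch of a roughly equivalent explicit projection, built with the same helpers the other tests on this page use; the exact expansion order inside projectAllWithMetadata is assumed.

// Assumed expansion of projectAllWithMetadata(3): wildcard, file metadata
// columns, then three partition levels.
builder.projection(RowSetTestUtils.projectList(
    SchemaPath.DYNAMIC_STAR,                 // all table columns
    FileScanUtils.FULLY_QUALIFIED_NAME_COL,
    FileScanUtils.FILE_PATH_COL,
    FileScanUtils.FILE_NAME_COL,
    FileScanUtils.SUFFIX_COL,
    FileScanUtils.partitionColName(0),
    FileScanUtils.partitionColName(1),
    FileScanUtils.partitionColName(2)));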

Example 4 with DummyFileWork

Use of org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork in project drill by apache.

From the class TestFileScanLifecycle, method testSingleCol.

@Test
public void testSingleCol() {
    FileScanLifecycleBuilder builder = new FileScanLifecycleBuilder();
    builder.projection(RowSetTestUtils.projectList(FileScanUtils.FILE_NAME_COL, SchemaPath.DYNAMIC_STAR));
    builder.rootDir(MOCK_ROOT_PATH);
    builder.fileSplits(Collections.singletonList(new DummyFileWork(MOCK_FILE_PATH)));
    builder.readerFactory(new FileReaderFactory() {

        @Override
        public ManagedReader newReader(FileSchemaNegotiator negotiator) {
            return new MockEarlySchemaReader(negotiator, 1);
        }
    });
    ScanLifecycle scan = buildScan(builder);
    assertSame(ProjectionType.ALL, scan.schemaTracker().projectionType());
    RowBatchReader reader = scan.nextReader();
    assertTrue(reader.open());
    assertTrue(reader.next());
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add(FileScanUtils.FILE_NAME_COL, IMPLICIT_COL_TYPE)
        .addAll(SCHEMA)
        .build();
    RowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow(MOCK_FILE_NAME, 10, "fred")
        .addRow(MOCK_FILE_NAME, 20, "wilma")
        .build();
    RowSetUtilities.verify(expected, fixture.wrap(reader.output()));
    assertFalse(reader.next());
    reader.close();
    scan.close();
}
Also used : RowBatchReader(org.apache.drill.exec.physical.impl.scan.RowBatchReader) DummyFileWork(org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) ManagedReader(org.apache.drill.exec.physical.impl.scan.v3.ManagedReader) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) RowSet(org.apache.drill.exec.physical.rowSet.RowSet) ScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle) BaseTestScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle) Test(org.junit.Test) EvfTest(org.apache.drill.categories.EvfTest)
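
Each example builds its reader through a FileReaderFactory that returns a MockEarlySchemaReader; that class is defined in BaseTestScanLifecycle and is not shown on this page. As a loose sketch, assuming the v3 negotiator follows the usual EVF pattern of declaring the table schema up front and then building a ResultSetLoader, an early-schema mock that produces the two rows verified above might look roughly like this. Names and signatures are illustrative, not the actual test code.

// Illustrative only: the real MockEarlySchemaReader may differ.
private static class MockEarlySchemaReader implements ManagedReader {
    private final ResultSetLoader loader;
    private final int batchLimit;
    private int batchCount;

    MockEarlySchemaReader(FileSchemaNegotiator negotiator, int batchLimit) {
        this.batchLimit = batchLimit;
        negotiator.tableSchema(SCHEMA, true);  // "early schema": declared before reading
        this.loader = negotiator.build();
    }

    @Override
    public boolean next() {
        if (batchCount >= batchLimit) {
            return false;                      // EOF after the configured batch count
        }
        RowSetLoader writer = loader.writer();
        writer.addRow(10, "fred");
        writer.addRow(20, "wilma");
        batchCount++;
        return true;
    }

    @Override
    public void close() { }
}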

Example 5 with DummyFileWork

Use of org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork in project drill by apache.

From the class TestFileScanLifecycle, method testCtorUserError.

/**
 * Verify that errors thrown from file-based readers include the file name
 * in addition to the scan and reader level error contexts.
 */
@Test
public void testCtorUserError() {
    FileScanLifecycleBuilder builder = new FileScanLifecycleBuilder();
    builder.errorContext(b -> b.addContext("Scan context"));
    builder.rootDir(MOCK_ROOT_PATH);
    builder.maxPartitionDepth(3);
    builder.projection(FileScanUtils.projectAllWithMetadata(3));
    builder.fileSplits(Collections.singletonList(new DummyFileWork(MOCK_FILE_PATH)));
    builder.useLegacyWildcardExpansion(true);
    builder.readerFactory(new FileReaderFactory() {

        @Override
        public ManagedReader newReader(FileSchemaNegotiator negotiator) {
            return new FailingReader(negotiator, "ctor-u");
        }
    });
    ScanLifecycle scan = buildScan(builder);
    RowBatchReader reader = scan.nextReader();
    try {
        reader.open();
        fail();
    } catch (UserException e) {
        String msg = e.getMessage();
        assertTrue(msg.contains("Oops ctor"));
        assertTrue(msg.contains("My custom context"));
        assertTrue(msg.contains("Scan context"));
        assertTrue(msg.contains(MOCK_FILE_NAME));
        assertNull(e.getCause());
    }
    scan.close();
}
Also used : RowBatchReader(org.apache.drill.exec.physical.impl.scan.RowBatchReader) DummyFileWork(org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork) ManagedReader(org.apache.drill.exec.physical.impl.scan.v3.ManagedReader) UserException(org.apache.drill.common.exceptions.UserException) ScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle) BaseTestScanLifecycle(org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle) Test(org.junit.Test) EvfTest(org.apache.drill.categories.EvfTest)
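
The assertions confirm that the final UserException carries the reader's own message and context ("Oops ctor", "My custom context"), the scan-level context registered via builder.errorContext(), and the name of the file being read, which the file scan framework adds on the reader's behalf. FailingReader is defined in the shared test base; the sketch below shows one plausible way such a reader's constructor could raise a context-bearing UserException. The UserException builder calls are standard Drill API, but the exact wiring inside FailingReader is assumed.

// Sketch: a reader whose constructor fails with a user error carrying its
// own context; the framework is expected to append scan and file context.
private static class FailingReader implements ManagedReader {
    private static final org.slf4j.Logger logger =
        org.slf4j.LoggerFactory.getLogger(FailingReader.class);

    FailingReader(FileSchemaNegotiator negotiator, String failureMode) {
        if ("ctor-u".equals(failureMode)) {
            throw UserException.dataReadError()
                .message("Oops ctor")
                .addContext("My custom context")
                .build(logger);
        }
    }

    @Override
    public boolean next() { return false; }

    @Override
    public void close() { }
}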

Aggregations

EvfTest (org.apache.drill.categories.EvfTest): 6 usages
RowBatchReader (org.apache.drill.exec.physical.impl.scan.RowBatchReader): 6 usages
ManagedReader (org.apache.drill.exec.physical.impl.scan.v3.ManagedReader): 6 usages
DummyFileWork (org.apache.drill.exec.physical.impl.scan.v3.file.BaseFileScanTest.DummyFileWork): 6 usages
BaseTestScanLifecycle (org.apache.drill.exec.physical.impl.scan.v3.lifecycle.BaseTestScanLifecycle): 6 usages
ScanLifecycle (org.apache.drill.exec.physical.impl.scan.v3.lifecycle.ScanLifecycle): 6 usages
Test (org.junit.Test): 6 usages
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 4 usages
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 4 usages
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 4 usages
UserException (org.apache.drill.common.exceptions.UserException): 1 usage
SchemaPath (org.apache.drill.common.expression.SchemaPath): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage