Search in sources:

Example 1 with ResolvedRow

Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.

From the class TestImplicitColumnProjection, method testPartitionColumnTwoDigits:

/**
 * Tests the obscure case in which the partition column name contains
 * two digits (dir11). Also tests the obscure case in which the output
 * contains only partition columns.
 */
@Test
public void testPartitionColumnTwoDigits() {
    Path filePath = new Path("hdfs:///x/0/1/2/3/4/5/6/7/8/9/10/d11/z.csv");
    ImplicitColumnManager metadataManager = new ImplicitColumnManager(fixture.getOptionManager(), standardOptions(filePath));
    ScanLevelProjection scanProj = ScanLevelProjection.build(RowSetTestUtils.projectList("dir11"), ScanTestUtils.parsers(metadataManager.projectionParser()));
    TupleMetadata tableSchema = new SchemaBuilder().add("a", MinorType.VARCHAR).buildSchema();
    metadataManager.startFile(filePath);
    NullColumnBuilder builder = new NullBuilderBuilder().build();
    ResolvedRow rootTuple = new ResolvedRow(builder);
    new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers(metadataManager));
    List<ResolvedColumn> columns = rootTuple.columns();
    assertEquals(1, columns.size());
    assertEquals("d11", ((MetadataColumn) columns.get(0)).value());
}
Also used: Path (org.apache.hadoop.fs.Path), ImplicitColumnManager (org.apache.drill.exec.physical.impl.scan.file.ImplicitColumnManager), NullColumnBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder), ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow), ScanLevelProjection (org.apache.drill.exec.physical.impl.scan.project.ScanLevelProjection), TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder), ExplicitSchemaProjection (org.apache.drill.exec.physical.impl.scan.project.ExplicitSchemaProjection), ResolvedColumn (org.apache.drill.exec.physical.impl.scan.project.ResolvedColumn), SubOperatorTest (org.apache.drill.test.SubOperatorTest), Test (org.junit.Test)
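The test above relies on the partition-column parser matching the full numeric suffix of "dir11" rather than stopping at a single digit. The following is a minimal, self-contained sketch of that name-to-index mapping; the class, regex, and helper method are hypothetical illustrations, not Drill's actual implementation.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PartitionNameSketch {

    // Match the whole numeric suffix of a "dirN" column name, so that
    // "dir11" parses as index 11 rather than stopping at the first digit.
    private static final Pattern DIR_COL = Pattern.compile("dir(\\d+)");

    static int partitionIndex(String colName) {
        Matcher m = DIR_COL.matcher(colName);
        if (!m.matches()) {
            throw new IllegalArgumentException("Not a partition column: " + colName);
        }
        return Integer.parseInt(m.group(1));
    }

    public static void main(String[] args) {
        // Directories between the scan root and z.csv, in path order.
        String[] dirs = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "d11"};
        // dir11 resolves to the twelfth directory, "d11", matching the assertion above.
        System.out.println(dirs[partitionIndex("dir11")]);
    }
}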

Example 2 with ResolvedRow

Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.

From the class TestImplicitColumnProjection, method testProjectList:

/**
 * Test the file projection planner with metadata.
 */
@Test
public void testProjectList() {
    Path filePath = new Path("hdfs:///w/x/y/z.csv");
    ImplicitColumnManager metadataManager = new ImplicitColumnManager(fixture.getOptionManager(), standardOptions(filePath));
    ScanLevelProjection scanProj = ScanLevelProjection.build(RowSetTestUtils.projectList(ScanTestUtils.FILE_NAME_COL, "a", ScanTestUtils.partitionColName(0)), ScanTestUtils.parsers(metadataManager.projectionParser()));
    assertEquals(3, scanProj.columns().size());
    // Scan-level projection: defines the columns
    {
        assertTrue(scanProj.columns().get(0) instanceof FileMetadataColumn);
        FileMetadataColumn col0 = (FileMetadataColumn) scanProj.columns().get(0);
        assertEquals(ScanTestUtils.FILE_NAME_COL, col0.name());
        assertEquals(MinorType.VARCHAR, col0.schema().getType().getMinorType());
        assertEquals(DataMode.REQUIRED, col0.schema().getType().getMode());
        ColumnProjection col1 = scanProj.columns().get(1);
        assertTrue(col1 instanceof UnresolvedColumn);
        assertEquals("a", col1.name());
        assertTrue(scanProj.columns().get(2) instanceof PartitionColumn);
        PartitionColumn col2 = (PartitionColumn) scanProj.columns().get(2);
        assertEquals(ScanTestUtils.partitionColName(0), col2.name());
        assertEquals(MinorType.VARCHAR, col2.schema().getType().getMinorType());
        assertEquals(DataMode.OPTIONAL, col2.schema().getType().getMode());
    }
    // Schema-level projection: fills in values.
    TupleMetadata tableSchema = new SchemaBuilder().add("a", MinorType.VARCHAR).buildSchema();
    metadataManager.startFile(filePath);
    NullColumnBuilder builder = new NullBuilderBuilder().build();
    ResolvedRow rootTuple = new ResolvedRow(builder);
    new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers(metadataManager));
    List<ResolvedColumn> columns = rootTuple.columns();
    assertEquals(3, columns.size());
    {
        assertTrue(columns.get(0) instanceof FileMetadataColumn);
        FileMetadataColumn col0 = (FileMetadataColumn) columns.get(0);
        assertEquals(ScanTestUtils.FILE_NAME_COL, col0.name());
        assertEquals("z.csv", col0.value());
        assertEquals(MinorType.VARCHAR, col0.schema().getType().getMinorType());
        assertEquals(DataMode.REQUIRED, col0.schema().getType().getMode());
        ResolvedColumn col1 = columns.get(1);
        assertEquals("a", col1.name());
        assertTrue(columns.get(2) instanceof PartitionColumn);
        PartitionColumn col2 = (PartitionColumn) columns.get(2);
        assertEquals(ScanTestUtils.partitionColName(0), col2.name());
        assertEquals("x", col2.value());
        assertEquals(MinorType.VARCHAR, col2.schema().getType().getMinorType());
        assertEquals(DataMode.OPTIONAL, col2.schema().getType().getMode());
    }
    // Verify that the file metadata columns were picked out
    assertEquals(2, metadataManager.metadataColumns().size());
    assertSame(columns.get(0), metadataManager.metadataColumns().get(0));
    assertSame(columns.get(2), metadataManager.metadataColumns().get(1));
}
Also used: Path (org.apache.hadoop.fs.Path), ImplicitColumnManager (org.apache.drill.exec.physical.impl.scan.file.ImplicitColumnManager), ScanLevelProjection (org.apache.drill.exec.physical.impl.scan.project.ScanLevelProjection), PartitionColumn (org.apache.drill.exec.physical.impl.scan.file.PartitionColumn), NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder), ExplicitSchemaProjection (org.apache.drill.exec.physical.impl.scan.project.ExplicitSchemaProjection), ResolvedColumn (org.apache.drill.exec.physical.impl.scan.project.ResolvedColumn), NullColumnBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder), ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow), UnresolvedColumn (org.apache.drill.exec.physical.impl.scan.project.AbstractUnresolvedColumn.UnresolvedColumn), ColumnProjection (org.apache.drill.exec.physical.impl.scan.project.ColumnProjection), TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), FileMetadataColumn (org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumn), SubOperatorTest (org.apache.drill.test.SubOperatorTest), Test (org.junit.Test)
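This test shows the two-phase flow: the scan-level projection defines the columns, and the schema-level projection fills in concrete values once startFile() has been called. The helper below is a hypothetical sketch of walking the resulting ResolvedRow; it uses only the accessors already exercised in these tests (columns(), name(), value()) and is not part of Drill.

// Hypothetical helper, not part of Drill: report what the two-phase
// projection resolved for each column of a ResolvedRow.
static void describeResolvedRow(ResolvedRow rootTuple) {
    for (ResolvedColumn col : rootTuple.columns()) {
        if (col instanceof MetadataColumn) {
            // File and partition metadata columns carry a concrete value
            // after startFile(); e.g. filename = "z.csv", dir0 = "x".
            System.out.println(col.name() + " = " + ((MetadataColumn) col).value());
        } else {
            // Table columns are filled from reader data, not at plan time.
            System.out.println(col.name() + " (table column)");
        }
    }
}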

Example 3 with ResolvedRow

Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.

From the class TestReaderLevelProjection, method testSubset:

/**
 * Tests an explicit projection (a provided column list) in which the
 * names in the project list differ in case from the data source, the
 * column order differs, and only a subset of the data source columns
 * is requested.
 */
@Test
public void testSubset() {
    // Simulate SELECT c, a ...
    final ScanLevelProjection scanProj = ScanLevelProjection.build(RowSetTestUtils.projectList("c", "a"), ScanTestUtils.parsers());
    assertEquals(2, scanProj.columns().size());
    // Simulate a data source, with early schema, of (a, b, c)
    final TupleMetadata tableSchema = new SchemaBuilder().add("A", MinorType.VARCHAR).add("B", MinorType.VARCHAR).add("C", MinorType.VARCHAR).buildSchema();
    final NullColumnBuilder builder = new NullBuilderBuilder().build();
    final ResolvedRow rootTuple = new ResolvedRow(builder);
    new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers());
    final List<ResolvedColumn> columns = rootTuple.columns();
    assertEquals(2, columns.size());
    assertEquals("c", columns.get(0).name());
    assertEquals(2, columns.get(0).sourceIndex());
    assertSame(rootTuple, columns.get(0).source());
    assertEquals("a", columns.get(1).name());
    assertEquals(0, columns.get(1).sourceIndex());
    assertSame(rootTuple, columns.get(1).source());
}
Also used: ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow), TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder), SubOperatorTest (org.apache.drill.test.SubOperatorTest), Test (org.junit.Test)
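The assertions above depend on two rules: projected names match data-source names case-insensitively, and each resolved column records its position in the data-source schema as its sourceIndex. A minimal stand-alone sketch of that lookup follows; the method is a hypothetical stand-in for logic that lives inside ExplicitSchemaProjection.

import java.util.Arrays;
import java.util.List;

// Hypothetical: resolve a projected name against a table schema,
// case-insensitively, returning its source index (or -1 if absent,
// in which case Drill would materialize a null column instead).
static int resolveSourceIndex(String projectedName, List<String> tableColumns) {
    for (int i = 0; i < tableColumns.size(); i++) {
        if (tableColumns.get(i).equalsIgnoreCase(projectedName)) {
            return i;
        }
    }
    return -1;
}

// resolveSourceIndex("c", Arrays.asList("A", "B", "C")) == 2
// resolveSourceIndex("a", Arrays.asList("A", "B", "C")) == 0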

Example 4 with ResolvedRow

Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.

From the class TestReaderLevelProjection, method testSimpleMapProject:

/**
 * Simple map projection. This is an internal case in which the query
 * asks for a set of columns inside a map, and the table loader produces
 * exactly that set. No special projection is needed; the map is
 * projected as a whole.
 */
@Test
public void testSimpleMapProject() {
    // Simulate SELECT a.b, a.c ...
    final ScanLevelProjection scanProj = ScanLevelProjection.build(RowSetTestUtils.projectList("a.b", "a.c"), ScanTestUtils.parsers());
    assertEquals(1, scanProj.columns().size());
    // Simulate a data source, with early schema, of (a{b, c})
    final TupleMetadata tableSchema = new SchemaBuilder().addMap("a").add("b", MinorType.BIGINT).add("c", MinorType.FLOAT8).resumeSchema().buildSchema();
    final NullColumnBuilder builder = new NullBuilderBuilder().build();
    final ResolvedRow rootTuple = new ResolvedRow(builder);
    new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers());
    final List<ResolvedColumn> columns = rootTuple.columns();
    assertEquals(1, columns.size());
    // The projected members (a.b, a.c) exactly match the table's map
    // schema, so "a" is projected as a single map vector, not as a
    // restructured map.
    final ResolvedColumn aCol = columns.get(0);
    assertEquals("a", aCol.name());
    assertTrue(aCol instanceof ResolvedTableColumn);
    assertSame(rootTuple, aCol.source());
    assertEquals(0, aCol.sourceIndex());
}
Also used: ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow), TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder), SubOperatorTest (org.apache.drill.test.SubOperatorTest), Test (org.junit.Test)
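The first assertion (a single scan-level column for SELECT a.b, a.c) follows from dotted project paths being grouped under their root segment. The sketch below illustrates only that grouping rule; the method is hypothetical, and the real work happens inside ScanLevelProjection.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical: collapse a dotted project list onto its root columns.
// groupByRoot(Arrays.asList("a.b", "a.c")) yields {a=[b, c]}: one
// projected column "a" with two requested members, as the test asserts.
static Map<String, List<String>> groupByRoot(List<String> projectList) {
    Map<String, List<String>> roots = new LinkedHashMap<>();
    for (String path : projectList) {
        int dot = path.indexOf('.');
        String root = dot < 0 ? path : path.substring(0, dot);
        List<String> members = roots.computeIfAbsent(root, k -> new ArrayList<>());
        if (dot >= 0) {
            members.add(path.substring(dot + 1));
        }
    }
    return roots;
}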

Example 5 with ResolvedRow

Use of org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow in project drill by apache.

From the class TestReaderLevelProjection, method testArrayMismatch:

/**
 * Tests projection of a non-array column as an array.
 */
@Test
public void testArrayMismatch() {
    // Simulate SELECT a[0] ...
    final ScanLevelProjection scanProj = ScanLevelProjection.build(RowSetTestUtils.projectList("a[0]"), ScanTestUtils.parsers());
    // Simulate a data source, with early schema, of (a)
    // where a is not an array.
    final TupleMetadata tableSchema = new SchemaBuilder().add("a", MinorType.VARCHAR).buildSchema();
    final NullColumnBuilder builder = new NullBuilderBuilder().build();
    final ResolvedRow rootTuple = new ResolvedRow(builder);
    try {
        new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers());
        fail("Expected UserException: cannot project a non-array column as an array");
    } catch (final UserException e) {
        // Expected: the projection list asked for a[0], but column a is a scalar.
    }
}
Also used: ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow), TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder), UserException (org.apache.drill.common.exceptions.UserException), NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder), SubOperatorTest (org.apache.drill.test.SubOperatorTest), Test (org.junit.Test)
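The try/fail/catch pattern above can be written more compactly with assertThrows. This is a sketch assuming JUnit 4.13 or later is on the classpath (org.junit.Assert.assertThrows), not the test's actual code:

import static org.junit.Assert.assertThrows;

@Test
public void testArrayMismatchCompact() {
    final ScanLevelProjection scanProj = ScanLevelProjection.build(
        RowSetTestUtils.projectList("a[0]"), ScanTestUtils.parsers());
    final TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.VARCHAR).buildSchema();
    final ResolvedRow rootTuple = new ResolvedRow(new NullBuilderBuilder().build());
    // Projecting a non-array column with an array index must fail.
    assertThrows(UserException.class, () ->
        new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple,
            ScanTestUtils.resolvers()));
}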

Aggregations

ResolvedRow (org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow): 37 usages
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 36 usages
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 36 usages
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 36 usages
Test (org.junit.Test): 36 usages
NullBuilderBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder): 24 usages
ImplicitColumnManager (org.apache.drill.exec.physical.impl.scan.file.ImplicitColumnManager): 7 usages
Path (org.apache.hadoop.fs.Path): 7 usages
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 6 usages
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 6 usages
VectorContainer (org.apache.drill.exec.record.VectorContainer): 6 usages
RowSetComparison (org.apache.drill.test.rowSet.RowSetComparison): 6 usages
ResultVectorCache (org.apache.drill.exec.physical.resultSet.ResultVectorCache): 4 usages
NullResultVectorCacheImpl (org.apache.drill.exec.physical.resultSet.impl.NullResultVectorCacheImpl): 4 usages
ExplicitSchemaProjection (org.apache.drill.exec.physical.impl.scan.project.ExplicitSchemaProjection): 3 usages
NullColumnBuilder (org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder): 3 usages
ResolvedColumn (org.apache.drill.exec.physical.impl.scan.project.ResolvedColumn): 3 usages
ScanLevelProjection (org.apache.drill.exec.physical.impl.scan.project.ScanLevelProjection): 3 usages
List (java.util.List): 2 usages
UserException (org.apache.drill.common.exceptions.UserException): 2 usages