Use of org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumnDefn in project drill by apache.
From class ReaderSchemaOrchestrator, method projectMetadata:
  /**
   * Sets the {@code PROJECT_METADATA} implicit column value.
   *
   * @param projectMetadata whether the implicit column should be switched to
   *        {@code PROJECT_METADATA}; when {@code false} it is switched back to
   *        {@code USE_METADATA}
   * @return {@code true} if the {@code PROJECT_METADATA} implicit column is
   *         present in the scan projection and its value was updated
   */
  private boolean projectMetadata(boolean projectMetadata) {
    ImplicitInternalFileColumns original;
    ImplicitInternalFileColumns newColumn;
    if (projectMetadata) {
      original = ImplicitInternalFileColumns.USE_METADATA;
      newColumn = ImplicitInternalFileColumns.PROJECT_METADATA;
    } else {
      original = ImplicitInternalFileColumns.PROJECT_METADATA;
      newColumn = ImplicitInternalFileColumns.USE_METADATA;
    }
    List<ColumnProjection> outputColumns = scanOrchestrator.scanProj.columns();
    for (int i = 0; i < outputColumns.size(); i++) {
      ColumnProjection outputColumn = outputColumns.get(i);
      if (outputColumn instanceof FileMetadataColumn) {
        FileMetadataColumn metadataColumn = (FileMetadataColumn) outputColumn;
        if (metadataColumn.defn().defn == original) {
          projectMetadata = scanOrchestrator.scanProj.requestedCols().stream()
              .anyMatch(SchemaPath.getSimplePath(metadataColumn.name())::equals);
          if (projectMetadata) {
            outputColumns.set(i, new FileMetadataColumn(metadataColumn.name(),
                new FileMetadataColumnDefn(metadataColumn.defn().colName(), newColumn)));
          }
          return projectMetadata;
        }
      }
    }
    return false;
  }
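
The swap above only changes which implicit-column enum value backs an already-projected column; the column name itself is left untouched. Below is a minimal, self-contained sketch of that pattern using hypothetical stand-in types (ImplicitCol, ColumnDefn, MetadataColumn) rather than Drill's actual FileMetadataColumn/FileMetadataColumnDefn API; it assumes a Java 16+ compiler for records, and the column name is a placeholder.

import java.util.ArrayList;
import java.util.List;

public class ColumnSwapSketch {
  // Stand-in for ImplicitInternalFileColumns
  enum ImplicitCol { USE_METADATA, PROJECT_METADATA }

  // Stand-ins for FileMetadataColumnDefn and FileMetadataColumn
  record ColumnDefn(String colName, ImplicitCol defn) {}
  record MetadataColumn(String name, ColumnDefn defn) {}

  // Replace the USE_METADATA-backed column with a PROJECT_METADATA one, keeping its name.
  static boolean swap(List<MetadataColumn> columns) {
    for (int i = 0; i < columns.size(); i++) {
      MetadataColumn col = columns.get(i);
      if (col.defn().defn() == ImplicitCol.USE_METADATA) {
        columns.set(i, new MetadataColumn(col.name(),
            new ColumnDefn(col.defn().colName(), ImplicitCol.PROJECT_METADATA)));
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    List<MetadataColumn> cols = new ArrayList<>();
    cols.add(new MetadataColumn("metadataCol",            // placeholder column name
        new ColumnDefn("metadataCol", ImplicitCol.USE_METADATA)));
    System.out.println(swap(cols));  // true
    System.out.println(cols);        // the single column now carries PROJECT_METADATA
  }
}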
Use of org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumnDefn in project drill by apache.
From class TestConstantColumnLoader, method testFileMetadata:
  @Test
  public void testFileMetadata() {
    FileMetadata fileInfo = new FileMetadata(new Path("hdfs:///w/x/y/z.csv"), new Path("hdfs:///w"));
    List<ConstantColumnSpec> defns = new ArrayList<>();
    FileMetadataColumnDefn iDefn = new FileMetadataColumnDefn(ScanTestUtils.SUFFIX_COL, ImplicitFileColumns.SUFFIX);
    FileMetadataColumn iCol = new FileMetadataColumn(ScanTestUtils.SUFFIX_COL, iDefn, fileInfo, null, 0);
    defns.add(iCol);

    String partColName = ScanTestUtils.partitionColName(1);
    PartitionColumn pCol = new PartitionColumn(partColName, 1, fileInfo, null, 0);
    defns.add(pCol);

    ResultVectorCacheImpl cache = new ResultVectorCacheImpl(fixture.allocator());
    ConstantColumnLoader staticLoader = new ConstantColumnLoader(cache, defns);

    // Create a batch
    staticLoader.load(2);

    // Verify
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add(ScanTestUtils.SUFFIX_COL, MinorType.VARCHAR)
        .addNullable(partColName, MinorType.VARCHAR)
        .buildSchema();
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow("csv", "y")
        .addRow("csv", "y")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(fixture.wrap(staticLoader.load(2)));
    staticLoader.close();
  }
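
The constant values in the expected rows, "csv" for the suffix column and "y" for the dir1 partition column, follow directly from the file path hdfs:///w/x/y/z.csv relative to the root hdfs:///w. Below is a rough standalone sketch of that derivation using plain string handling; it is not Drill's FileMetadata implementation, only an illustration of where the two constants come from.

public class FileMetadataSketch {
  public static void main(String[] args) {
    String root = "hdfs:///w";
    String file = "hdfs:///w/x/y/z.csv";

    // Suffix: text after the last '.' in the file name -> "csv"
    String name = file.substring(file.lastIndexOf('/') + 1);
    String suffix = name.substring(name.lastIndexOf('.') + 1);

    // Partition dirs: path components between the root and the file -> ["x", "y"]
    String relative = file.substring(root.length() + 1, file.lastIndexOf('/'));
    String[] dirs = relative.split("/");

    System.out.println(suffix);   // csv -> value of the suffix column in both rows
    System.out.println(dirs[1]);  // y   -> value of the dir1 partition column in both rows
  }
}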