Use of org.apache.drill.metastore.iceberg.transform.FilterTransformer in the Apache Drill project: class TestTablesOperationTransformer, method testToOverwriteOperation.
@Test
public void testToOverwriteOperation() {
  // Build a metadata unit identified by storage plugin / workspace / table / metadata key.
  TableMetadataUnit unit = TableMetadataUnit.builder()
      .storagePlugin("dfs")
      .workspace("tmp")
      .tableName("nation")
      .metadataKey("dir0")
      .build();

  TableKey tableKey = new TableKey(unit.storagePlugin(), unit.workspace(), unit.tableName());

  // Filter conditions: the table key columns plus the metadata key.
  Map<MetastoreColumn, Object> conditions = new HashMap<>(tableKey.toFilterConditions());
  conditions.put(MetastoreColumn.METADATA_KEY, unit.metadataKey());

  String tableLocation = tableKey.toLocation(TestTablesOperationTransformer.location);
  Expression expected = new FilterTransformer().transform(conditions);

  Overwrite operation = transformer.toOverwrite(tableLocation, expected, Collections.singletonList(unit));

  // The overwrite operation must carry the exact filter that was supplied.
  assertEquals(expected.toString(), operation.filter().toString());

  // The written data file must exist, and must live directly under the resolved table location.
  Path dataFilePath = new Path(String.valueOf(operation.dataFile().path()));
  File dataFile = new File(dataFilePath.toUri().getPath());
  assertTrue(dataFile.exists());
  assertEquals(tableLocation, dataFilePath.getParent().toUri().getPath());
}
Use of org.apache.drill.metastore.iceberg.transform.FilterTransformer in the Apache Drill project: class IcebergRead, method internalExecute.
@Override
protected List<T> internalExecute() {
  // Resolve the projection: fall back to the default column set when the
  // caller requested no explicit columns.
  String[] projection;
  if (columns.isEmpty()) {
    projection = defaultColumns;
  } else {
    projection = columns.stream()
        .map(MetastoreColumn::columnName)
        .toArray(String[]::new);
  }

  // Combine the metadata-type restriction with the caller-supplied filter
  // into a single Iceberg row filter expression.
  FilterTransformer filters = context.transformer().filter();
  Expression rowFilter = filters.combine(
      filters.transform(metadataTypes),
      filters.transform(filter));

  // NOTE(review): the scan result from IcebergGenerics may be a
  // CloseableIterable — confirm whether it should be closed after the
  // records are materialized below.
  Iterable<Record> scanned = IcebergGenerics.read(context.table())
      .select(projection)
      .where(rowFilter)
      .build();

  // Materialize the records and convert them to the output representation.
  return context.transformer()
      .outputData()
      .columns(projection)
      .records(Lists.newArrayList(scanned))
      .execute();
}
Aggregations