Use of io.trino.plugin.hive.GenericHiveRecordCursorProvider in the trino project by trinodb.
The following example shows the createGenericReader method of the AbstractFileFormat class.
@Override
public ConnectorPageSource createGenericReader(
        ConnectorSession session,
        HdfsEnvironment hdfsEnvironment,
        File targetFile,
        List<ColumnHandle> readColumns,
        List<String> schemaColumnNames,
        List<Type> schemaColumnTypes)
{
    HivePageSourceProvider factory = new HivePageSourceProvider(
            TESTING_TYPE_MANAGER,
            hdfsEnvironment,
            new HiveConfig(),
            getHivePageSourceFactory(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            getHiveRecordCursorProvider(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            new GenericHiveRecordCursorProvider(hdfsEnvironment, new HiveConfig()),
            Optional.empty());

    Properties schema = createSchema(getFormat(), schemaColumnNames, schemaColumnTypes);

    HiveSplit split = new HiveSplit(
            "schema_name",
            "table_name",
            "",
            targetFile.getPath(),
            0,
            targetFile.length(),
            targetFile.length(),
            targetFile.lastModified(),
            schema,
            ImmutableList.of(),
            ImmutableList.of(),
            OptionalInt.empty(),
            0,
            false,
            TableToPartitionMapping.empty(),
            Optional.empty(),
            Optional.empty(),
            false,
            Optional.empty(),
            0,
            SplitWeight.standard());

    return factory.createPageSource(
            TestingConnectorTransactionHandle.INSTANCE,
            session,
            split,
            new HiveTableHandle("schema_name", "table_name", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()),
            readColumns,
            DynamicFilter.EMPTY);
}
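The ConnectorPageSource returned by createGenericReader is typically drained page by page by the caller. Below is a minimal illustrative sketch of such a consumer; the class name PageSourceReader and the method countRows are hypothetical helpers, not part of the Trino codebase, and the page source is assumed to come from a call like the one above.

import io.trino.spi.Page;
import io.trino.spi.connector.ConnectorPageSource;

public final class PageSourceReader
{
    private PageSourceReader() {}

    // Hypothetical helper: drains a page source (e.g. one produced by
    // createGenericReader) and returns the total number of rows read.
    // A minimal sketch; per-column decoding and error handling are omitted.
    public static long countRows(ConnectorPageSource pageSource)
            throws Exception
    {
        long rows = 0;
        try (ConnectorPageSource source = pageSource) {
            while (!source.isFinished()) {
                Page page = source.getNextPage();
                if (page != null) {
                    rows += page.getPositionCount();
                }
            }
        }
        return rows;
    }
}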