Use of io.trino.plugin.hive.HiveConfig in project trino by trinodb.
From class AbstractFileFormat, method createGenericReader.
@Override
public ConnectorPageSource createGenericReader(
        ConnectorSession session,
        HdfsEnvironment hdfsEnvironment,
        File targetFile,
        List<ColumnHandle> readColumns,
        List<String> schemaColumnNames,
        List<Type> schemaColumnTypes) {
    HivePageSourceProvider factory = new HivePageSourceProvider(
            TESTING_TYPE_MANAGER,
            hdfsEnvironment,
            new HiveConfig(),
            getHivePageSourceFactory(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            getHiveRecordCursorProvider(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            new GenericHiveRecordCursorProvider(hdfsEnvironment, new HiveConfig()),
            Optional.empty());
    Properties schema = createSchema(getFormat(), schemaColumnNames, schemaColumnTypes);
    HiveSplit split = new HiveSplit(
            "schema_name",
            "table_name",
            "",
            targetFile.getPath(),
            0,
            targetFile.length(),
            targetFile.length(),
            targetFile.lastModified(),
            schema,
            ImmutableList.of(),
            ImmutableList.of(),
            OptionalInt.empty(),
            0,
            false,
            TableToPartitionMapping.empty(),
            Optional.empty(),
            Optional.empty(),
            false,
            Optional.empty(),
            0,
            SplitWeight.standard());
    return factory.createPageSource(
            TestingConnectorTransactionHandle.INSTANCE,
            session,
            split,
            new HiveTableHandle("schema_name", "table_name", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()),
            readColumns,
            DynamicFilter.EMPTY);
}
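For orientation, here is a minimal sketch of how a caller might drain the page source returned above. The format, session, and column variables are assumptions standing in for whatever the benchmark harness supplies; they are not part of the original snippet.

// Hypothetical caller: format, session, hdfsEnvironment, targetFile and the
// column lists are assumed to be in scope. ConnectorPageSource extends
// Closeable, so try-with-resources releases the underlying file.
try (ConnectorPageSource pageSource = format.createGenericReader(
        session, hdfsEnvironment, targetFile, readColumns, schemaColumnNames, schemaColumnTypes)) {
    while (!pageSource.isFinished()) {
        Page page = pageSource.getNextPage();
        if (page == null) {
            continue; // no page ready yet
        }
        page = page.getLoadedPage(); // force lazily loaded blocks to materialize
        // consume page.getPositionCount() rows here
    }
}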
Use of io.trino.plugin.hive.HiveConfig in project trino by trinodb.
From class TestCheckpointWriter, method setUp.
@BeforeClass
public void setUp() {
    checkpointSchemaManager = new CheckpointSchemaManager(typeManager);
    HdfsConfig hdfsConfig = new HdfsConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), Set.of());
    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    HiveSessionProperties hiveSessionProperties = getHiveSessionProperties(new HiveConfig());
    session = TestingConnectorSession.builder()
            .setPropertyMetadata(hiveSessionProperties.getSessionProperties())
            .build();
}
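A quick illustration of what this setup yields: the session is built from HiveConfig-backed property metadata, so Hive session properties resolve to the config defaults. A minimal sketch, assuming the standard timestamp_precision property name; with a default HiveConfig it is expected to return MILLISECONDS.

// Sketch only: reads a session property registered by HiveSessionProperties;
// the "timestamp_precision" name and the MILLISECONDS default are assumptions.
HiveTimestampPrecision precision = session.getProperty("timestamp_precision", HiveTimestampPrecision.class);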
Use of io.trino.plugin.hive.HiveConfig in project trino by trinodb.
From class TestTimestampMicros, method testTimestampMicros.
@Test(dataProvider = "testTimestampMicrosDataProvider")
public void testTimestampMicros(HiveTimestampPrecision timestampPrecision, LocalDateTime expected) throws Exception {
    ConnectorSession session = getHiveSession(new HiveConfig().setTimestampPrecision(timestampPrecision));
    File parquetFile = new File(Resources.getResource("issue-5483.parquet").toURI());
    Type columnType = createTimestampType(timestampPrecision.getPrecision());
    try (ConnectorPageSource pageSource = createPageSource(session, parquetFile, "created", HIVE_TIMESTAMP, columnType)) {
        MaterializedResult result = materializeSourceDataStream(session, pageSource, List.of(columnType)).toTestTypes();
        assertThat(result.getMaterializedRows()).containsOnly(new MaterializedRow(List.of(expected)));
    }
}
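The precision configured on HiveConfig also picks the Trino type the column is read as; a short sketch of the createTimestampType mapping used above:

// HiveTimestampPrecision#getPrecision gives the sub-second digit count
Type millis = createTimestampType(HiveTimestampPrecision.MILLISECONDS.getPrecision()); // timestamp(3)
Type micros = createTimestampType(HiveTimestampPrecision.MICROSECONDS.getPrecision()); // timestamp(6)
Type nanos = createTimestampType(HiveTimestampPrecision.NANOSECONDS.getPrecision());   // timestamp(9)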
Use of io.trino.plugin.hive.HiveConfig in project trino by trinodb.
From class TestTimestampMicros, method testTimestampMicrosAsTimestampWithTimeZone.
@Test(dataProvider = "testTimestampMicrosDataProvider")
public void testTimestampMicrosAsTimestampWithTimeZone(HiveTimestampPrecision timestampPrecision, LocalDateTime expected) throws Exception {
    ConnectorSession session = getHiveSession(new HiveConfig().setTimestampPrecision(timestampPrecision));
    File parquetFile = new File(Resources.getResource("issue-5483.parquet").toURI());
    Type columnType = createTimestampWithTimeZoneType(timestampPrecision.getPrecision());
    try (ConnectorPageSource pageSource = createPageSource(session, parquetFile, "created", HIVE_TIMESTAMP, columnType)) {
        MaterializedResult result = materializeSourceDataStream(session, pageSource, List.of(columnType)).toTestTypes();
        assertThat(result.getMaterializedRows()).containsOnly(new MaterializedRow(List.of(expected.atZone(ZoneId.of("UTC")))));
    }
}
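The only differences from the previous test are the column type and the expected value: the file stores a zoneless timestamp, so the expected LocalDateTime is pinned to UTC before comparison. A minimal sketch with a placeholder value (not the actual contents of issue-5483.parquet):

// Placeholder value for illustration; the real expectations come from the data provider
LocalDateTime expected = LocalDateTime.parse("2021-01-01T12:34:56.123456");
ZonedDateTime expectedUtc = expected.atZone(ZoneId.of("UTC"));
// createTimestampWithTimeZoneType(6) corresponds to timestamp(6) with time zone
Type columnType = createTimestampWithTimeZoneType(HiveTimestampPrecision.MICROSECONDS.getPrecision());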
Use of io.trino.plugin.hive.HiveConfig in project trino by trinodb.
From class TestMetastoreHiveStatisticsProvider, method testGetTableStatisticsSampling.
@Test
public void testGetTableStatisticsSampling() {
    MetastoreHiveStatisticsProvider statisticsProvider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> {
        assertEquals(table, TABLE);
        assertEquals(hivePartitions.size(), 1);
        return ImmutableMap.of();
    });
    ConnectorSession session = getHiveSession(new HiveConfig().setPartitionStatisticsSampleSize(1));
    statisticsProvider.getTableStatistics(
            session,
            TABLE,
            ImmutableMap.of(),
            ImmutableMap.of(),
            ImmutableList.of(partition("p1=string1/p2=1234"), partition("p1=string1/p2=1235")));
}
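The lambda above encodes the expected sampling behavior: with the sample size set to 1 and two partitions passed in, only one partition reaches the statistics loader. A small hypothetical contrast, assuming only the setter shown above:

// Sample size 1: the provider sees a 1-partition sample of the 2 partitions.
ConnectorSession sampleOne = getHiveSession(new HiveConfig().setPartitionStatisticsSampleSize(1));
// Sample size >= partition count: sampling would no longer reduce the set
// (hypothetical variant, not asserted by the original test).
ConnectorSession sampleAll = getHiveSession(new HiveConfig().setPartitionStatisticsSampleSize(2));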