Search in sources :

Example 1 with HiveConfig

Use of io.trino.plugin.hive.HiveConfig in the trino project by trinodb.

From the class AbstractFileFormat, method createGenericReader:

@Override
public ConnectorPageSource createGenericReader(ConnectorSession session, HdfsEnvironment hdfsEnvironment, File targetFile, List<ColumnHandle> readColumns, List<String> schemaColumnNames, List<Type> schemaColumnTypes) {
    // Page source provider wired with the format-specific factory/cursor provider,
    // when present, plus the generic record cursor provider as a fallback.
    HivePageSourceProvider provider = new HivePageSourceProvider(
            TESTING_TYPE_MANAGER,
            hdfsEnvironment,
            new HiveConfig(),
            getHivePageSourceFactory(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            getHiveRecordCursorProvider(hdfsEnvironment).map(ImmutableSet::of).orElse(ImmutableSet.of()),
            new GenericHiveRecordCursorProvider(hdfsEnvironment, new HiveConfig()),
            Optional.empty());
    Properties fileSchema = createSchema(getFormat(), schemaColumnNames, schemaColumnTypes);
    // Describe the whole target file as a single split.
    HiveSplit hiveSplit = new HiveSplit(
            "schema_name",
            "table_name",
            "",
            targetFile.getPath(),
            0,
            targetFile.length(),
            targetFile.length(),
            targetFile.lastModified(),
            fileSchema,
            ImmutableList.of(),
            ImmutableList.of(),
            OptionalInt.empty(),
            0,
            false,
            TableToPartitionMapping.empty(),
            Optional.empty(),
            Optional.empty(),
            false,
            Optional.empty(),
            0,
            SplitWeight.standard());
    HiveTableHandle tableHandle = new HiveTableHandle(
            "schema_name",
            "table_name",
            ImmutableMap.of(),
            ImmutableList.of(),
            ImmutableList.of(),
            Optional.empty());
    return provider.createPageSource(TestingConnectorTransactionHandle.INSTANCE, session, hiveSplit, tableHandle, readColumns, DynamicFilter.EMPTY);
}
Also used : HiveSplit(io.trino.plugin.hive.HiveSplit) HiveTableHandle(io.trino.plugin.hive.HiveTableHandle) ImmutableSet(com.google.common.collect.ImmutableSet) GenericHiveRecordCursorProvider(io.trino.plugin.hive.GenericHiveRecordCursorProvider) Properties(java.util.Properties) HivePageSourceProvider(io.trino.plugin.hive.HivePageSourceProvider) HiveConfig(io.trino.plugin.hive.HiveConfig)

Example 2 with HiveConfig

Use of io.trino.plugin.hive.HiveConfig in the trino project by trinodb.

From the class TestCheckpointWriter, method setUp:

@BeforeClass
public void setUp() {
    // Schema manager used by the checkpoint writer under test.
    checkpointSchemaManager = new CheckpointSchemaManager(typeManager);
    // HDFS environment with default configuration and no authentication.
    HdfsConfig config = new HdfsConfig();
    hdfsEnvironment = new HdfsEnvironment(
            new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), Set.of()),
            config,
            new NoHdfsAuthentication());
    // Connector session exposing the default Hive session properties.
    session = TestingConnectorSession.builder()
            .setPropertyMetadata(getHiveSessionProperties(new HiveConfig()).getSessionProperties())
            .build();
}
Also used : HdfsConfigurationInitializer(io.trino.plugin.hive.HdfsConfigurationInitializer) HiveHdfsConfiguration(io.trino.plugin.hive.HiveHdfsConfiguration) HdfsConfig(io.trino.plugin.hive.HdfsConfig) HiveHdfsConfiguration(io.trino.plugin.hive.HiveHdfsConfiguration) HdfsConfiguration(io.trino.plugin.hive.HdfsConfiguration) NoHdfsAuthentication(io.trino.plugin.hive.authentication.NoHdfsAuthentication) HiveSessionProperties(io.trino.plugin.hive.HiveSessionProperties) HiveTestUtils.getHiveSessionProperties(io.trino.plugin.hive.HiveTestUtils.getHiveSessionProperties) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) HiveConfig(io.trino.plugin.hive.HiveConfig) BeforeClass(org.testng.annotations.BeforeClass)

Example 3 with HiveConfig

Use of io.trino.plugin.hive.HiveConfig in the trino project by trinodb.

From the class TestTimestampMicros, method testTimestampMicros:

@Test(dataProvider = "testTimestampMicrosDataProvider")
public void testTimestampMicros(HiveTimestampPrecision timestampPrecision, LocalDateTime expected) throws Exception {
    // Session configured with the timestamp precision under test.
    ConnectorSession session = getHiveSession(new HiveConfig().setTimestampPrecision(timestampPrecision));
    Type timestampType = createTimestampType(timestampPrecision.getPrecision());
    File parquetFile = new File(Resources.getResource("issue-5483.parquet").toURI());
    try (ConnectorPageSource pageSource = createPageSource(session, parquetFile, "created", HIVE_TIMESTAMP, timestampType)) {
        MaterializedResult rows = materializeSourceDataStream(session, pageSource, List.of(timestampType)).toTestTypes();
        // The fixture file is expected to contain exactly the provided timestamp value.
        assertThat(rows.getMaterializedRows()).containsOnly(new MaterializedRow(List.of(expected)));
    }
}
Also used : Type(io.trino.spi.type.Type) HiveType(io.trino.plugin.hive.HiveType) TimestampType.createTimestampType(io.trino.spi.type.TimestampType.createTimestampType) TimestampWithTimeZoneType.createTimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType) ConnectorSession(io.trino.spi.connector.ConnectorSession) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) MaterializedResult(io.trino.testing.MaterializedResult) File(java.io.File) MaterializedRow(io.trino.testing.MaterializedRow) HiveConfig(io.trino.plugin.hive.HiveConfig) Test(org.testng.annotations.Test)

Example 4 with HiveConfig

Use of io.trino.plugin.hive.HiveConfig in the trino project by trinodb.

From the class TestTimestampMicros, method testTimestampMicrosAsTimestampWithTimeZone:

@Test(dataProvider = "testTimestampMicrosDataProvider")
public void testTimestampMicrosAsTimestampWithTimeZone(HiveTimestampPrecision timestampPrecision, LocalDateTime expected) throws Exception {
    // Session configured with the timestamp precision under test.
    ConnectorSession session = getHiveSession(new HiveConfig().setTimestampPrecision(timestampPrecision));
    Type timestampTzType = createTimestampWithTimeZoneType(timestampPrecision.getPrecision());
    File parquetFile = new File(Resources.getResource("issue-5483.parquet").toURI());
    try (ConnectorPageSource pageSource = createPageSource(session, parquetFile, "created", HIVE_TIMESTAMP, timestampTzType)) {
        MaterializedResult rows = materializeSourceDataStream(session, pageSource, List.of(timestampTzType)).toTestTypes();
        // The expected local value is compared at zone UTC, matching the zoned column type.
        assertThat(rows.getMaterializedRows()).containsOnly(new MaterializedRow(List.of(expected.atZone(ZoneId.of("UTC")))));
    }
}
Also used : Type(io.trino.spi.type.Type) HiveType(io.trino.plugin.hive.HiveType) TimestampType.createTimestampType(io.trino.spi.type.TimestampType.createTimestampType) TimestampWithTimeZoneType.createTimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType) ConnectorSession(io.trino.spi.connector.ConnectorSession) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) MaterializedResult(io.trino.testing.MaterializedResult) File(java.io.File) MaterializedRow(io.trino.testing.MaterializedRow) HiveConfig(io.trino.plugin.hive.HiveConfig) Test(org.testng.annotations.Test)

Example 5 with HiveConfig

Use of io.trino.plugin.hive.HiveConfig in the trino project by trinodb.

From the class TestMetastoreHiveStatisticsProvider, method testGetTableStatisticsSampling:

@Test
public void testGetTableStatisticsSampling() {
    // Stub metastore callback: asserts that sampling trimmed the partition list to one entry.
    MetastoreHiveStatisticsProvider provider = new MetastoreHiveStatisticsProvider((session, table, hivePartitions) -> {
        assertEquals(table, TABLE);
        assertEquals(hivePartitions.size(), 1);
        return ImmutableMap.of();
    });
    // Session limiting the statistics sample to a single partition.
    ConnectorSession sampledSession = getHiveSession(new HiveConfig().setPartitionStatisticsSampleSize(1));
    // Two partitions in, sample size 1 — the callback above verifies only one reaches the metastore.
    provider.getTableStatistics(
            sampledSession,
            TABLE,
            ImmutableMap.of(),
            ImmutableMap.of(),
            ImmutableList.of(partition("p1=string1/p2=1234"), partition("p1=string1/p2=1235")));
}
Also used : ConnectorSession(io.trino.spi.connector.ConnectorSession) HiveConfig(io.trino.plugin.hive.HiveConfig) Test(org.testng.annotations.Test)

Aggregations

HiveConfig (io.trino.plugin.hive.HiveConfig)9 ConnectorSession (io.trino.spi.connector.ConnectorSession)5 Test (org.testng.annotations.Test)5 ConnectorPageSource (io.trino.spi.connector.ConnectorPageSource)3 HdfsConfig (io.trino.plugin.hive.HdfsConfig)2 HiveSessionProperties (io.trino.plugin.hive.HiveSessionProperties)2 HiveType (io.trino.plugin.hive.HiveType)2 OrcReaderConfig (io.trino.plugin.hive.orc.OrcReaderConfig)2 TimestampType.createTimestampType (io.trino.spi.type.TimestampType.createTimestampType)2 TimestampWithTimeZoneType.createTimestampWithTimeZoneType (io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType)2 Type (io.trino.spi.type.Type)2 MaterializedResult (io.trino.testing.MaterializedResult)2 MaterializedRow (io.trino.testing.MaterializedRow)2 File (java.io.File)2 JobConf (org.apache.hadoop.mapred.JobConf)2 BeforeClass (org.testng.annotations.BeforeClass)2 AbstractIterator (com.google.common.collect.AbstractIterator)1 ImmutableList (com.google.common.collect.ImmutableList)1 ImmutableSet (com.google.common.collect.ImmutableSet)1 GenericHiveRecordCursorProvider (io.trino.plugin.hive.GenericHiveRecordCursorProvider)1