Example 81 with ConnectorMetadata

Use of io.trino.spi.connector.ConnectorMetadata in project trino by trinodb.

From the class AbstractTestHive, method doInsertUnsupportedWriteType:

private void doInsertUnsupportedWriteType(HiveStorageFormat storageFormat, SchemaTableName tableName) throws Exception {
    List<Column> columns = ImmutableList.of(new Column("dummy", HiveType.valueOf("uniontype<smallint,tinyint>"), Optional.empty()));
    List<Column> partitionColumns = ImmutableList.of(new Column("name", HIVE_STRING, Optional.empty()));
    createEmptyTable(tableName, storageFormat, columns, partitionColumns);
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        metadata.beginInsert(session, tableHandle, ImmutableList.of(), NO_RETRIES);
        fail("expected failure");
    } catch (TrinoException e) {
        assertThat(e).hasMessageMatching("Inserting into Hive table .* with column type uniontype<smallint,tinyint> not supported");
    }
}
Also used: HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), TrinoException (io.trino.spi.TrinoException), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle)
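
The getTableHandle call above is a helper local to AbstractTestHive rather than part of the SPI. A minimal sketch of such a helper, assuming the ConnectorMetadata.getTableHandle(session, tableName) overload and the same newSession() factory used in these tests, could look like this:

private ConnectorTableHandle getTableHandle(ConnectorMetadata metadata, SchemaTableName tableName) {
    // Resolve the handle through the connector and fail fast if the table does not exist.
    ConnectorTableHandle handle = metadata.getTableHandle(newSession(), tableName);
    assertNotNull(handle, "table not found: " + tableName);
    return handle;
}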

Example 82 with ConnectorMetadata

Use of io.trino.spi.connector.ConnectorMetadata in project trino by trinodb.

From the class AbstractTestHive, method doTestBucketedTableValidation:

private void doTestBucketedTableValidation(HiveStorageFormat storageFormat, SchemaTableName tableName) throws Exception {
    prepareInvalidBuckets(storageFormat, tableName);
    // read succeeds when validation is disabled
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession(ImmutableMap.of("validate_bucketing", false));
        metadata.beginQuery(session);
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());
        MaterializedResult result = readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.empty(), Optional.of(storageFormat));
        // fewer rows due to deleted file
        assertEquals(result.getRowCount(), 87);
    }
    // read fails due to validation failure
    assertReadFailsWithMessageMatching(storageFormat, tableName, "Hive table is corrupt\\. File '.*/000002_0_.*' is for bucket 2, but contains a row for bucket 5.");
}
Also used: HiveColumnHandle.bucketColumnHandle (io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle), ColumnHandle (io.trino.spi.connector.ColumnHandle), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), MaterializedResult (io.trino.testing.MaterializedResult), ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle)
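
The filterNonHiddenColumnHandles helper is likewise local to the test. A plausible sketch, assuming HiveColumnHandle exposes an isHidden() flag for synthetic columns such as the bucket column, and Guava's toImmutableList() static import, might be:

private static List<ColumnHandle> filterNonHiddenColumnHandles(Collection<ColumnHandle> columnHandles) {
    // Keep only user-visible columns; hidden columns (for example the bucket column) are dropped.
    return columnHandles.stream()
            .filter(handle -> !((HiveColumnHandle) handle).isHidden())
            .collect(toImmutableList());
}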

Example 83 with ConnectorMetadata

Use of io.trino.spi.connector.ConnectorMetadata in project trino by trinodb.

From the class AbstractTestHive, method testNewDirectoryPermissions:

@Test
public void testNewDirectoryPermissions() throws Exception {
    SchemaTableName tableName = temporaryTable("empty_file");
    List<Column> columns = ImmutableList.of(new Column("test", HIVE_STRING, Optional.empty()));
    createEmptyTable(tableName, ORC, columns, ImmutableList.of(), Optional.empty());
    try {
        Transaction transaction = newTransaction();
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        metadata.beginQuery(session);
        Table table = transaction.getMetastore().getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow();
        // create new directory and set directory permission after creation
        HdfsContext context = new HdfsContext(session);
        Path location = new Path(table.getStorage().getLocation());
        Path defaultPath = new Path(location + "/defaultperms");
        createDirectory(context, hdfsEnvironment, defaultPath);
        FileStatus defaultFsStatus = hdfsEnvironment.getFileSystem(context, defaultPath).getFileStatus(defaultPath);
        assertEquals(defaultFsStatus.getPermission().toOctal(), 777);
        // use hdfs config that skips setting directory permissions after creation
        HdfsConfig configWithSkip = new HdfsConfig();
        configWithSkip.setNewDirectoryPermissions(HdfsConfig.SKIP_DIR_PERMISSIONS);
        HdfsEnvironment hdfsEnvironmentWithSkip = new HdfsEnvironment(createTestHdfsConfiguration(), configWithSkip, new NoHdfsAuthentication());
        Path skipPath = new Path(location + "/skipperms");
        createDirectory(context, hdfsEnvironmentWithSkip, skipPath);
        FileStatus skipFsStatus = hdfsEnvironmentWithSkip.getFileSystem(context, skipPath).getFileStatus(skipPath);
        assertEquals(skipFsStatus.getPermission().toOctal(), 755);
    } finally {
        dropTable(tableName);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HiveSessionProperties.getTemporaryStagingDirectoryPath (io.trino.plugin.hive.HiveSessionProperties.getTemporaryStagingDirectoryPath), Table (io.trino.plugin.hive.metastore.Table), FileStatus (org.apache.hadoop.fs.FileStatus), CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName), SchemaTableName (io.trino.spi.connector.SchemaTableName), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext), Test (org.testng.annotations.Test)
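
As a standalone illustration of the permission check performed above, the plain Hadoop FileSystem API can be exercised outside the test harness; the path below is hypothetical, and the environment's umask and filesystem defaults decide which permission bits you actually observe:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class DirectoryPermissionCheck {
    public static void main(String[] args) throws Exception {
        // Hypothetical local path; the test derives its paths from the table's storage location.
        Path dir = new Path("/tmp/permission-check");
        FileSystem fs = dir.getFileSystem(new Configuration());
        fs.mkdirs(dir);
        FileStatus status = fs.getFileStatus(dir);
        // toOctal() reports the permission bits, e.g. 755 or 777 as asserted in the test
        System.out.println(status.getPermission().toOctal());
    }
}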

Example 84 with ConnectorMetadata

Use of io.trino.spi.connector.ConnectorMetadata in project trino by trinodb.

From the class AbstractTestHive, method testGetTableSchemaOfflinePartition:

@Test
public void testGetTableSchemaOfflinePartition() {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableOfflinePartition);
        ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(newSession(), tableHandle);
        Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
        assertPrimitiveField(map, "t_string", createUnboundedVarcharType(), false);
    }
}
Also used: ColumnMetadata (io.trino.spi.connector.ColumnMetadata), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata), ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle), Test (org.testng.annotations.Test)
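
assertPrimitiveField is another local assertion helper. A rough sketch of the checks it performs, under the assumption that partition columns are distinguished through ColumnMetadata.getExtraInfo() (the real helper may use a different signal), is:

private static void assertPrimitiveField(Map<String, ColumnMetadata> map, String name, Type type, boolean partitionKey) {
    assertTrue(map.containsKey(name), "missing column: " + name);
    ColumnMetadata column = map.get(name);
    // Verify the declared type and whether the column is flagged as a partition key.
    assertEquals(column.getType(), type, name);
    assertEquals(column.getExtraInfo() != null, partitionKey, name);
}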

Example 85 with ConnectorMetadata

Use of io.trino.spi.connector.ConnectorMetadata in project trino by trinodb.

From the class AbstractTestHive, method testGetTableSchemaPartitionFormat:

@Test
public void testGetTableSchemaPartitionFormat() {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(newSession(), getTableHandle(metadata, tablePartitionFormat));
        Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
        assertPrimitiveField(map, "t_string", createUnboundedVarcharType(), false);
        assertPrimitiveField(map, "t_tinyint", TINYINT, false);
        assertPrimitiveField(map, "t_smallint", SMALLINT, false);
        assertPrimitiveField(map, "t_int", INTEGER, false);
        assertPrimitiveField(map, "t_bigint", BIGINT, false);
        assertPrimitiveField(map, "t_float", REAL, false);
        assertPrimitiveField(map, "t_double", DOUBLE, false);
        assertPrimitiveField(map, "t_boolean", BOOLEAN, false);
        assertPrimitiveField(map, "ds", createUnboundedVarcharType(), true);
        assertPrimitiveField(map, "file_format", createUnboundedVarcharType(), true);
        assertPrimitiveField(map, "dummy", INTEGER, true);
    }
}
Also used: ColumnMetadata (io.trino.spi.connector.ColumnMetadata), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata), Test (org.testng.annotations.Test)
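
The uniqueIndex call in the last two examples is Guava's Maps.uniqueIndex, which builds a name-to-metadata map and rejects duplicate column names. A JDK-only equivalent (a sketch, not taken from the test) would be:

import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;

Map<String, ColumnMetadata> columnsByName = tableMetadata.getColumns().stream()
        .collect(toMap(ColumnMetadata::getName, identity()));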

Aggregations

ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 190
CatalogName (io.trino.connector.CatalogName): 101
ConnectorSession (io.trino.spi.connector.ConnectorSession): 97
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 70
Test (org.testng.annotations.Test): 63
TestingConnectorSession (io.trino.testing.TestingConnectorSession): 52
SchemaTableName (io.trino.spi.connector.SchemaTableName): 48
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName): 43
ColumnHandle (io.trino.spi.connector.ColumnHandle): 40
ConnectorOutputTableHandle (io.trino.spi.connector.ConnectorOutputTableHandle): 32
ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata): 32
ConnectorInsertTableHandle (io.trino.spi.connector.ConnectorInsertTableHandle): 31
Constraint (io.trino.spi.connector.Constraint): 31
MaterializedResult (io.trino.testing.MaterializedResult): 27
HiveColumnHandle.bucketColumnHandle (io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle): 26
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 26
TrinoException (io.trino.spi.TrinoException): 23
Slice (io.airlift.slice.Slice): 22
ConnectorTransactionHandle (io.trino.spi.connector.ConnectorTransactionHandle): 21
SchemaTablePrefix (io.trino.spi.connector.SchemaTablePrefix): 20