Example 21 with Column

Use of io.trino.plugin.hive.metastore.Column in project trino by trinodb.

From class AbstractTestHive, method assertEmptyFile:

private void assertEmptyFile(HiveStorageFormat format) throws Exception {
    SchemaTableName tableName = temporaryTable("empty_file");
    try {
        List<Column> columns = ImmutableList.of(new Column("test", HIVE_STRING, Optional.empty()));
        createEmptyTable(tableName, format, columns, ImmutableList.of());
        try (Transaction transaction = newTransaction()) {
            ConnectorSession session = newSession();
            ConnectorMetadata metadata = transaction.getMetadata();
            metadata.beginQuery(session);
            ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
            List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());
            Table table = transaction.getMetastore().getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow(AssertionError::new);
            // verify directory is empty
            HdfsContext context = new HdfsContext(session);
            Path location = new Path(table.getStorage().getLocation());
            assertTrue(listDirectory(context, location).isEmpty());
            // read table with empty directory
            readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.of(0), Optional.of(ORC));
            // create empty file
            FileSystem fileSystem = hdfsEnvironment.getFileSystem(context, location);
            assertTrue(fileSystem.createNewFile(new Path(location, "empty-file")));
            assertEquals(listDirectory(context, location), ImmutableList.of("empty-file"));
            // read table with empty file
            MaterializedResult result = readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.of(0), Optional.empty());
            assertEquals(result.getRowCount(), 0);
        }
    } finally {
        dropTable(tableName);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HiveSessionProperties.getTemporaryStagingDirectoryPath (io.trino.plugin.hive.HiveSessionProperties.getTemporaryStagingDirectoryPath), HiveColumnHandle.bucketColumnHandle (io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle), ColumnHandle (io.trino.spi.connector.ColumnHandle), Table (io.trino.plugin.hive.metastore.Table), CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName), SchemaTableName (io.trino.spi.connector.SchemaTableName), ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle), HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), FileSystem (org.apache.hadoop.fs.FileSystem), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext), MaterializedResult (io.trino.testing.MaterializedResult)
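
A typical caller of this helper, in the same test class, exercises one storage format per test method. A minimal sketch (the test method name here is illustrative, not necessarily the exact name used in AbstractTestHive):

@Test
public void testEmptyOrcFile() throws Exception {
    // delegates to the helper above with the format under test
    assertEmptyFile(ORC);
}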

Example 22 with Column

Use of io.trino.plugin.hive.metastore.Column in project trino by trinodb.

From class AbstractTestHive, method doInsertUnsupportedWriteType:

private void doInsertUnsupportedWriteType(HiveStorageFormat storageFormat, SchemaTableName tableName) throws Exception {
    List<Column> columns = ImmutableList.of(new Column("dummy", HiveType.valueOf("uniontype<smallint,tinyint>"), Optional.empty()));
    List<Column> partitionColumns = ImmutableList.of(new Column("name", HIVE_STRING, Optional.empty()));
    createEmptyTable(tableName, storageFormat, columns, partitionColumns);
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        metadata.beginInsert(session, tableHandle, ImmutableList.of(), NO_RETRIES);
        fail("expected failure");
    } catch (TrinoException e) {
        assertThat(e).hasMessageMatching("Inserting into Hive table .* with column type uniontype<smallint,tinyint> not supported");
    }
}
Also used: HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), TrinoException (io.trino.spi.TrinoException), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle)
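
The try/catch-plus-fail() pattern above can be written more compactly with AssertJ's assertThatThrownBy, the same library that provides the assertThat call already used in this method. A minimal equivalent sketch:

// requires: import static org.assertj.core.api.Assertions.assertThatThrownBy;
// Same expected-failure check, without the explicit fail() call.
assertThatThrownBy(() -> metadata.beginInsert(session, tableHandle, ImmutableList.of(), NO_RETRIES))
        .isInstanceOf(TrinoException.class)
        .hasMessageMatching("Inserting into Hive table .* with column type uniontype<smallint,tinyint> not supported");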

Example 23 with Column

Use of io.trino.plugin.hive.metastore.Column in project trino by trinodb.

From class AbstractTestHive, method testNewDirectoryPermissions:

@Test
public void testNewDirectoryPermissions() throws Exception {
    SchemaTableName tableName = temporaryTable("empty_file");
    List<Column> columns = ImmutableList.of(new Column("test", HIVE_STRING, Optional.empty()));
    createEmptyTable(tableName, ORC, columns, ImmutableList.of(), Optional.empty());
    try {
        Transaction transaction = newTransaction();
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        metadata.beginQuery(session);
        Table table = transaction.getMetastore().getTable(tableName.getSchemaName(), tableName.getTableName()).orElseThrow();
        // create new directory and set directory permission after creation
        HdfsContext context = new HdfsContext(session);
        Path location = new Path(table.getStorage().getLocation());
        Path defaultPath = new Path(location + "/defaultperms");
        createDirectory(context, hdfsEnvironment, defaultPath);
        FileStatus defaultFsStatus = hdfsEnvironment.getFileSystem(context, defaultPath).getFileStatus(defaultPath);
        assertEquals(defaultFsStatus.getPermission().toOctal(), 777);
        // use hdfs config that skips setting directory permissions after creation
        HdfsConfig configWithSkip = new HdfsConfig();
        configWithSkip.setNewDirectoryPermissions(HdfsConfig.SKIP_DIR_PERMISSIONS);
        HdfsEnvironment hdfsEnvironmentWithSkip = new HdfsEnvironment(createTestHdfsConfiguration(), configWithSkip, new NoHdfsAuthentication());
        Path skipPath = new Path(location + "/skipperms");
        createDirectory(context, hdfsEnvironmentWithSkip, skipPath);
        FileStatus skipFsStatus = hdfsEnvironmentWithSkip.getFileSystem(context, skipPath).getFileStatus(skipPath);
        assertEquals(skipFsStatus.getPermission().toOctal(), 755);
    } finally {
        dropTable(tableName);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HiveSessionProperties.getTemporaryStagingDirectoryPath (io.trino.plugin.hive.HiveSessionProperties.getTemporaryStagingDirectoryPath), Table (io.trino.plugin.hive.metastore.Table), FileStatus (org.apache.hadoop.fs.FileStatus), CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName), SchemaTableName (io.trino.spi.connector.SchemaTableName), NoHdfsAuthentication (io.trino.plugin.hive.authentication.NoHdfsAuthentication), HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn), Column (io.trino.plugin.hive.metastore.Column), ViewColumn (io.trino.spi.connector.ConnectorViewDefinition.ViewColumn), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), ConnectorSession (io.trino.spi.connector.ConnectorSession), TestingConnectorSession (io.trino.testing.TestingConnectorSession), ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata), HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext), Test (org.testng.annotations.Test)
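
The bare 777 and 755 literals in the assertions work because FsPermission.toOctal() returns a short whose decimal digits spell out the octal mode. A minimal sketch of that mapping, assuming Hadoop's org.apache.hadoop.fs.permission.FsPermission:

import org.apache.hadoop.fs.permission.FsPermission;

// toOctal() encodes rwxrwxrwx (octal 0777) as the decimal-looking short 777,
// and rwxr-xr-x (octal 0755) as 755, matching the literals asserted above.
short full = new FsPermission((short) 0777).toOctal(); // 777
short restricted = new FsPermission((short) 0755).toOctal(); // 755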

Example 24 with Column

Use of io.trino.plugin.hive.metastore.Column in project trino by trinodb.

From class TestBackgroundHiveSplitLoader, method table:

private static Table table(String location, List<Column> partitionColumns, Optional<HiveBucketProperty> bucketProperty, Map<String, String> tableParameters, StorageFormat storageFormat) {
    Table.Builder tableBuilder = Table.builder();
    tableBuilder.getStorageBuilder()
            .setStorageFormat(storageFormat)
            .setLocation(location)
            .setSkewed(false)
            .setBucketProperty(bucketProperty);
    return tableBuilder
            .setDatabaseName("test_dbname")
            .setOwner(Optional.of("testOwner"))
            .setTableName("test_table")
            .setTableType(TableType.MANAGED_TABLE.toString())
            .setDataColumns(ImmutableList.of(new Column("col1", HIVE_STRING, Optional.empty())))
            .setParameters(tableParameters)
            .setPartitionColumns(partitionColumns)
            .build();
}
Also used: Table (io.trino.plugin.hive.metastore.Table), Column (io.trino.plugin.hive.metastore.Column), HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn)
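
A hypothetical call to this helper, building an unpartitioned, unbucketed table with the standard Hive text serde (the location and format class names below are illustrative, not taken from the test):

// Hypothetical usage: no partitions, no bucketing, no extra parameters.
Table textTable = table(
        "file:///tmp/test_table",
        ImmutableList.of(), // no partition columns
        Optional.empty(), // no bucket property
        ImmutableMap.of(), // no table parameters
        StorageFormat.create(
                "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
                "org.apache.hadoop.mapred.TextInputFormat",
                "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"));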

Example 25 with Column

Use of io.trino.plugin.hive.metastore.Column in project trino by trinodb.

From class TestProtoUtils, method testTable:

@Test
public void testTable() {
    alluxio.grpc.table.TableInfo.Builder table = TestingAlluxioMetastoreObjects.getTestingTableInfo();
    alluxio.grpc.table.FieldSchema fieldSchema = TestingAlluxioMetastoreObjects.getTestingFieldSchema().build();
    Table t = ProtoUtils.fromProto(table.build());
    Column c = t.getColumn(TestingAlluxioMetastoreObjects.COLUMN_NAME).get();
    assertEquals(table.getDbName(), t.getDatabaseName());
    assertEquals(table.getTableName(), t.getTableName());
    assertEquals(table.getOwner(), t.getOwner().orElse(null));
    assertEquals(table.getType().toString(), t.getTableType());
    assertEquals(0, t.getDataColumns().size());
    assertEquals(1, t.getPartitionColumns().size());
    assertEquals(table.getParametersMap(), t.getParameters());
    assertEquals(Optional.empty(), t.getViewOriginalText());
    assertEquals(Optional.empty(), t.getViewExpandedText());
    assertEquals(fieldSchema.getName(), c.getName());
    assertEquals(fieldSchema.getComment(), c.getComment().get());
    assertEquals(fieldSchema.getType(), c.getType().toString());
    Storage s = t.getStorage();
    alluxio.grpc.table.layout.hive.Storage storage = TestingAlluxioMetastoreObjects.getTestingPartitionInfo().getStorage();
    assertEquals(storage.getSkewed(), s.isSkewed());
    assertEquals(ProtoUtils.fromProto(storage.getStorageFormat()), s.getStorageFormat());
    assertEquals(storage.getLocation(), s.getLocation());
    assertEquals(ProtoUtils.fromProto(table.getParametersMap(), storage.getBucketProperty()), s.getBucketProperty());
    assertEquals(storage.getStorageFormat().getSerdelibParametersMap(), s.getSerdeParameters());
}
Also used: Table (io.trino.plugin.hive.metastore.Table), Storage (io.trino.plugin.hive.metastore.Storage), Column (io.trino.plugin.hive.metastore.Column), SortingColumn (io.trino.plugin.hive.metastore.SortingColumn), Test (org.testng.annotations.Test)
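
Note that Table.getColumn returns an Optional<Column>, so the unchecked get() above relies on the fixture guaranteeing the column exists. A lookup by an unknown name simply yields an empty Optional, as this short sketch shows:

// A name matching neither a data column nor a partition column
// returns Optional.empty() rather than throwing.
assertTrue(t.getColumn("no_such_column").isEmpty());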

Aggregations

Column (io.trino.plugin.hive.metastore.Column): 68 usages
SortingColumn (io.trino.plugin.hive.metastore.SortingColumn): 47 usages
Table (io.trino.plugin.hive.metastore.Table): 44 usages
TrinoException (io.trino.spi.TrinoException): 42 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 39 usages
HiveColumnHandle.createBaseColumn (io.trino.plugin.hive.HiveColumnHandle.createBaseColumn): 39 usages
ImmutableList (com.google.common.collect.ImmutableList): 37 usages
SchemaTableName (io.trino.spi.connector.SchemaTableName): 37 usages
ConnectorSession (io.trino.spi.connector.ConnectorSession): 36 usages
List (java.util.List): 35 usages
Map (java.util.Map): 34 usages
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName): 33 usages
Optional (java.util.Optional): 33 usages
Path (org.apache.hadoop.fs.Path): 32 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 31 usages
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext): 30 usages
PrincipalPrivileges (io.trino.plugin.hive.metastore.PrincipalPrivileges): 28 usages
ImmutableSet (com.google.common.collect.ImmutableSet): 27 usages
TupleDomain (io.trino.spi.predicate.TupleDomain): 26 usages
String.format (java.lang.String.format): 26 usages