Example usage of io.trino.plugin.hive.metastore.Column from the Trino project (trinodb): the testColumn method of the TestProtoUtils class.
@Test
public void testColumn() {
    // Convert the canned Alluxio field schema into a Hive metastore Column
    alluxio.grpc.table.FieldSchema schema = TestingAlluxioMetastoreObjects.getTestingFieldSchema().build();
    Column converted = ProtoUtils.fromProto(schema);
    // The comment must be carried over (present and equal), along with name and type
    assertTrue(converted.getComment().isPresent());
    assertEquals(schema.getComment(), converted.getComment().get());
    assertEquals(schema.getName(), converted.getName());
    assertEquals(HiveType.valueOf(schema.getType()), converted.getType());
}
Example usage of io.trino.plugin.hive.metastore.Column from the Trino project (trinodb): the createTable method of the TestDeltaLakeGlueMetastore class.
private void createTable(SchemaTableName tableName, String tableLocation, Consumer<Table.Builder> tableConfiguration) {
    // Start from an external table with a single string column, owned by the session user
    Table.Builder builder = Table.builder()
            .setDatabaseName(tableName.getSchemaName())
            .setTableName(tableName.getTableName())
            .setOwner(Optional.of(session.getUser()))
            .setTableType(EXTERNAL_TABLE.name())
            .setDataColumns(List.of(new Column("a_column", HIVE_STRING, Optional.empty())));
    builder.getStorageBuilder()
            .setStorageFormat(fromHiveStorageFormat(PARQUET))
            .setLocation(tableLocation);
    // Let the caller customize the definition before it is persisted
    tableConfiguration.accept(builder);
    // No explicit grants: empty user and role privilege multimaps
    PrincipalPrivileges noGrants = new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of());
    metastoreClient.createTable(builder.build(), noGrants);
}
Example usage of io.trino.plugin.hive.metastore.Column from the Trino project (trinodb): the createView method of the TrinoHiveCatalog class.
@Override
public void createView(ConnectorSession session, SchemaTableName schemaViewName, ConnectorViewDefinition definition, boolean replace) {
    // Under system security the view definition carries no owner
    if (isUsingSystemSecurity) {
        definition = definition.withoutOwner();
    }
    // Table parameters marking this metastore entry as a Trino/Presto view
    Map<String, String> viewProperties = ImmutableMap.<String, String>builder()
            .put(PRESTO_VIEW_FLAG, "true")
            .put(TRINO_CREATED_BY, TRINO_CREATED_BY_VALUE)
            .put(PRESTO_VERSION_NAME, trinoVersion)
            .put(PRESTO_QUERY_ID_NAME, session.getQueryId())
            .put(TABLE_COMMENT, PRESTO_VIEW_COMMENT)
            .buildOrThrow();
    // Build the VIRTUAL_VIEW metastore entry; a placeholder "dummy" column is used as the data column
    io.trino.plugin.hive.metastore.Table.Builder viewBuilder = io.trino.plugin.hive.metastore.Table.builder()
            .setDatabaseName(schemaViewName.getSchemaName())
            .setTableName(schemaViewName.getTableName())
            .setOwner(isUsingSystemSecurity ? Optional.empty() : Optional.of(session.getUser()))
            .setTableType(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW.name())
            .setDataColumns(ImmutableList.of(new Column("dummy", HIVE_STRING, Optional.empty())))
            .setPartitionColumns(ImmutableList.of())
            .setParameters(viewProperties)
            .setViewOriginalText(Optional.of(encodeViewData(definition)))
            .setViewExpandedText(Optional.of(PRESTO_VIEW_EXPANDED_TEXT_MARKER));
    viewBuilder.getStorageBuilder()
            .setStorageFormat(VIEW_STORAGE_FORMAT)
            .setLocation("");
    io.trino.plugin.hive.metastore.Table view = viewBuilder.build();
    PrincipalPrivileges privileges = isUsingSystemSecurity ? NO_PRIVILEGES : buildInitialPrivilegeSet(session.getUser());
    // An existing entry may only be replaced when replace was requested AND it is itself a Presto view
    Optional<io.trino.plugin.hive.metastore.Table> existing = metastore.getTable(schemaViewName.getSchemaName(), schemaViewName.getTableName());
    if (existing.isPresent()) {
        if (!replace || !isPrestoView(existing.get())) {
            throw new ViewAlreadyExistsException(schemaViewName);
        }
        metastore.replaceTable(schemaViewName.getSchemaName(), schemaViewName.getTableName(), view, privileges);
        return;
    }
    try {
        metastore.createTable(view, privileges);
    } catch (TableAlreadyExistsException e) {
        // Lost a creation race: surface it as a view-name conflict
        throw new ViewAlreadyExistsException(e.getTableName());
    }
}
Example usage of io.trino.plugin.hive.metastore.Column from the Trino project (trinodb): the testHideDeltaLakeTables method of the AbstractTestHive class.
@Test
public void testHideDeltaLakeTables() {
ConnectorSession session = newSession();
SchemaTableName tableName = temporaryTable("trino_delta_lake_table");
// Register a managed table in the metastore tagged with the Spark Delta Lake provider,
// so the Hive connector should treat it as a Delta Lake table and hide it.
Table.Builder table = Table.builder().setDatabaseName(tableName.getSchemaName()).setTableName(tableName.getTableName()).setOwner(Optional.of(session.getUser())).setTableType(MANAGED_TABLE.name()).setPartitionColumns(List.of(new Column("a_partition_column", HIVE_INT, Optional.empty()))).setDataColumns(List.of(new Column("a_column", HIVE_STRING, Optional.empty()))).setParameter(SPARK_TABLE_PROVIDER_KEY, DELTA_LAKE_PROVIDER);
table.getStorageBuilder().setStorageFormat(fromHiveStorageFormat(PARQUET)).setLocation(getTableDefaultLocation(metastoreClient.getDatabase(tableName.getSchemaName()).orElseThrow(), new HdfsContext(session.getIdentity()), hdfsEnvironment, tableName.getSchemaName(), tableName.getTableName()).toString());
metastoreClient.createTable(table.build(), NO_PRIVILEGES);
try {
// Verify the table was created as a Delta Lake table
try (Transaction transaction = newTransaction()) {
ConnectorMetadata metadata = transaction.getMetadata();
metadata.beginQuery(session);
// Direct lookup must be rejected with the dedicated Delta Lake error message
assertThatThrownBy(() -> getTableHandle(metadata, tableName)).hasMessage(format("Cannot query Delta Lake table '%s'", tableName));
}
// Verify the hidden `$properties` and `$partitions` Delta Lake table handle can't be obtained within the hive connector
try (Transaction transaction = newTransaction()) {
ConnectorMetadata metadata = transaction.getMetadata();
metadata.beginQuery(session);
SchemaTableName propertiesTableName = new SchemaTableName(tableName.getSchemaName(), format("%s$properties", tableName.getTableName()));
assertThat(metadata.getSystemTable(newSession(), propertiesTableName)).isEmpty();
SchemaTableName partitionsTableName = new SchemaTableName(tableName.getSchemaName(), format("%s$partitions", tableName.getTableName()));
assertThat(metadata.getSystemTable(newSession(), partitionsTableName)).isEmpty();
}
// Assert that table is hidden
try (Transaction transaction = newTransaction()) {
ConnectorMetadata metadata = transaction.getMetadata();
// TODO (https://github.com/trinodb/trino/issues/5426) these assertions should use information_schema instead of metadata directly,
// as information_schema or MetadataManager may apply additional logic
// list all tables
assertThat(metadata.listTables(session, Optional.empty())).doesNotContain(tableName);
// list all tables in a schema
assertThat(metadata.listTables(session, Optional.of(tableName.getSchemaName()))).doesNotContain(tableName);
// list all columns
assertThat(listTableColumns(metadata, session, new SchemaTablePrefix()).keySet()).doesNotContain(tableName);
// list all columns in a schema
assertThat(listTableColumns(metadata, session, new SchemaTablePrefix(tableName.getSchemaName())).keySet()).doesNotContain(tableName);
// list all columns in a table
assertThat(listTableColumns(metadata, session, new SchemaTablePrefix(tableName.getSchemaName(), tableName.getTableName())).keySet()).doesNotContain(tableName);
}
} finally {
// Clean up
metastoreClient.dropTable(tableName.getSchemaName(), tableName.getTableName(), true);
}
}
Example usage of io.trino.plugin.hive.metastore.Column from the Trino project (trinodb): the prepareInvalidBuckets method of the AbstractTestHive class.
private void prepareInvalidBuckets(HiveStorageFormat storageFormat, SchemaTableName tableName) throws Exception {
    // Create an unpartitioned table bucketed into 8 buckets on "id" (bucketing v1)
    createEmptyTable(
            tableName,
            storageFormat,
            ImmutableList.of(new Column("id", HIVE_LONG, Optional.empty()), new Column("name", HIVE_STRING, Optional.empty())),
            ImmutableList.of(),
            Optional.of(new HiveBucketProperty(ImmutableList.of("id"), BUCKETING_V1, 8, ImmutableList.of())));
    // Insert 100 rows so every bucket receives data
    MaterializedResult.Builder rows = MaterializedResult.resultBuilder(SESSION, BIGINT, VARCHAR);
    for (long i = 0; i < 100; i++) {
        rows.row(i, String.valueOf(i));
    }
    insertData(tableName, rows.build());
    // Corrupt the bucketing: delete bucket 2's file and move bucket 5's file into its place,
    // so file contents no longer match the bucket number encoded in the file name
    try (Transaction transaction = newTransaction()) {
        Set<String> dataFiles = listAllDataFiles(transaction, tableName.getSchemaName(), tableName.getTableName());
        Path bucket2File = dataFiles.stream().map(Path::new).filter(file -> file.getName().startsWith("000002_0_")).collect(onlyElement());
        Path bucket5File = dataFiles.stream().map(Path::new).filter(file -> file.getName().startsWith("000005_0_")).collect(onlyElement());
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(newSession()), bucket2File);
        fileSystem.delete(bucket2File, false);
        fileSystem.rename(bucket5File, bucket2File);
    }
}
Aggregations