use of io.trino.plugin.hive.authentication.HiveIdentity in project trino by trinodb.
the class AbstractTestHive method setup.
protected final void setup(String host, int port, String databaseName, String timeZone) {
    HiveConfig hiveConfig = getHiveConfig().setParquetTimeZone(timeZone).setRcfileTimeZone(timeZone);
    Optional<HostAndPort> proxy = Optional.ofNullable(System.getProperty("hive.metastore.thrift.client.socks-proxy")).map(HostAndPort::fromString);
    MetastoreLocator metastoreLocator = new TestingMetastoreLocator(proxy, HostAndPort.fromParts(host, port));
    hdfsEnvironment = new HdfsEnvironment(createTestHdfsConfiguration(), new HdfsConfig(), new NoHdfsAuthentication());
    HiveMetastore metastore = cachingHiveMetastore(
            new BridgingHiveMetastore(
                    new ThriftHiveMetastore(metastoreLocator, hiveConfig, new MetastoreConfig(), new ThriftMetastoreConfig(), hdfsEnvironment, false),
                    new HiveIdentity(SESSION.getIdentity())),
            executor,
            new Duration(1, MINUTES),
            Optional.of(new Duration(15, SECONDS)),
            10000);
    setup(databaseName, hiveConfig, metastore, hdfsEnvironment);
}
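As a hedged illustration of how this method is typically reached, a concrete test class might call it from a TestNG @BeforeClass hook; the class name, host, port, database, and time zone below are hypothetical placeholders, not values taken from the Trino sources.

import org.testng.annotations.BeforeClass;

public class TestHiveThriftConnector extends AbstractTestHive {
    @BeforeClass
    public void initialize() {
        // Hypothetical values: point at a local Thrift metastore and use UTC for Parquet/RCFile time zones
        setup("localhost", 9083, "default", "UTC");
    }
}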
use of io.trino.plugin.hive.authentication.HiveIdentity in project trino by trinodb.
the class InMemoryThriftMetastore method listAllDataPaths.
private static List<String> listAllDataPaths(HiveIdentity identity, ThriftMetastore metastore, String schemaName, String tableName) {
    ImmutableList.Builder<String> locations = builder();
    Table table = metastore.getTable(identity, schemaName, tableName).get();
    if (table.getSd().getLocation() != null) {
        // For an unpartitioned table, there should be nothing directly under this directory.
        // But including this location in the set makes the directory content assertion more
        // extensive, which is desirable.
        locations.add(table.getSd().getLocation());
    }
    List<String> partitionColumnNames = table.getPartitionKeys().stream()
            .map(FieldSchema::getName)
            .collect(toImmutableList());
    Optional<List<String>> partitionNames = metastore.getPartitionNamesByFilter(identity, schemaName, tableName, partitionColumnNames, TupleDomain.all());
    if (partitionNames.isPresent()) {
        metastore.getPartitionsByNames(identity, schemaName, tableName, partitionNames.get()).stream()
                .map(partition -> partition.getSd().getLocation())
                .filter(location -> !location.startsWith(table.getSd().getLocation()))
                .forEach(locations::add);
    }
    return locations.build();
}
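A minimal usage sketch, assuming the caller already holds a ThriftMetastore handle and the same static SESSION used in the setup snippet above; the schema and table names are placeholders.

// Hypothetical call site: gather every data location for a table so a test can assert on directory contents
HiveIdentity identity = new HiveIdentity(SESSION.getIdentity());
List<String> dataPaths = listAllDataPaths(identity, thriftMetastore, "test_schema", "test_table");
// The table location (if any) comes first, followed by partition locations outside the table directory
dataPaths.forEach(System.out::println);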
use of io.trino.plugin.hive.authentication.HiveIdentity in project trino by trinodb.
the class ThriftHiveMetastore method updateTableStatistics.
@Override
public void updateTableStatistics(HiveIdentity identity, String databaseName, String tableName, AcidTransaction transaction, Function<PartitionStatistics, PartitionStatistics> update) {
    Table originalTable = getTable(identity, databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    PartitionStatistics currentStatistics = getTableStatistics(identity, originalTable);
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);
    Table modifiedTable = originalTable.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedTable.setParameters(updateStatisticsParameters(modifiedTable.getParameters(), basicStatistics));
    if (transaction.isAcidTransactionRunning()) {
        modifiedTable.setWriteId(transaction.getWriteId());
    }
    alterTable(identity, databaseName, tableName, modifiedTable);
    io.trino.plugin.hive.metastore.Table table = fromMetastoreApiTable(modifiedTable);
    OptionalLong rowCount = basicStatistics.getRowCount();
    List<ColumnStatisticsObj> metastoreColumnStatistics = updatedStatistics.getColumnStatistics().entrySet().stream()
            .flatMap(entry -> {
                Optional<Column> column = table.getColumn(entry.getKey());
                if (column.isEmpty() && isAvroTableWithSchemaSet(modifiedTable)) {
                    // an Avro table may expose columns the metastore does not know about;
                    // the metastore does not allow storing statistics for such columns, so skip them
                    return Stream.of();
                }
                HiveType type = column.orElseThrow(() -> new IllegalStateException("Column not found: " + entry.getKey())).getType();
                return Stream.of(createMetastoreColumnStatistics(entry.getKey(), type, entry.getValue(), rowCount));
            })
            .collect(toImmutableList());
    if (!metastoreColumnStatistics.isEmpty()) {
        setTableColumnStatistics(identity, databaseName, tableName, metastoreColumnStatistics);
    }
    Set<String> removedColumnStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedColumnStatistics.forEach(column -> deleteTableColumnStatistics(identity, databaseName, tableName, column));
}
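A hedged sketch of a call to this method after a non-transactional write, assuming a ThriftHiveMetastore instance named metastore and the AcidTransaction.NO_ACID_TRANSACTION constant for the non-ACID case; the schema, table, and counts are illustrative placeholders.

// Hypothetical update: overwrite the table's basic statistics while keeping its existing column statistics
HiveIdentity identity = new HiveIdentity(SESSION.getIdentity());
metastore.updateTableStatistics(identity, "test_schema", "test_table", NO_ACID_TRANSACTION, current ->
        new PartitionStatistics(
                new HiveBasicStatistics(OptionalLong.of(5), OptionalLong.of(100_000), OptionalLong.empty(), OptionalLong.empty()),
                current.getColumnStatistics()));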
use of io.trino.plugin.hive.authentication.HiveIdentity in project trino by trinodb.
the class ThriftHiveMetastore method getPartitionStatistics.
@Override
public Map<String, PartitionStatistics> getPartitionStatistics(HiveIdentity identity, Table table, List<Partition> partitions) {
    List<String> dataColumns = table.getSd().getCols().stream()
            .map(FieldSchema::getName)
            .collect(toImmutableList());
    List<String> partitionColumns = table.getPartitionKeys().stream()
            .map(FieldSchema::getName)
            .collect(toImmutableList());
    Map<String, HiveBasicStatistics> partitionBasicStatistics = partitions.stream()
            .collect(toImmutableMap(
                    partition -> makePartName(partitionColumns, partition.getValues()),
                    partition -> getHiveBasicStatistics(partition.getParameters())));
    Map<String, OptionalLong> partitionRowCounts = partitionBasicStatistics.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getRowCount()));
    Map<String, Map<String, HiveColumnStatistics>> partitionColumnStatistics = getPartitionColumnStatistics(identity, table.getDbName(), table.getTableName(), partitionBasicStatistics.keySet(), dataColumns, partitionRowCounts);
    ImmutableMap.Builder<String, PartitionStatistics> result = ImmutableMap.builder();
    for (String partitionName : partitionBasicStatistics.keySet()) {
        HiveBasicStatistics basicStatistics = partitionBasicStatistics.get(partitionName);
        Map<String, HiveColumnStatistics> columnStatistics = partitionColumnStatistics.getOrDefault(partitionName, ImmutableMap.of());
        result.put(partitionName, new PartitionStatistics(basicStatistics, columnStatistics));
    }
    return result.buildOrThrow();
}
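A small illustrative snippet that consumes the returned map, assuming the caller already fetched the Thrift API Table and its Partition objects and holds a HiveIdentity; only the printing is hypothetical.

// Hypothetical consumer: log the row count recorded for each partition
Map<String, PartitionStatistics> statistics = metastore.getPartitionStatistics(identity, table, partitions);
statistics.forEach((partitionName, stats) ->
        System.out.println(partitionName + " -> rows: " + stats.getBasicStatistics().getRowCount()));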
use of io.trino.plugin.hive.authentication.HiveIdentity in project trino by trinodb.
the class ThriftHiveMetastore method updatePartitionStatistics.
@Override
public void updatePartitionStatistics(HiveIdentity identity, Table table, String partitionName, Function<PartitionStatistics, PartitionStatistics> update) {
    List<Partition> partitions = getPartitionsByNames(identity, table.getDbName(), table.getTableName(), ImmutableList.of(partitionName));
    if (partitions.size() != 1) {
        throw new TrinoException(HIVE_METASTORE_ERROR, "Metastore returned multiple partitions for name: " + partitionName);
    }
    Partition originalPartition = getOnlyElement(partitions);
    PartitionStatistics currentStatistics = requireNonNull(
            getPartitionStatistics(identity, table, partitions).get(partitionName),
            "getPartitionStatistics() did not return statistics for partition");
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);
    Partition modifiedPartition = originalPartition.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), basicStatistics));
    alterPartitionWithoutStatistics(identity, table.getDbName(), table.getTableName(), modifiedPartition);
    Map<String, HiveType> columns = modifiedPartition.getSd().getCols().stream()
            .collect(toImmutableMap(FieldSchema::getName, schema -> HiveType.valueOf(schema.getType())));
    setPartitionColumnStatistics(identity, table.getDbName(), table.getTableName(), partitionName, columns, updatedStatistics.getColumnStatistics(), basicStatistics.getRowCount());
    Set<String> removedStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedStatistics.forEach(column -> deletePartitionColumnStatistics(identity, table.getDbName(), table.getTableName(), partitionName, column));
}
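A hedged example of updating one partition's statistics, assuming the Thrift API Table and a HiveIdentity are in scope; the partition name and counts are placeholders.

// Hypothetical update: replace the basic statistics of a single partition while preserving
// its existing column-level statistics
metastore.updatePartitionStatistics(identity, table, "ds=2022-01-01", current ->
        new PartitionStatistics(
                new HiveBasicStatistics(OptionalLong.of(3), OptionalLong.of(42_000), OptionalLong.empty(), OptionalLong.empty()),
                current.getColumnStatistics()));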