Use of io.trino.plugin.hive.HiveTransactionHandle in project trino by trinodb.
From the class TestDeltaLakeSplitManager, method getSplits.
private List<DeltaLakeSplit> getSplits(DeltaLakeSplitManager splitManager, DeltaLakeConfig deltaLakeConfig)
        throws ExecutionException, InterruptedException
{
    // Parameters: ConnectorTransactionHandle transaction, ConnectorSession session,
    // ConnectorTableHandle handle, SplitSchedulingStrategy splitSchedulingStrategy, ...
    ConnectorSplitSource splitSource = splitManager.getSplits(
            new HiveTransactionHandle(false),
            testingConnectorSessionWithConfig(deltaLakeConfig),
            tableHandle,
            ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING,
            DynamicFilter.EMPTY,
            Constraint.alwaysTrue());
    ImmutableList.Builder<DeltaLakeSplit> splits = ImmutableList.builder();
    while (!splitSource.isFinished()) {
        List<ConnectorSplit> nextBatch = splitSource.getNextBatch(NOT_PARTITIONED, 10).get().getSplits();
        splits.addAll(nextBatch.stream()
                .map(split -> (DeltaLakeSplit) split)
                .collect(Collectors.toList()));
    }
    return splits.build();
}
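A hypothetical call site for this helper, assuming the surrounding test has already constructed a DeltaLakeSplitManager (that setup is not shown in the snippet above):

// Hypothetical usage; splitManager construction is assumed to happen elsewhere in the test.
List<DeltaLakeSplit> splits = getSplits(splitManager, new DeltaLakeConfig());
for (DeltaLakeSplit split : splits) {
    // e.g. inspect the split boundaries produced under the default configuration
    System.out.println(split);
}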
Use of io.trino.plugin.hive.HiveTransactionHandle in project trino by trinodb.
From the class TestConnectorPushdownRulesWithHive, method testPredicatePushdown.
@Test
public void testPredicatePushdown()
{
    String tableName = "predicate_test";
    tester().getQueryRunner().execute(format("CREATE TABLE %s (a, b) AS SELECT 5, 6", tableName));

    PushPredicateIntoTableScan pushPredicateIntoTableScan =
            new PushPredicateIntoTableScan(tester().getPlannerContext(), tester().getTypeAnalyzer());

    HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty());
    TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(false));

    HiveColumnHandle column = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty());

    tester().assertThat(pushPredicateIntoTableScan)
            .on(p -> p.filter(
                    PlanBuilder.expression("a = 5"),
                    p.tableScan(
                            table,
                            ImmutableList.of(p.symbol("a", INTEGER)),
                            ImmutableMap.of(p.symbol("a", INTEGER), column))))
            .matches(filter(
                    "a = 5",
                    tableScan(
                            tableHandle -> ((HiveTableHandle) tableHandle).getCompactEffectivePredicate().getDomains().get()
                                    .equals(ImmutableMap.of(column, Domain.singleValue(INTEGER, 5L))),
                            TupleDomain.all(),
                            ImmutableMap.of("a", column::equals))));

    metastore.dropTable(SCHEMA_NAME, tableName, true);
}
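The matcher's lambda packs the key assertion into one expression: after the rule fires, the table handle's compact effective predicate must pin column "a" to the value 5. Built standalone, the expected predicate looks like this (a sketch using the same Trino SPI types as the test):

// Equivalent to the ImmutableMap comparison inside the matcher above.
Domain expectedDomain = Domain.singleValue(INTEGER, 5L);
TupleDomain<HiveColumnHandle> expectedPredicate =
        TupleDomain.withColumnDomains(ImmutableMap.of(column, expectedDomain));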
Use of io.trino.plugin.hive.HiveTransactionHandle in project trino by trinodb.
From the class TestConnectorPushdownRulesWithHive, method testColumnPruningProjectionPushdown.
@Test
public void testColumnPruningProjectionPushdown()
{
    String tableName = "column_pruning_projection_test";
    tester().getQueryRunner().execute(format("CREATE TABLE %s (a, b) AS SELECT 5, 6", tableName));

    PruneTableScanColumns pruneTableScanColumns = new PruneTableScanColumns(tester().getMetadata());

    HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty());
    TableHandle table = new TableHandle(new CatalogName(HIVE_CATALOG_NAME), hiveTable, new HiveTransactionHandle(false));

    HiveColumnHandle columnA = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty());
    HiveColumnHandle columnB = createBaseColumn("b", 1, HIVE_INT, INTEGER, REGULAR, Optional.empty());

    tester().assertThat(pruneTableScanColumns)
            .on(p -> {
                Symbol symbolA = p.symbol("a", INTEGER);
                Symbol symbolB = p.symbol("b", INTEGER);
                return p.project(
                        Assignments.of(p.symbol("x"), symbolA.toSymbolReference()),
                        p.tableScan(
                                table,
                                ImmutableList.of(symbolA, symbolB),
                                ImmutableMap.of(symbolA, columnA, symbolB, columnB)));
            })
            .matches(strictProject(
                    ImmutableMap.of("expr", expression("COLA")),
                    tableScan(
                            hiveTable.withProjectedColumns(ImmutableSet.of(columnA))::equals,
                            TupleDomain.all(),
                            ImmutableMap.of("COLA", columnA::equals))));

    metastore.dropTable(SCHEMA_NAME, tableName, true);
}
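Here the expected scan is expressed by narrowing the original handle: withProjectedColumns returns a copy of the handle carrying only the surviving columns, and the matcher compares the planned scan against it with equals(). A minimal sketch of that comparison, reusing the fixtures above:

// The plan's table scan should carry this narrowed handle after the rule fires;
// columnB is dropped because the projection only references "a".
HiveTableHandle pruned = hiveTable.withProjectedColumns(ImmutableSet.of(columnA));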
Use of io.trino.plugin.hive.HiveTransactionHandle in project trino by trinodb.
From the class DeltaLakeConnector, method beginTransaction.
@Override
public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly, boolean autoCommit)
{
    checkConnectorSupports(READ_COMMITTED, isolationLevel);
    verify(autoCommit, "Catalog only supports writes using autocommit: DeltaLake");
    ConnectorTransactionHandle transaction = new HiveTransactionHandle(true);
    transactionManager.begin(transaction);
    return transaction;
}
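Two guards run before any state is created: checkConnectorSupports rejects isolation levels stronger than READ_COMMITTED, and Guava's verify rejects explicit (non-autocommit) transactions. A hedged sketch of what a caller would see; the connector variable is an assumed reference to the DeltaLakeConnector instance:

// Hypothetical calls; both should throw before any handle is registered.
connector.beginTransaction(SERIALIZABLE, false, true);    // throws: isolation level not supported
connector.beginTransaction(READ_COMMITTED, false, false); // throws VerifyException: autocommit required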
Use of io.trino.plugin.hive.HiveTransactionHandle in project trino by trinodb.
From the class IcebergConnector, method beginTransaction.
@Override
public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly, boolean autoCommit)
{
    checkConnectorSupports(SERIALIZABLE, isolationLevel);
    ConnectorTransactionHandle transaction = new HiveTransactionHandle(autoCommit);
    transactionManager.begin(transaction);
    return transaction;
}
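Note the contrast with the Delta Lake variant above: Iceberg advertises SERIALIZABLE and forwards the caller's autoCommit flag into the handle instead of hard-coding true. In both connectors, HiveTransactionHandle serves as an opaque token that the transaction manager keys its per-transaction state on. A map-backed registry is one plausible shape for such a manager; this is a sketch for illustration, not the real HiveTransactionManager or IcebergTransactionManager:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import io.trino.spi.connector.ConnectorTransactionHandle;

// Sketch only: a minimal map-backed transaction registry.
final class SketchTransactionManager
{
    private final ConcurrentMap<ConnectorTransactionHandle, Object> transactions = new ConcurrentHashMap<>();

    void begin(ConnectorTransactionHandle transaction)
    {
        // register per-transaction state keyed by the opaque handle
        transactions.put(transaction, new Object());
    }
}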