Search in sources:

Example 11 with ConnectorSplitSource

Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.

Class TestRaptorSplitManager, method testNoHostForShard:

@Test(expectedExceptions = TrinoException.class, expectedExceptionsMessageRegExp = "No host for shard .* found: \\[\\]")
public void testNoHostForShard() {
    deleteShardNodes();
    ConnectorSplitSource splitSource = getSplits(raptorSplitManager, tableHandle);
    getSplits(splitSource, 1000);
}
Also used: ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) Test(org.testng.annotations.Test)
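
The getSplits(raptorSplitManager, tableHandle) and getSplits(splitSource, 1000) helpers are defined elsewhere in the test class and are not shown on this page. A minimal sketch of the second helper, assuming it fetches a single batch through the asynchronous getNextBatch API (getFutureValue is io.airlift.concurrent.MoreFutures.getFutureValue and NOT_PARTITIONED is io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED; the body is illustrative rather than the exact Trino test code):

private static List<ConnectorSplit> getSplits(ConnectorSplitSource source, int maxSplits)
{
    // The "No host for shard" TrinoException expected by the test surfaces while this batch is materialized
    return getFutureValue(source.getNextBatch(NOT_PARTITIONED, maxSplits)).getSplits();
}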

Example 12 with ConnectorSplitSource

Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.

Class AbstractTestHive, method testPartitionSchemaNonCanonical:

// TODO coercion of non-canonical values should be supported
@Test(enabled = false)
public void testPartitionSchemaNonCanonical() throws Exception {
    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle table = getTableHandle(metadata, tablePartitionSchemaChangeNonCanonical);
        ColumnHandle column = metadata.getColumnHandles(session, table).get("t_boolean");
        Constraint constraint = new Constraint(TupleDomain.fromFixedValues(ImmutableMap.of(column, NullableValue.of(BOOLEAN, false))));
        table = applyFilter(metadata, table, constraint);
        HivePartition partition = getOnlyElement(((HiveTableHandle) table).getPartitions().orElseThrow(AssertionError::new));
        assertEquals(getPartitionId(partition), "t_boolean=0");
        ConnectorSplitSource splitSource = getSplits(splitManager, transaction, session, table);
        ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
        ImmutableList<ColumnHandle> columnHandles = ImmutableList.of(column);
        try (ConnectorPageSource ignored = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, table, columnHandles, DynamicFilter.EMPTY)) {
            fail("expected exception");
        } catch (TrinoException e) {
            assertEquals(e.getErrorCode(), HIVE_INVALID_PARTITION_VALUE.toErrorCode());
        }
    }
}
Also used: HiveColumnHandle.bucketColumnHandle(io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle) ColumnHandle(io.trino.spi.connector.ColumnHandle) Constraint(io.trino.spi.connector.Constraint) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) TrinoException(io.trino.spi.TrinoException) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) ConnectorSplit(io.trino.spi.connector.ConnectorSplit) Test(org.testng.annotations.Test)
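
The getSplits(splitManager, transaction, session, table) helper used above is likewise defined outside this snippet. A minimal sketch, assuming it simply forwards to ConnectorSplitManager.getSplits with ungrouped scheduling, no dynamic filter, and an always-true constraint (UNGROUPED_SCHEDULING is ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING and alwaysTrue() is Constraint.alwaysTrue(); not necessarily the exact AbstractTestHive code):

protected static ConnectorSplitSource getSplits(ConnectorSplitManager splitManager, Transaction transaction, ConnectorSession session, ConnectorTableHandle table)
{
    // Hand the table handle to the connector and let it enumerate splits with no pushed-down filter
    return splitManager.getSplits(transaction.getTransactionHandle(), session, table, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY, alwaysTrue());
}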

Example 13 with ConnectorSplitSource

Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.

Class SplitManager, method getSplits:

public SplitSource getSplits(Session session, TableHandle table, SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter, Constraint constraint) {
    CatalogName catalogName = table.getCatalogName();
    ConnectorSplitManager splitManager = getConnectorSplitManager(catalogName);
    if (!isAllowPushdownIntoConnectors(session)) {
        dynamicFilter = DynamicFilter.EMPTY;
    }
    ConnectorSession connectorSession = session.toConnectorSession(catalogName);
    ConnectorSplitSource source = splitManager.getSplits(table.getTransaction(), connectorSession, table.getConnectorHandle(), splitSchedulingStrategy, dynamicFilter, constraint);
    SplitSource splitSource = new ConnectorAwareSplitSource(catalogName, source);
    if (minScheduleSplitBatchSize > 1) {
        splitSource = new BufferingSplitSource(splitSource, minScheduleSplitBatchSize);
    }
    return splitSource;
}
Also used: ConnectorSplitManager(io.trino.spi.connector.ConnectorSplitManager) CatalogName(io.trino.connector.CatalogName) ConnectorSession(io.trino.spi.connector.ConnectorSession) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource)
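
For context, the splitManager.getSplits call in the middle of this method is a plain SPI call into the connector. The simplest conceivable implementation hands back a precomputed list wrapped in a FixedSplitSource (which also appears in the aggregations below). A minimal, hypothetical sketch; the class name is not from the Trino codebase:

public class FixedListSplitManager
        implements ConnectorSplitManager
{
    private final List<ConnectorSplit> splits;

    public FixedListSplitManager(List<ConnectorSplit> splits)
    {
        this.splits = ImmutableList.copyOf(requireNonNull(splits, "splits is null"));
    }

    @Override
    public ConnectorSplitSource getSplits(
            ConnectorTransactionHandle transaction,
            ConnectorSession session,
            ConnectorTableHandle table,
            SplitSchedulingStrategy splitSchedulingStrategy,
            DynamicFilter dynamicFilter,
            Constraint constraint)
    {
        // FixedSplitSource serves the list in batches and then reports isFinished()
        return new FixedSplitSource(splits);
    }
}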

Example 14 with ConnectorSplitSource

Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.

Class AbstractTestHiveFileSystem, method readTable:

protected MaterializedResult readTable(SchemaTableName tableName) throws IOException {
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        ConnectorTableHandle table = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, table).values());
        metadata.beginQuery(session);
        ConnectorSplitSource splitSource = getSplits(splitManager, transaction, session, table);
        List<Type> allTypes = getTypes(columnHandles);
        List<Type> dataTypes = getTypes(columnHandles.stream().filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden()).collect(toImmutableList()));
        MaterializedResult.Builder result = MaterializedResult.resultBuilder(session, dataTypes);
        List<ConnectorSplit> splits = getAllSplits(splitSource);
        for (ConnectorSplit split : splits) {
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, table, columnHandles, DynamicFilter.EMPTY)) {
                MaterializedResult pageSourceResult = materializeSourceDataStream(session, pageSource, allTypes);
                for (MaterializedRow row : pageSourceResult.getMaterializedRows()) {
                    Object[] dataValues = IntStream.range(0, row.getFieldCount()).filter(channel -> !((HiveColumnHandle) columnHandles.get(channel)).isHidden()).mapToObj(row::getField).toArray();
                    result.row(dataValues);
                }
            }
        }
        metadata.cleanupQuery(session);
        return result.build();
    }
}
Also used: ColumnHandle(io.trino.spi.connector.ColumnHandle) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Type(io.trino.spi.type.Type) Transaction(io.trino.plugin.hive.AbstractTestHive.Transaction) HiveTransaction(io.trino.plugin.hive.AbstractTestHive.HiveTransaction) ConnectorSession(io.trino.spi.connector.ConnectorSession) ConnectorMetadata(io.trino.spi.connector.ConnectorMetadata) MaterializedResult(io.trino.testing.MaterializedResult) ConnectorSplit(io.trino.spi.connector.ConnectorSplit) MaterializedRow(io.trino.testing.MaterializedRow)
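
getAllSplits, used here and in example 12, is another helper that is not reproduced on this page. A minimal sketch, assuming it drains the ConnectorSplitSource batch by batch until the source reports it is finished (the batch size of 1000 and the names are illustrative):

protected static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
{
    ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
    while (!splitSource.isFinished()) {
        // NOT_PARTITIONED is io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED
        splits.addAll(getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, 1000)).getSplits());
    }
    return splits.build();
}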

Example 15 with ConnectorSplitSource

Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.

Class TestJdbcRecordSetProvider, method getCursor:

private RecordCursor getCursor(JdbcTableHandle jdbcTableHandle, List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> domain) {
    jdbcTableHandle = new JdbcTableHandle(jdbcTableHandle.getRelationHandle(), domain, ImmutableList.of(), Optional.empty(), OptionalLong.empty(), Optional.empty(), jdbcTableHandle.getOtherReferencedTables(), jdbcTableHandle.getNextSyntheticColumnId());
    ConnectorSplitSource splits = jdbcClient.getSplits(SESSION, jdbcTableHandle);
    JdbcSplit split = (JdbcSplit) getOnlyElement(getFutureValue(splits.getNextBatch(NOT_PARTITIONED, 1000)).getSplits());
    ConnectorTransactionHandle transaction = new JdbcTransactionHandle();
    JdbcRecordSetProvider recordSetProvider = new JdbcRecordSetProvider(jdbcClient, executor);
    RecordSet recordSet = recordSetProvider.getRecordSet(transaction, SESSION, split, jdbcTableHandle, columns);
    return recordSet.cursor();
}
Also used: ConnectorTransactionHandle(io.trino.spi.connector.ConnectorTransactionHandle) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) RecordSet(io.trino.spi.connector.RecordSet)
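
A short, hypothetical usage sketch of the cursor returned by this helper; the tableHandle and idColumn variables, the column index, and the column type are placeholders rather than code from TestJdbcRecordSetProvider:

RecordCursor cursor = getCursor(tableHandle, ImmutableList.of(idColumn), TupleDomain.all());
while (cursor.advanceNextPosition()) {
    if (!cursor.isNull(0)) {
        // assumes the projected column is a BIGINT; use getSlice/getDouble/getBoolean for other types
        long value = cursor.getLong(0);
        // ... assertions on value
    }
}
cursor.close();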

Aggregations

ConnectorSplitSource (io.trino.spi.connector.ConnectorSplitSource): 26 usages
Test (org.testng.annotations.Test): 14 usages
ConnectorSession (io.trino.spi.connector.ConnectorSession): 12 usages
ConnectorSplit (io.trino.spi.connector.ConnectorSplit): 12 usages
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 10 usages
ImmutableList (com.google.common.collect.ImmutableList): 6 usages
ColumnHandle (io.trino.spi.connector.ColumnHandle): 6 usages
ConnectorSplitManager (io.trino.spi.connector.ConnectorSplitManager): 6 usages
ConnectorTransactionHandle (io.trino.spi.connector.ConnectorTransactionHandle): 6 usages
TrinoException (io.trino.spi.TrinoException): 5 usages
ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 5 usages
DynamicFilter (io.trino.spi.connector.DynamicFilter): 5 usages
FixedSplitSource (io.trino.spi.connector.FixedSplitSource): 5 usages
List (java.util.List): 5 usages
Objects.requireNonNull (java.util.Objects.requireNonNull): 5 usages
Optional (java.util.Optional): 5 usages
Inject (javax.inject.Inject): 5 usages
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 4 usages
TestingConnectorSession (io.trino.testing.TestingConnectorSession): 4 usages
URI (java.net.URI): 4 usages