Example 76 with ConnectorSession

Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.

From the class TestHiveFileFormats, method testPageSourceFactory.

private void testPageSourceFactory(HivePageSourceFactory sourceFactory, FileSplit split, HiveStorageFormat storageFormat, List<TestColumn> testReadColumns, ConnectorSession session, long fileSize, int rowCount) throws IOException {
    Properties splitProperties = new Properties();
    splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat());
    splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerde());
    // Use full columns in split properties
    ImmutableList.Builder<String> splitPropertiesColumnNames = ImmutableList.builder();
    ImmutableList.Builder<String> splitPropertiesColumnTypes = ImmutableList.builder();
    Set<String> baseColumnNames = new HashSet<>();
    for (TestColumn testReadColumn : testReadColumns) {
        String name = testReadColumn.getBaseName();
        if (!baseColumnNames.contains(name) && !testReadColumn.isPartitionKey()) {
            baseColumnNames.add(name);
            splitPropertiesColumnNames.add(name);
            splitPropertiesColumnTypes.add(testReadColumn.getBaseObjectInspector().getTypeName());
        }
    }
    splitProperties.setProperty("columns", splitPropertiesColumnNames.build().stream().collect(Collectors.joining(",")));
    splitProperties.setProperty("columns.types", splitPropertiesColumnTypes.build().stream().collect(Collectors.joining(",")));
    List<HivePartitionKey> partitionKeys = testReadColumns.stream()
            .filter(TestColumn::isPartitionKey)
            .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue()))
            .collect(toList());
    String partitionName = String.join("/", partitionKeys.stream()
            .map(partitionKey -> format("%s=%s", partitionKey.getName(), partitionKey.getValue()))
            .collect(toImmutableList()));
    List<HiveColumnHandle> columnHandles = getColumnHandles(testReadColumns);
    List<HivePageSourceProvider.ColumnMapping> columnMappings = buildColumnMappings(
            partitionName,
            partitionKeys,
            columnHandles,
            ImmutableList.of(),
            TableToPartitionMapping.empty(),
            split.getPath(),
            OptionalInt.empty(),
            fileSize,
            Instant.now().toEpochMilli());
    Optional<ConnectorPageSource> pageSource = HivePageSourceProvider.createHivePageSource(
            ImmutableSet.of(sourceFactory),
            ImmutableSet.of(),
            new Configuration(false),
            session,
            split.getPath(),
            OptionalInt.empty(),
            split.getStart(),
            split.getLength(),
            fileSize,
            splitProperties,
            TupleDomain.all(),
            columnHandles,
            TESTING_TYPE_MANAGER,
            Optional.empty(),
            Optional.empty(),
            false,
            Optional.empty(),
            false,
            NO_ACID_TRANSACTION,
            columnMappings);
    assertTrue(pageSource.isPresent());
    checkPageSource(pageSource.get(), testReadColumns, getTypes(columnHandles), rowCount);
}
Also used : OrcFileWriterFactory(io.trino.plugin.hive.orc.OrcFileWriterFactory) ParquetFileWriterFactory(io.trino.plugin.hive.parquet.ParquetFileWriterFactory) Test(org.testng.annotations.Test) NO_ACID_TRANSACTION(io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION) HiveTestUtils.createGenericHiveRecordCursorProvider(io.trino.plugin.hive.HiveTestUtils.createGenericHiveRecordCursorProvider) TrinoExceptionAssert.assertTrinoExceptionThrownBy(io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy) PARQUET(io.trino.plugin.hive.HiveStorageFormat.PARQUET) FileSplit(org.apache.hadoop.mapred.FileSplit) Locale(java.util.Locale) Configuration(org.apache.hadoop.conf.Configuration) StructuralTestUtil.rowBlockOf(io.trino.testing.StructuralTestUtil.rowBlockOf) Slices.utf8Slice(io.airlift.slice.Slices.utf8Slice) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) AVRO(io.trino.plugin.hive.HiveStorageFormat.AVRO) SERIALIZATION_LIB(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB) LzoCodec(io.airlift.compress.lzo.LzoCodec) ImmutableSet(com.google.common.collect.ImmutableSet) TimeZone(java.util.TimeZone) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) BeforeClass(org.testng.annotations.BeforeClass) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Set(java.util.Set) Assert.assertNotNull(org.testng.Assert.assertNotNull) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) HDFS_ENVIRONMENT(io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT) String.format(java.lang.String.format) Preconditions.checkState(com.google.common.base.Preconditions.checkState) List(java.util.List) ColumnMapping.buildColumnMappings(io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping.buildColumnMappings) OrcReaderConfig(io.trino.plugin.hive.orc.OrcReaderConfig) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) Optional(java.util.Optional) ParquetReaderConfig(io.trino.plugin.hive.parquet.ParquetReaderConfig) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) RcFilePageSourceFactory(io.trino.plugin.hive.rcfile.RcFilePageSourceFactory) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) DataProvider(org.testng.annotations.DataProvider) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) Type(io.trino.spi.type.Type) Assert.assertEquals(org.testng.Assert.assertEquals) CSV(io.trino.plugin.hive.HiveStorageFormat.CSV) OptionalInt(java.util.OptionalInt) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) LzopCodec(io.airlift.compress.lzo.LzopCodec) SymlinkTextInputFormat(org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ParquetPageSourceFactory(io.trino.plugin.hive.parquet.ParquetPageSourceFactory) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) ParquetWriterConfig(io.trino.plugin.hive.parquet.ParquetWriterConfig) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) 
PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) SEQUENCEFILE(io.trino.plugin.hive.HiveStorageFormat.SEQUENCEFILE) OrcReaderOptions(io.trino.orc.OrcReaderOptions) OrcPageSourceFactory(io.trino.plugin.hive.orc.OrcPageSourceFactory) RecordPageSource(io.trino.spi.connector.RecordPageSource) Objects.requireNonNull(java.util.Objects.requireNonNull) TEXTFILE(io.trino.plugin.hive.HiveStorageFormat.TEXTFILE) JSON(io.trino.plugin.hive.HiveStorageFormat.JSON) OrcWriterConfig(io.trino.plugin.hive.orc.OrcWriterConfig) RCBINARY(io.trino.plugin.hive.HiveStorageFormat.RCBINARY) RecordCursor(io.trino.spi.connector.RecordCursor) Properties(java.util.Properties) ORC(io.trino.plugin.hive.HiveStorageFormat.ORC) HiveTestUtils.getTypes(io.trino.plugin.hive.HiveTestUtils.getTypes) TESTING_TYPE_MANAGER(io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER) IOException(java.io.IOException) ConnectorSession(io.trino.spi.connector.ConnectorSession) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) TupleDomain(io.trino.spi.predicate.TupleDomain) UTC(org.joda.time.DateTimeZone.UTC) File(java.io.File) TestingConnectorSession(io.trino.testing.TestingConnectorSession) SESSION(io.trino.plugin.hive.HiveTestUtils.SESSION) HiveTestUtils.getHiveSession(io.trino.plugin.hive.HiveTestUtils.getHiveSession) Collectors.toList(java.util.stream.Collectors.toList) OrcWriterOptions(io.trino.orc.OrcWriterOptions) RCTEXT(io.trino.plugin.hive.HiveStorageFormat.RCTEXT) FILE_INPUT_FORMAT(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT) Assert.assertTrue(org.testng.Assert.assertTrue) PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector)
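The session argument above is typically a TestingConnectorSession carrying Hive session properties. A minimal sketch of building one with default configs, mirroring the builder usage in Example 79 (the HiveTestUtils.getHiveSession helper imported above wraps the same pattern):

// A minimal sketch, assuming default configs; property values are illustrative.
ConnectorSession session = TestingConnectorSession.builder()
        .setPropertyMetadata(new HiveSessionProperties(
                new HiveConfig(),
                new OrcReaderConfig(),
                new OrcWriterConfig(),
                new ParquetReaderConfig(),
                new ParquetWriterConfig()).getSessionProperties())
        .build();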

Example 77 with ConnectorSession

Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.

From the class TestHiveFileFormats, method testOptimizedParquetWriter.

@Test(dataProvider = "rowCount")
public void testOptimizedParquetWriter(int rowCount) throws Exception {
    ConnectorSession session = getHiveSession(new HiveConfig(), new ParquetWriterConfig().setParquetOptimizedWriterEnabled(true));
    assertTrue(HiveSessionProperties.isParquetOptimizedWriterEnabled(session));
    List<TestColumn> testColumns = getTestColumnsSupportedByParquet();
    assertThatFileFormat(PARQUET)
            .withSession(session)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .withFileWriterFactory(new ParquetFileWriterFactory(HDFS_ENVIRONMENT, new NodeVersion("test-version"), TESTING_TYPE_MANAGER))
            .isReadableByPageSource(new ParquetPageSourceFactory(HDFS_ENVIRONMENT, STATS, new ParquetReaderConfig(), new HiveConfig()));
}
Also used : ParquetWriterConfig(io.trino.plugin.hive.parquet.ParquetWriterConfig) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) ParquetFileWriterFactory(io.trino.plugin.hive.parquet.ParquetFileWriterFactory) ParquetPageSourceFactory(io.trino.plugin.hive.parquet.ParquetPageSourceFactory) ParquetReaderConfig(io.trino.plugin.hive.parquet.ParquetReaderConfig) Test(org.testng.annotations.Test)
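For contrast, a hedged sketch of the inverse check; it assumes the optimized Parquet writer defaults to disabled, which is why the test above has to enable it explicitly:

// Hypothetical counterpart, assuming the default ParquetWriterConfig leaves the
// optimized writer off; assertFalse is the org.testng.Assert counterpart of the
// assertTrue already imported.
ConnectorSession defaultSession = getHiveSession(new HiveConfig(), new ParquetWriterConfig());
assertFalse(HiveSessionProperties.isParquetOptimizedWriterEnabled(defaultSession));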

Example 78 with ConnectorSession

Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.

From the class TestBackgroundHiveSplitLoader, method backgroundHiveSplitLoader.

private BackgroundHiveSplitLoader backgroundHiveSplitLoader(List<LocatedFileStatus> files, DirectoryLister directoryLister) {
    List<HivePartitionMetadata> hivePartitionMetadatas = ImmutableList.of(new HivePartitionMetadata(
            new HivePartition(new SchemaTableName("testSchema", "table_name")),
            Optional.empty(),
            TableToPartitionMapping.empty()));
    ConnectorSession connectorSession = getHiveSession(new HiveConfig().setMaxSplitSize(DataSize.of(1, GIGABYTE)));
    return new BackgroundHiveSplitLoader(
            SIMPLE_TABLE,
            NO_ACID_TRANSACTION,
            hivePartitionMetadatas,
            TupleDomain.none(),
            DynamicFilter.EMPTY,
            new Duration(0, SECONDS),
            TESTING_TYPE_MANAGER,
            Optional.empty(),
            connectorSession,
            new TestingHdfsEnvironment(files),
            new NamenodeStats(),
            directoryLister,
            executor,
            2,
            false,
            false,
            true,
            Optional.empty(),
            Optional.empty());
}
Also used : ConnectorSession(io.trino.spi.connector.ConnectorSession) Duration(io.airlift.units.Duration) SchemaTableName(io.trino.spi.connector.SchemaTableName)
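The returned loader is not started here; a caller typically wires it to a split source first. A minimal sketch, assuming a hiveSplitSource helper along the lines this test class uses elsewhere (the helper name is an assumption):

// Sketch of a typical caller; hiveSplitSource(...) is a hypothetical helper.
BackgroundHiveSplitLoader loader = backgroundHiveSplitLoader(files, directoryLister);
HiveSplitSource splitSource = hiveSplitSource(loader);
loader.start(splitSource);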

Example 79 with ConnectorSession

Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.

From the class TestOrcPageSourceMemoryTracking, method testMaxReadBytes.

@Test(dataProvider = "rowCount")
public void testMaxReadBytes(int rowCount) throws Exception {
    int maxReadBytes = 1_000;
    HiveSessionProperties hiveSessionProperties = new HiveSessionProperties(
            new HiveConfig(),
            new OrcReaderConfig().setMaxBlockSize(DataSize.ofBytes(maxReadBytes)),
            new OrcWriterConfig(),
            new ParquetReaderConfig(),
            new ParquetWriterConfig());
    ConnectorSession session = TestingConnectorSession.builder().setPropertyMetadata(hiveSessionProperties.getSessionProperties()).build();
    FileFormatDataSourceStats stats = new FileFormatDataSourceStats();
    // Build a table where every row gets larger, so we can verify that the "batchSize" shrinks accordingly
    int numColumns = 5;
    int step = 250;
    ImmutableList.Builder<TestColumn> columnBuilder = ImmutableList.<TestColumn>builder().add(new TestColumn("p_empty_string", javaStringObjectInspector, () -> "", true));
    GrowingTestColumn[] dataColumns = new GrowingTestColumn[numColumns];
    for (int i = 0; i < numColumns; i++) {
        dataColumns[i] = new GrowingTestColumn("p_string" + "_" + i, javaStringObjectInspector, () -> Long.toHexString(random.nextLong()), false, step * (i + 1));
        columnBuilder.add(dataColumns[i]);
    }
    List<TestColumn> testColumns = columnBuilder.build();
    File tempFile = File.createTempFile("trino_test_orc_page_source_max_read_bytes", "orc");
    tempFile.delete();
    TestPreparer testPreparer = new TestPreparer(tempFile.getAbsolutePath(), testColumns, rowCount, rowCount);
    ConnectorPageSource pageSource = testPreparer.newPageSource(stats, session);
    try {
        int positionCount = 0;
        while (true) {
            Page page = pageSource.getNextPage();
            if (pageSource.isFinished()) {
                break;
            }
            assertNotNull(page);
            page = page.getLoadedPage();
            positionCount += page.getPositionCount();
            // ignore the first MAX_BATCH_SIZE rows given the sizes are set when loading the blocks
            if (positionCount > MAX_BATCH_SIZE) {
                // either the block is bounded by maxReadBytes or we just load one single large block
                // an error margin MAX_BATCH_SIZE / step is needed given the block sizes are increasing
                assertTrue(page.getSizeInBytes() < maxReadBytes * (MAX_BATCH_SIZE / step) || 1 == page.getPositionCount());
            }
        }
        // verify the stats are correctly recorded
        Distribution distribution = stats.getMaxCombinedBytesPerRow().getAllTime();
        assertEquals((int) distribution.getCount(), 1);
        // the block is VariableWidthBlock that contains valueIsNull and offsets arrays as overhead
        assertEquals(
                (int) distribution.getMax(),
                Arrays.stream(dataColumns).mapToInt(GrowingTestColumn::getMaxSize).sum() + (Integer.BYTES + Byte.BYTES) * numColumns);
        pageSource.close();
    } finally {
        tempFile.delete();
    }
}
Also used : ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ImmutableList(com.google.common.collect.ImmutableList) ParquetWriterConfig(io.trino.plugin.hive.parquet.ParquetWriterConfig) OrcWriterConfig(io.trino.plugin.hive.orc.OrcWriterConfig) Page(io.trino.spi.Page) ConnectorPageSource(io.trino.spi.connector.ConnectorPageSource) OrcReaderConfig(io.trino.plugin.hive.orc.OrcReaderConfig) Distribution(io.airlift.stats.Distribution) ConnectorSession(io.trino.spi.connector.ConnectorSession) TestingConnectorSession(io.trino.testing.TestingConnectorSession) SequenceFile(org.apache.hadoop.io.SequenceFile) File(java.io.File) OrcFile(org.apache.hadoop.hive.ql.io.orc.OrcFile) ParquetReaderConfig(io.trino.plugin.hive.parquet.ParquetReaderConfig) Test(org.testng.annotations.Test)
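The final assertEquals encodes per-row block overhead. A worked restatement of that arithmetic, using the constants from the assertion itself (the evaluated numbers in the comments assume numColumns = 5, as in the test):

// Per row, each VariableWidthBlock column carries a 4-byte offset (Integer.BYTES)
// and a 1-byte null flag (Byte.BYTES) on top of the string data itself.
int perColumnOverhead = Integer.BYTES + Byte.BYTES; // 4 + 1 = 5 bytes
int totalOverhead = perColumnOverhead * numColumns; // 5 * 5 = 25 bytes per row
// expected max combined bytes per row = sum of each column's max data size + totalOverhead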

Example 80 with ConnectorSession

Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.

From the class TestPushProjectionIntoTableScan, method createMockFactory.

private MockConnectorFactory createMockFactory(Map<String, ColumnHandle> assignments, Optional<MockConnectorFactory.ApplyProjection> applyProjection) {
    List<ColumnMetadata> metadata = assignments.entrySet().stream()
            .map(entry -> new ColumnMetadata(entry.getKey(), ((TpchColumnHandle) entry.getValue()).getType()))
            .collect(toImmutableList());
    MockConnectorFactory.Builder builder = MockConnectorFactory.builder()
            .withListSchemaNames(connectorSession -> ImmutableList.of(TEST_SCHEMA))
            .withListTables((connectorSession, schema) -> TEST_SCHEMA.equals(schema) ? ImmutableList.of(TEST_SCHEMA_TABLE) : ImmutableList.of())
            .withGetColumns(schemaTableName -> metadata)
            .withGetTableProperties((session, tableHandle) -> {
                MockConnectorTableHandle mockTableHandle = (MockConnectorTableHandle) tableHandle;
                if (mockTableHandle.getTableName().getTableName().equals(TEST_TABLE)) {
                    return new ConnectorTableProperties(
                            TupleDomain.all(),
                            Optional.of(new ConnectorTablePartitioning(PARTITIONING_HANDLE, ImmutableList.of(column("col", VARCHAR)))),
                            Optional.empty(),
                            Optional.empty(),
                            ImmutableList.of());
                }
                return new ConnectorTableProperties();
            });
    if (applyProjection.isPresent()) {
        builder = builder.withApplyProjection(applyProjection.get());
    }
    return builder.build();
}
Also used : Test(org.testng.annotations.Test) CatalogName(io.trino.connector.CatalogName) MockConnectorFactory(io.trino.connector.MockConnectorFactory) LongLiteral(io.trino.sql.tree.LongLiteral) Arrays.asList(java.util.Arrays.asList) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Map(java.util.Map) ProjectionApplicationResult(io.trino.spi.connector.ProjectionApplicationResult) FunctionCall(io.trino.sql.tree.FunctionCall) ENGLISH(java.util.Locale.ENGLISH) PlanNodeStatsEstimate(io.trino.cost.PlanNodeStatsEstimate) PlanMatchPattern.expression(io.trino.sql.planner.assertions.PlanMatchPattern.expression) TpchColumnHandle(io.trino.plugin.tpch.TpchColumnHandle) RowType(io.trino.spi.type.RowType) ImmutableMap(com.google.common.collect.ImmutableMap) Call(io.trino.spi.expression.Call) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Assignments(io.trino.sql.planner.plan.Assignments) ScalarStatsCalculator(io.trino.cost.ScalarStatsCalculator) Collectors(java.util.stream.Collectors) SchemaTableName(io.trino.spi.connector.SchemaTableName) List(java.util.List) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) TestingSession.testSessionBuilder(io.trino.testing.TestingSession.testSessionBuilder) PlanMatchPattern.anyTree(io.trino.sql.planner.assertions.PlanMatchPattern.anyTree) BIGINT(io.trino.spi.type.BigintType.BIGINT) ConnectorPartitioningHandle(io.trino.spi.connector.ConnectorPartitioningHandle) SymbolReference(io.trino.sql.tree.SymbolReference) Assignment(io.trino.spi.connector.Assignment) Optional(java.util.Optional) Expression(io.trino.sql.tree.Expression) RowType.field(io.trino.spi.type.RowType.field) ConnectorTablePartitioning(io.trino.spi.connector.ConnectorTablePartitioning) ConnectorExpressionTranslator.translate(io.trino.sql.planner.ConnectorExpressionTranslator.translate) Session(io.trino.Session) PlannerContext(io.trino.sql.PlannerContext) TypeAnalyzer.createTestingTypeAnalyzer(io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer) ColumnMetadata(io.trino.spi.connector.ColumnMetadata) Type(io.trino.spi.type.Type) Variable(io.trino.spi.expression.Variable) Function(java.util.function.Function) SubscriptExpression(io.trino.sql.tree.SubscriptExpression) VARCHAR(io.trino.spi.type.VarcharType.VARCHAR) ImmutableList(com.google.common.collect.ImmutableList) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) ColumnHandle(io.trino.spi.connector.ColumnHandle) Constant(io.trino.spi.expression.Constant) TypeProvider.viewOf(io.trino.sql.planner.TypeProvider.viewOf) Symbol(io.trino.sql.planner.Symbol) SymbolStatsEstimate(io.trino.cost.SymbolStatsEstimate) StringLiteral(io.trino.sql.tree.StringLiteral) RuleTester.defaultRuleTester(io.trino.sql.planner.iterative.rule.test.RuleTester.defaultRuleTester) FieldDereference(io.trino.spi.expression.FieldDereference) TestingTransactionHandle(io.trino.testing.TestingTransactionHandle) ConnectorSession(io.trino.spi.connector.ConnectorSession) RuleTester(io.trino.sql.planner.iterative.rule.test.RuleTester) TupleDomain(io.trino.spi.predicate.TupleDomain) QualifiedName(io.trino.sql.tree.QualifiedName) MockConnectorTableHandle(io.trino.connector.MockConnectorTableHandle) TableHandle(io.trino.metadata.TableHandle) ConnectorTableProperties(io.trino.spi.connector.ConnectorTableProperties) ConnectorExpression(io.trino.spi.expression.ConnectorExpression) TypeAnalyzer(io.trino.sql.planner.TypeAnalyzer) 
PlanMatchPattern.project(io.trino.sql.planner.assertions.PlanMatchPattern.project) TransactionId(io.trino.transaction.TransactionId) PlanMatchPattern.tableScan(io.trino.sql.planner.assertions.PlanMatchPattern.tableScan)
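A hedged usage sketch of the factory above; the single varchar column mirrors the "col" partitioning column wired in withGetTableProperties, and passing Optional.empty() skips the ApplyProjection override (both choices are illustrative):

// Illustrative call; the assignment map contents are assumptions for the example.
MockConnectorFactory factory = createMockFactory(
        ImmutableMap.of("col", new TpchColumnHandle("col", VARCHAR)),
        Optional.empty());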

Aggregations

ConnectorSession (io.trino.spi.connector.ConnectorSession): 252
SchemaTableName (io.trino.spi.connector.SchemaTableName): 131
ConnectorMetadata (io.trino.spi.connector.ConnectorMetadata): 122
ConnectorTableHandle (io.trino.spi.connector.ConnectorTableHandle): 119
List (java.util.List): 111
ColumnHandle (io.trino.spi.connector.ColumnHandle): 108
Optional (java.util.Optional): 107
ImmutableList (com.google.common.collect.ImmutableList): 98
Objects.requireNonNull (java.util.Objects.requireNonNull): 97
TrinoException (io.trino.spi.TrinoException): 94
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 92
Map (java.util.Map): 88
TestingConnectorSession (io.trino.testing.TestingConnectorSession): 87
ImmutableMap (com.google.common.collect.ImmutableMap): 85
TupleDomain (io.trino.spi.predicate.TupleDomain): 85
ConnectorTableMetadata (io.trino.spi.connector.ConnectorTableMetadata): 83
Test (org.testng.annotations.Test): 82
ColumnMetadata (io.trino.spi.connector.ColumnMetadata): 80
Constraint (io.trino.spi.connector.Constraint): 76
Type (io.trino.spi.type.Type): 72