
Example 11 with TestingTypeManager

Use of io.trino.spi.type.TestingTypeManager in the trino project by trinodb.

From the class TestAvroSchemaConverter, method testUnsupportedUnionType.

@Test
public void testUnsupportedUnionType() {
    assertThatThrownBy(() -> new AvroSchemaConverter(new TestingTypeManager(), IGNORE)
            .convertAvroSchema(SchemaBuilder.record(RECORD_NAME).fields()
                    .name("union_col").type().unionOf()
                    .nullType().and().floatType().and().longType().endUnion().noDefault()
                    .endRecord()))
            .isInstanceOf(UnsupportedOperationException.class)
            .hasMessageStartingWith("Incompatible UNION type:");
    assertThatThrownBy(() -> new AvroSchemaConverter(new TestingTypeManager(), IGNORE)
            .convertAvroSchema(SchemaBuilder.record(RECORD_NAME).fields()
                    .name("union_col").type().unionOf()
                    .nullType().and().fixed("fixed").size(5).and().stringType().endUnion().noDefault()
                    .endRecord()))
            .isInstanceOf(UnsupportedOperationException.class)
            .hasMessageStartingWith("Incompatible UNION type:");
    assertThatThrownBy(() -> new AvroSchemaConverter(new TestingTypeManager(), IGNORE)
            .convertAvroSchema(SchemaBuilder.record(RECORD_NAME).fields()
                    .name("union_col").type().unionOf()
                    .nullType().and().booleanType().and().intType().endUnion().noDefault()
                    .endRecord()))
            .isInstanceOf(UnsupportedOperationException.class)
            .hasMessageStartingWith("Incompatible UNION type:");
}
Also used : TestingTypeManager(io.trino.spi.type.TestingTypeManager) Test(org.testng.annotations.Test)
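
By contrast, a union of null plus two members of the same numeric family converts cleanly. A minimal sketch reusing the fixtures and static imports of the test above (grounded in Example 12 below, where a null/float/double union maps to a single DOUBLE column):

    Schema supported = SchemaBuilder.record(RECORD_NAME).fields()
            .name("union_col").type().unionOf()
            .nullType().and().floatType().and().doubleType().endUnion().noDefault()
            .endRecord();
    // The converter collapses the nullable union to its widest member, DOUBLE.
    List<Type> types = new AvroSchemaConverter(new TestingTypeManager(), IGNORE)
            .convertAvroSchema(supported);
    assertEquals(types, ImmutableList.of(DOUBLE));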

Example 12 with TestingTypeManager

Use of io.trino.spi.type.TestingTypeManager in the trino project by trinodb.

From the class TestAvroSchemaConverter, method testConvertSchema.

@Test
public void testConvertSchema() {
    Schema schema = SchemaBuilder.record(RECORD_NAME).fields()
            .name("bool_col").type().booleanType().noDefault()
            .name("int_col").type().intType().noDefault()
            .name("long_col").type().longType().noDefault()
            .name("float_col").type().floatType().noDefault()
            .name("double_col").type().doubleType().noDefault()
            .name("string_col").type().stringType().noDefault()
            .name("enum_col").type().enumeration("colors").symbols("blue", "red", "yellow").noDefault()
            .name("bytes_col").type().bytesType().noDefault()
            .name("fixed_col").type().fixed("fixed").size(5).noDefault()
            .name("union_col").type().unionOf().nullType().and().floatType().and().doubleType().endUnion().noDefault()
            .name("union_col2").type().unionOf().nullType().and().intType().and().longType().endUnion().noDefault()
            .name("union_col3").type().unionOf().nullType().and().bytesType().and().type("fixed").endUnion().noDefault()
            .name("union_col4").type().unionOf().nullType().and().type("colors").and().stringType().endUnion().noDefault()
            .name("list_col").type().array().items().intType().noDefault()
            .name("map_col").type().map().values().intType().noDefault()
            .name("record_col").type().record("record_col").fields()
                    .name("nested_list").type().array().items().map().values().stringType().noDefault()
                    .name("nested_map").type().map().values().array().items().stringType().noDefault()
                    .endRecord().noDefault()
            .endRecord();
    AvroSchemaConverter avroSchemaConverter = new AvroSchemaConverter(new TestingTypeManager(), IGNORE);
    List<Type> types = avroSchemaConverter.convertAvroSchema(schema);
    List<Type> expected = ImmutableList.<Type>builder()
            .add(BOOLEAN).add(INTEGER).add(BIGINT).add(REAL).add(DOUBLE)
            .add(VARCHAR).add(VARCHAR).add(VARBINARY).add(VARBINARY)
            .add(DOUBLE).add(BIGINT).add(VARBINARY).add(VARCHAR)
            .add(new ArrayType(INTEGER))
            .add(createType(INTEGER))
            .add(RowType.from(ImmutableList.<RowType.Field>builder()
                    .add(new RowType.Field(Optional.of("nested_list"), new ArrayType(createType(VARCHAR))))
                    .add(new RowType.Field(Optional.of("nested_map"), createType(new ArrayType(VARCHAR))))
                    .build()))
            .build();
    assertEquals(types, expected);
}
Also used : ArrayType(io.trino.spi.type.ArrayType) Type(io.trino.spi.type.Type) RowType(io.trino.spi.type.RowType) MapType(io.trino.spi.type.MapType) Schema(org.apache.avro.Schema) TestingTypeManager(io.trino.spi.type.TestingTypeManager) Test(org.testng.annotations.Test)
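
The expected list calls a createType helper that this excerpt does not show. A plausible reconstruction (an assumption, not the verbatim source): Avro map keys are always strings, so the helper wraps a value type in a Trino map type keyed by VARCHAR.

    // Hypothetical reconstruction of the createType helper referenced above;
    // Avro maps have string keys, so values become map(varchar, valueType).
    private static Type createType(Type valueType)
    {
        return new MapType(VARCHAR, valueType, new TypeOperators());
    }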

Example 13 with TestingTypeManager

Use of io.trino.spi.type.TestingTypeManager in the trino project by trinodb.

From the class TestHiveSplit, method testJsonRoundTrip.

@Test
public void testJsonRoundTrip() {
    ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider();
    objectMapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(new TestingTypeManager())));
    JsonCodec<HiveSplit> codec = new JsonCodecFactory(objectMapperProvider).jsonCodec(HiveSplit.class);
    Properties schema = new Properties();
    schema.setProperty("foo", "bar");
    schema.setProperty("bar", "baz");
    ImmutableList<HivePartitionKey> partitionKeys = ImmutableList.of(new HivePartitionKey("a", "apple"), new HivePartitionKey("b", "42"));
    ImmutableList<HostAddress> addresses = ImmutableList.of(HostAddress.fromParts("127.0.0.1", 44), HostAddress.fromParts("127.0.0.1", 45));
    AcidInfo.Builder acidInfoBuilder = AcidInfo.builder(new Path("file:///data/fullacid"));
    acidInfoBuilder.addDeleteDelta(new Path("file:///data/fullacid/delete_delta_0000004_0000004_0000"));
    acidInfoBuilder.addDeleteDelta(new Path("file:///data/fullacid/delete_delta_0000007_0000007_0000"));
    AcidInfo acidInfo = acidInfoBuilder.build().get();
    HiveSplit expected = new HiveSplit(
            "db",
            "table",
            "partitionId",
            "path",
            42,
            87,
            88,
            Instant.now().toEpochMilli(),
            schema,
            partitionKeys,
            addresses,
            OptionalInt.empty(),
            0,
            true,
            TableToPartitionMapping.mapColumnsByIndex(ImmutableMap.of(1, new HiveTypeName("string"))),
            Optional.of(new HiveSplit.BucketConversion(
                    BUCKETING_V1,
                    32,
                    16,
                    ImmutableList.of(createBaseColumn("col", 5, HIVE_LONG, BIGINT, ColumnType.REGULAR, Optional.of("comment"))))),
            Optional.empty(),
            false,
            Optional.of(acidInfo),
            555534,
            // some non-standard value
            SplitWeight.fromProportion(2.0));
    String json = codec.toJson(expected);
    HiveSplit actual = codec.fromJson(json);
    assertEquals(actual.getDatabase(), expected.getDatabase());
    assertEquals(actual.getTable(), expected.getTable());
    assertEquals(actual.getPartitionName(), expected.getPartitionName());
    assertEquals(actual.getPath(), expected.getPath());
    assertEquals(actual.getStart(), expected.getStart());
    assertEquals(actual.getLength(), expected.getLength());
    assertEquals(actual.getEstimatedFileSize(), expected.getEstimatedFileSize());
    assertEquals(actual.getSchema(), expected.getSchema());
    assertEquals(actual.getPartitionKeys(), expected.getPartitionKeys());
    assertEquals(actual.getAddresses(), expected.getAddresses());
    assertEquals(actual.getTableToPartitionMapping().getPartitionColumnCoercions(), expected.getTableToPartitionMapping().getPartitionColumnCoercions());
    assertEquals(actual.getTableToPartitionMapping().getTableToPartitionColumns(), expected.getTableToPartitionMapping().getTableToPartitionColumns());
    assertEquals(actual.getBucketConversion(), expected.getBucketConversion());
    assertEquals(actual.isForceLocalScheduling(), expected.isForceLocalScheduling());
    assertEquals(actual.isS3SelectPushdownEnabled(), expected.isS3SelectPushdownEnabled());
    assertEquals(actual.getAcidInfo().get(), expected.getAcidInfo().get());
    assertEquals(actual.getSplitNumber(), expected.getSplitNumber());
    assertEquals(actual.getSplitWeight(), expected.getSplitWeight());
}
Also used : Path(org.apache.hadoop.fs.Path) Properties(java.util.Properties) HostAddress(io.trino.spi.HostAddress) ObjectMapperProvider(io.airlift.json.ObjectMapperProvider) Type(io.trino.spi.type.Type) ColumnType(io.trino.plugin.hive.HiveColumnHandle.ColumnType) TypeDeserializer(io.trino.plugin.base.TypeDeserializer) JsonCodecFactory(io.airlift.json.JsonCodecFactory) TestingTypeManager(io.trino.spi.type.TestingTypeManager) Test(org.testng.annotations.Test)
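
Stripped of the HiveSplit specifics, the wiring is the reusable part: register a TypeDeserializer so Jackson can rebuild Type-typed fields through a TypeManager. A sketch, where MyHandle and original are hypothetical stand-ins for any @JsonCreator-annotated class carrying a Type field:

    // MyHandle is hypothetical; the wiring mirrors the test above.
    ObjectMapperProvider provider = new ObjectMapperProvider();
    provider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(new TestingTypeManager())));
    JsonCodec<MyHandle> codec = new JsonCodecFactory(provider).jsonCodec(MyHandle.class);
    MyHandle copy = codec.fromJson(codec.toJson(original));
    assertEquals(copy, original);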

Example 14 with TestingTypeManager

Use of io.trino.spi.type.TestingTypeManager in the trino project by trinodb.

From the class TestIcebergOrcMetricsCollection, method createQueryRunner.

@Override
protected QueryRunner createQueryRunner() throws Exception {
    Session session = testSessionBuilder()
            .setCatalog("iceberg")
            .setSchema("test_schema")
            .setSystemProperty(TASK_CONCURRENCY, "1")
            .setSystemProperty(TASK_WRITER_COUNT, "1")
            .setSystemProperty(MAX_DRIVERS_PER_TASK, "1")
            .setCatalogSessionProperty("iceberg", "orc_string_statistics_limit", Integer.MAX_VALUE + "B")
            .build();
    DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build();
    File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data").toFile();
    HdfsConfig hdfsConfig = new HdfsConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    HiveMetastore metastore = new FileHiveMetastore(
            new NodeVersion("test_version"),
            hdfsEnvironment,
            new MetastoreConfig(),
            new FileHiveMetastoreConfig()
                    .setCatalogDirectory(baseDir.toURI().toString())
                    .setMetastoreUser("test"));
    tableOperationsProvider = new FileMetastoreTableOperationsProvider(new HdfsFileIoProvider(hdfsEnvironment));
    trinoCatalog = new TrinoHiveCatalog(
            new CatalogName("catalog"),
            memoizeMetastore(metastore, 1000),
            hdfsEnvironment,
            new TestingTypeManager(),
            tableOperationsProvider,
            "trino-version",
            false,
            false,
            false);
    queryRunner.installPlugin(new TestingIcebergPlugin(Optional.of(metastore), Optional.empty(), EMPTY_MODULE));
    queryRunner.createCatalog("iceberg", "iceberg");
    queryRunner.installPlugin(new TpchPlugin());
    queryRunner.createCatalog("tpch", "tpch");
    queryRunner.execute("CREATE SCHEMA test_schema");
    return queryRunner;
}
Also used : HdfsConfigurationInitializer(io.trino.plugin.hive.HdfsConfigurationInitializer) DistributedQueryRunner(io.trino.testing.DistributedQueryRunner) HiveHdfsConfiguration(io.trino.plugin.hive.HiveHdfsConfiguration) MetastoreConfig(io.trino.plugin.hive.metastore.MetastoreConfig) FileHiveMetastoreConfig(io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig) HiveMetastore(io.trino.plugin.hive.metastore.HiveMetastore) FileHiveMetastore(io.trino.plugin.hive.metastore.file.FileHiveMetastore) TpchPlugin(io.trino.plugin.tpch.TpchPlugin) HdfsConfig(io.trino.plugin.hive.HdfsConfig) HdfsConfiguration(io.trino.plugin.hive.HdfsConfiguration) NoHdfsAuthentication(io.trino.plugin.hive.authentication.NoHdfsAuthentication) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) NodeVersion(io.trino.plugin.hive.NodeVersion) FileMetastoreTableOperationsProvider(io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider) TrinoHiveCatalog(io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog) CatalogName(io.trino.plugin.base.CatalogName) File(java.io.File) TestingTypeManager(io.trino.spi.type.TestingTypeManager) TestingConnectorSession(io.trino.testing.TestingConnectorSession) Session(io.trino.Session)
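
TrinoHiveCatalog takes a TypeManager, and this setup passes TestingTypeManager, a lightweight implementation from trino-spi's test artifacts that resolves only a small, fixed set of built-in types. A minimal sketch of the behavior the setup relies on (assuming BIGINT is among the supported types):

    // Resolves a built-in type by signature; parameterized or custom types
    // are not supported by this testing implementation.
    TypeManager typeManager = new TestingTypeManager();
    Type bigint = typeManager.getType(new TypeSignature(StandardTypes.BIGINT));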

Example 15 with TestingTypeManager

Use of io.trino.spi.type.TestingTypeManager in the trino project by trinodb.

From the class TestIcebergMergeAppend, method createQueryRunner.

@Override
protected QueryRunner createQueryRunner() throws Exception {
    DistributedQueryRunner queryRunner = IcebergQueryRunner.createIcebergQueryRunner();
    HdfsConfig hdfsConfig = new HdfsConfig();
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
    HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
    File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data").toFile();
    HiveMetastore metastore = new FileHiveMetastore(
            new NodeVersion("testversion"),
            hdfsEnvironment,
            new MetastoreConfig(),
            new FileHiveMetastoreConfig()
                    .setCatalogDirectory(baseDir.toURI().toString())
                    .setMetastoreUser("test"));
    tableOperationsProvider = new FileMetastoreTableOperationsProvider(new HdfsFileIoProvider(hdfsEnvironment));
    trinoCatalog = new TrinoHiveCatalog(
            new CatalogName("catalog"),
            memoizeMetastore(metastore, 1000),
            hdfsEnvironment,
            new TestingTypeManager(),
            tableOperationsProvider,
            "trino-version",
            false,
            false,
            false);
    return queryRunner;
}
Also used : HdfsConfigurationInitializer(io.trino.plugin.hive.HdfsConfigurationInitializer) DistributedQueryRunner(io.trino.testing.DistributedQueryRunner) HiveHdfsConfiguration(io.trino.plugin.hive.HiveHdfsConfiguration) MetastoreConfig(io.trino.plugin.hive.metastore.MetastoreConfig) FileHiveMetastoreConfig(io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig) HiveMetastore(io.trino.plugin.hive.metastore.HiveMetastore) FileHiveMetastore(io.trino.plugin.hive.metastore.file.FileHiveMetastore) HdfsConfig(io.trino.plugin.hive.HdfsConfig) HdfsConfiguration(io.trino.plugin.hive.HdfsConfiguration) NoHdfsAuthentication(io.trino.plugin.hive.authentication.NoHdfsAuthentication) HdfsEnvironment(io.trino.plugin.hive.HdfsEnvironment) NodeVersion(io.trino.plugin.hive.NodeVersion) FileMetastoreTableOperationsProvider(io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider) TrinoHiveCatalog(io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog) CatalogName(io.trino.plugin.base.CatalogName) File(java.io.File) TestingTypeManager(io.trino.spi.type.TestingTypeManager)

Aggregations

TestingTypeManager (io.trino.spi.type.TestingTypeManager): 21
Type (io.trino.spi.type.Type): 14
Test (org.testng.annotations.Test): 14
ObjectMapperProvider (io.airlift.json.ObjectMapperProvider): 8
ArrayType (io.trino.spi.type.ArrayType): 7
MapType (io.trino.spi.type.MapType): 6
RowType (io.trino.spi.type.RowType): 6
TestingTypeDeserializer (io.trino.spi.type.TestingTypeDeserializer): 6
Schema (org.apache.avro.Schema): 6
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5
SimpleModule (com.fasterxml.jackson.databind.module.SimpleModule): 5
Block (io.trino.spi.block.Block): 5
CatalogName (io.trino.plugin.base.CatalogName): 4
FileMetastoreTableOperationsProvider (io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider): 4
TrinoHiveCatalog (io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog): 4
TestingBlockEncodingSerde (io.trino.spi.block.TestingBlockEncodingSerde): 4
TestingBlockJsonSerde (io.trino.spi.block.TestingBlockJsonSerde): 4
JsonCodecFactory (io.airlift.json.JsonCodecFactory): 3
HdfsConfig (io.trino.plugin.hive.HdfsConfig): 3
HdfsConfiguration (io.trino.plugin.hive.HdfsConfiguration): 3