Usage of io.trino.plugin.base.TypeDeserializer in the trino project (trinodb):
the setup method of the TestCassandraColumnHandle class.
@BeforeClass
public void setup()
{
    // Register a deserializer for Trino Type so the codec can reconstruct
    // Type instances when reading CassandraColumnHandle back from JSON.
    ObjectMapperProvider provider = new ObjectMapperProvider();
    provider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(TESTING_TYPE_MANAGER)));
    codec = new JsonCodecFactory(provider).jsonCodec(CassandraColumnHandle.class);
}
Usage of io.trino.plugin.base.TypeDeserializer in the trino project (trinodb):
the testJsonRoundTrip method of the TestHiveSplit class.
@Test
public void testJsonRoundTrip()
{
    // Build a codec that can deserialize Trino Type instances embedded in HiveSplit JSON.
    ObjectMapperProvider mapperProvider = new ObjectMapperProvider();
    mapperProvider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(new TestingTypeManager())));
    JsonCodec<HiveSplit> splitCodec = new JsonCodecFactory(mapperProvider).jsonCodec(HiveSplit.class);

    Properties tableSchema = new Properties();
    tableSchema.setProperty("foo", "bar");
    tableSchema.setProperty("bar", "baz");

    ImmutableList<HivePartitionKey> keys = ImmutableList.of(
            new HivePartitionKey("a", "apple"),
            new HivePartitionKey("b", "42"));
    ImmutableList<HostAddress> hosts = ImmutableList.of(
            HostAddress.fromParts("127.0.0.1", 44),
            HostAddress.fromParts("127.0.0.1", 45));

    // Full-ACID split metadata with two delete deltas.
    AcidInfo.Builder acidBuilder = AcidInfo.builder(new Path("file:///data/fullacid"));
    acidBuilder.addDeleteDelta(new Path("file:///data/fullacid/delete_delta_0000004_0000004_0000"));
    acidBuilder.addDeleteDelta(new Path("file:///data/fullacid/delete_delta_0000007_0000007_0000"));
    AcidInfo acidInfo = acidBuilder.build().get();

    HiveSplit expected = new HiveSplit(
            "db",
            "table",
            "partitionId",
            "path",
            42,
            87,
            88,
            Instant.now().toEpochMilli(),
            tableSchema,
            keys,
            hosts,
            OptionalInt.empty(),
            0,
            true,
            TableToPartitionMapping.mapColumnsByIndex(ImmutableMap.of(1, new HiveTypeName("string"))),
            Optional.of(new HiveSplit.BucketConversion(
                    BUCKETING_V1,
                    32,
                    16,
                    ImmutableList.of(createBaseColumn("col", 5, HIVE_LONG, BIGINT, ColumnType.REGULAR, Optional.of("comment"))))),
            Optional.empty(),
            false,
            Optional.of(acidInfo),
            555534, // some non-standard value
            SplitWeight.fromProportion(2.0));

    // Serialize then deserialize, and compare every accessor field-by-field.
    HiveSplit actual = splitCodec.fromJson(splitCodec.toJson(expected));

    assertEquals(actual.getDatabase(), expected.getDatabase());
    assertEquals(actual.getTable(), expected.getTable());
    assertEquals(actual.getPartitionName(), expected.getPartitionName());
    assertEquals(actual.getPath(), expected.getPath());
    assertEquals(actual.getStart(), expected.getStart());
    assertEquals(actual.getLength(), expected.getLength());
    assertEquals(actual.getEstimatedFileSize(), expected.getEstimatedFileSize());
    assertEquals(actual.getSchema(), expected.getSchema());
    assertEquals(actual.getPartitionKeys(), expected.getPartitionKeys());
    assertEquals(actual.getAddresses(), expected.getAddresses());
    assertEquals(actual.getTableToPartitionMapping().getPartitionColumnCoercions(), expected.getTableToPartitionMapping().getPartitionColumnCoercions());
    assertEquals(actual.getTableToPartitionMapping().getTableToPartitionColumns(), expected.getTableToPartitionMapping().getTableToPartitionColumns());
    assertEquals(actual.getBucketConversion(), expected.getBucketConversion());
    assertEquals(actual.isForceLocalScheduling(), expected.isForceLocalScheduling());
    assertEquals(actual.isS3SelectPushdownEnabled(), expected.isS3SelectPushdownEnabled());
    assertEquals(actual.getAcidInfo().get(), expected.getAcidInfo().get());
    assertEquals(actual.getSplitNumber(), expected.getSplitNumber());
    assertEquals(actual.getSplitWeight(), expected.getSplitWeight());
}
Usage of io.trino.plugin.base.TypeDeserializer in the trino project (trinodb):
the testRoundTrip method of the TestHiveColumnHandle class.
private void testRoundTrip(HiveColumnHandle expected)
{
    // Codec with a Type deserializer registered, so the handle's Trino types
    // survive the JSON round trip.
    ObjectMapperProvider provider = new ObjectMapperProvider();
    provider.setJsonDeserializers(ImmutableMap.of(Type.class, new TypeDeserializer(TESTING_TYPE_MANAGER)));
    JsonCodec<HiveColumnHandle> handleCodec = new JsonCodecFactory(provider).jsonCodec(HiveColumnHandle.class);

    // Serialize, deserialize, then verify every accessor matches the input.
    HiveColumnHandle actual = handleCodec.fromJson(handleCodec.toJson(expected));

    assertEquals(actual.getBaseColumnName(), expected.getBaseColumnName());
    assertEquals(actual.getBaseHiveColumnIndex(), expected.getBaseHiveColumnIndex());
    assertEquals(actual.getBaseType(), expected.getBaseType());
    assertEquals(actual.getBaseHiveType(), expected.getBaseHiveType());
    assertEquals(actual.getName(), expected.getName());
    assertEquals(actual.getType(), expected.getType());
    assertEquals(actual.getHiveType(), expected.getHiveType());
    assertEquals(actual.getHiveColumnProjectionInfo(), expected.getHiveColumnProjectionInfo());
    assertEquals(actual.isPartitionKey(), expected.isPartitionKey());
}
Usage of io.trino.plugin.base.TypeDeserializer in the trino project (trinodb):
the createJsonCodec method of the TestRecordingHiveMetastore class.
private JsonCodec<HiveMetastoreRecording.Recording> createJsonCodec()
{
    // Recording JSON embeds both Block and Type values, so both need
    // custom (de)serializers: blocks via the Hive block encoding serde,
    // types via TypeDeserializer backed by a testing type manager.
    ObjectMapperProvider provider = new ObjectMapperProvider();
    provider.setJsonDeserializers(ImmutableMap.of(
            Block.class, new TestingBlockJsonSerde.Deserializer(new HiveBlockEncodingSerde()),
            Type.class, new TypeDeserializer(new TestingTypeManager())));
    provider.setJsonSerializers(ImmutableMap.of(
            Block.class, new TestingBlockJsonSerde.Serializer(new HiveBlockEncodingSerde())));
    return new JsonCodecFactory(provider).jsonCodec(HiveMetastoreRecording.Recording.class);
}
Aggregations