Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.
The class ArrowReaderWriterTest, method getTestData:
@Override
public RowData[] getTestData() {
RowData row1 = StreamRecordUtils.row(
        (byte) 1, (short) 2, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
        DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000, 100000),
        new GenericArrayData(new StringData[] {
                StringData.fromString("hello"), StringData.fromString("中文"), null }),
        GenericRowData.of(
                1,
                StringData.fromString("hello"),
                new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                TimestampData.fromEpochMillis(3600000),
                GenericRowData.of(1, StringData.fromString("hello"))));
BinaryRowData row2 = StreamRecordUtils.binaryrow(
        (byte) 1, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
        DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
        Tuple2.of(
                new GenericArrayData(new String[] { null, null, null }),
                new ArrayDataSerializer(new VarCharType())),
        Tuple2.of(
                GenericRowData.of(
                        1,
                        null,
                        new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                        null,
                        GenericRowData.of(1, StringData.fromString("hello"))),
                new RowDataSerializer(rowFieldType)));
RowData row3 = StreamRecordUtils.row(
        null, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(),
        DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000),
        TimestampData.fromEpochMillis(3600000, 100000),
        TimestampData.fromEpochMillis(3600000, 100000),
        new GenericArrayData(new String[] { null, null, null }),
        GenericRowData.of(
                1,
                null,
                new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                null,
                null));
BinaryRowData row4 = StreamRecordUtils.binaryrow(
        (byte) 1, null, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(),
        DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000,
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 0),
        Tuple2.of(TimestampData.fromEpochMillis(3600000), 2),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4),
        Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8),
        Tuple2.of(
                new GenericArrayData(new StringData[] {
                        StringData.fromString("hello"), StringData.fromString("中文"), null }),
                new ArrayDataSerializer(new VarCharType())),
        Tuple2.of(
                GenericRowData.of(
                        1,
                        null,
                        new GenericArrayData(new StringData[] { StringData.fromString("hello") }),
                        null,
                        null),
                new RowDataSerializer(rowFieldType)));
RowData row5 = StreamRecordUtils.row(new Object[fieldTypes.size()]);
BinaryRowData row6 = StreamRecordUtils.binaryrow(new Object[fieldTypes.size()]);
return new RowData[] { row1, row2, row3, row4, row5, row6 };
}
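In this test, VarCharType appears only as the element type handed to the ArrayDataSerializer for the binary rows' string-array field. Below is a minimal sketch, not taken from the test, of how that element type ties the array data to its serializer; the method name and variable names are illustrative only, and the usual org.apache.flink.table.data and org.apache.flink.table.types.logical imports are assumed.
void varCharArraySketch() {
    // The array column is described by an ArrayType over VarCharType ...
    ArrayType stringArrayType = new ArrayType(new VarCharType(VarCharType.MAX_LENGTH));
    // ... and its serializer is built from that same element type, as in the test above.
    ArrayDataSerializer stringArraySerializer =
            new ArrayDataSerializer(stringArrayType.getElementType());
    // Array values carry StringData elements (null entries are allowed).
    GenericArrayData data =
            new GenericArrayData(new StringData[] { StringData.fromString("hello"), null });
}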
Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.
The class LogicalTypesTest, method testRowType:
@Test
public void testRowType() {
assertThat(
        new RowType(
                Arrays.asList(
                        new RowType.RowField("a", new VarCharType(), "Someone's desc."),
                        new RowType.RowField("b`", new TimestampType()))))
        .satisfies(
                baseAssertions(
                        "ROW<`a` VARCHAR(1) 'Someone''s desc.', `b``` TIMESTAMP(6)>",
                        "ROW<`a` VARCHAR(1) '...', `b``` TIMESTAMP(6)>",
                        new Class[] { Row.class },
                        new Class[] { Row.class },
                        new LogicalType[] { new VarCharType(), new TimestampType() },
                        new RowType(
                                Arrays.asList(
                                        new RowType.RowField("a", new VarCharType(), "Different desc."),
                                        new RowType.RowField("b`", new TimestampType())))));
assertThatThrownBy(
        () -> new RowType(
                Arrays.asList(
                        new RowType.RowField("b", new VarCharType()),
                        new RowType.RowField("b", new VarCharType()),
                        new RowType.RowField("a", new VarCharType()),
                        new RowType.RowField("a", new TimestampType()))))
        .isInstanceOf(ValidationException.class);
assertThatThrownBy(
        () -> new RowType(
                Collections.singletonList(new RowType.RowField("", new VarCharType()))))
        .isInstanceOf(ValidationException.class);
}
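The "VARCHAR(1)" in the expected strings comes from the no-argument VarCharType constructor, whose default length is 1. A minimal sketch of that behavior follows; it is not part of the test, the method name is illustrative, and only the org.apache.flink.table.types.logical imports are assumed.
void defaultVarCharLengthSketch() {
    VarCharType defaultType = new VarCharType();      // VARCHAR(1), nullable by default
    VarCharType sized = new VarCharType(false, 255);  // VARCHAR(255) NOT NULL
    // defaultType.asSerializableString() returns "VARCHAR(1)"
    // sized.asSerializableString() returns "VARCHAR(255) NOT NULL"
}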
Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.
The class LogicalTypesTest, method testEmptyStringLiterals:
@Test
public void testEmptyStringLiterals() {
final CharType charType = CharType.ofEmptyLiteral();
final VarCharType varcharType = VarCharType.ofEmptyLiteral();
final BinaryType binaryType = BinaryType.ofEmptyLiteral();
final VarBinaryType varBinaryType = VarBinaryType.ofEmptyLiteral();
// make the types nullable for testing
assertThat(charType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new CharType(1)));
assertThat(varcharType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new VarCharType(1)));
assertThat(binaryType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new BinaryType(1)));
assertThat(varBinaryType.copy(true)).satisfies(nonEqualityCheckWithOtherType(new VarBinaryType(1)));
assertThat(charType).hasSummaryString("CHAR(0) NOT NULL");
assertThat(varcharType).hasSummaryString("VARCHAR(0) NOT NULL");
assertThat(binaryType).hasSummaryString("BINARY(0) NOT NULL");
assertThat(varBinaryType).hasSummaryString("VARBINARY(0) NOT NULL");
assertThat(charType).hasNoSerializableString();
assertThat(varcharType).hasNoSerializableString();
assertThat(binaryType).hasNoSerializableString();
assertThat(varBinaryType).hasNoSerializableString();
}
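Zero-length character and binary types can only be obtained through the ofEmptyLiteral() factories; the public constructors reject a length of 0. A minimal sketch of the VARCHAR case, with an illustrative method name and under the same imports as the test:
void emptyVarCharLiteralSketch() {
    VarCharType empty = VarCharType.ofEmptyLiteral();
    // empty.getLength() returns 0 and empty.isNullable() returns false,
    // which is why the summary string above is "VARCHAR(0) NOT NULL".
    // new VarCharType(0) would instead throw a ValidationException.
    // empty.asSerializableString() is unsupported, matching hasNoSerializableString() above.
}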
Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.
The class LogicalTypeJsonSerdeTest, method testLogicalTypeSerde:
private static List<LogicalType> testLogicalTypeSerde() {
final List<LogicalType> types = Arrays.asList(
        new BooleanType(), new TinyIntType(), new SmallIntType(), new IntType(), new BigIntType(),
        new FloatType(), new DoubleType(), new DecimalType(10), new DecimalType(15, 5),
        CharType.ofEmptyLiteral(), new CharType(), new CharType(5),
        VarCharType.ofEmptyLiteral(), new VarCharType(), new VarCharType(5),
        BinaryType.ofEmptyLiteral(), new BinaryType(), new BinaryType(100),
        VarBinaryType.ofEmptyLiteral(), new VarBinaryType(), new VarBinaryType(100),
        new DateType(), new TimeType(), new TimeType(3),
        new TimestampType(), new TimestampType(3),
        new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
        new TimestampType(false, TimestampKind.ROWTIME, 3),
        new ZonedTimestampType(), new ZonedTimestampType(3),
        new ZonedTimestampType(false, TimestampKind.ROWTIME, 3),
        new LocalZonedTimestampType(), new LocalZonedTimestampType(3),
        new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
        new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3),
        new DayTimeIntervalType(DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR),
        new DayTimeIntervalType(false, DayTimeIntervalType.DayTimeResolution.DAY_TO_HOUR, 3, 6),
        new YearMonthIntervalType(YearMonthIntervalType.YearMonthResolution.YEAR_TO_MONTH),
        new YearMonthIntervalType(false, YearMonthIntervalType.YearMonthResolution.MONTH, 2),
        new ZonedTimestampType(), new LocalZonedTimestampType(),
        new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
        new SymbolType<>(),
        new ArrayType(new IntType(false)),
        new ArrayType(new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3)),
        new ArrayType(new ZonedTimestampType(false, TimestampKind.ROWTIME, 3)),
        new ArrayType(new TimestampType()),
        new ArrayType(CharType.ofEmptyLiteral()),
        new ArrayType(VarCharType.ofEmptyLiteral()),
        new ArrayType(BinaryType.ofEmptyLiteral()),
        new ArrayType(VarBinaryType.ofEmptyLiteral()),
        new MapType(new BigIntType(), new IntType(false)),
        new MapType(new TimestampType(false, TimestampKind.ROWTIME, 3), new ZonedTimestampType()),
        new MapType(CharType.ofEmptyLiteral(), CharType.ofEmptyLiteral()),
        new MapType(VarCharType.ofEmptyLiteral(), VarCharType.ofEmptyLiteral()),
        new MapType(BinaryType.ofEmptyLiteral(), BinaryType.ofEmptyLiteral()),
        new MapType(VarBinaryType.ofEmptyLiteral(), VarBinaryType.ofEmptyLiteral()),
        new MultisetType(new IntType(false)),
        new MultisetType(new TimestampType()),
        new MultisetType(new TimestampType(true, TimestampKind.ROWTIME, 3)),
        new MultisetType(CharType.ofEmptyLiteral()),
        new MultisetType(VarCharType.ofEmptyLiteral()),
        new MultisetType(BinaryType.ofEmptyLiteral()),
        new MultisetType(VarBinaryType.ofEmptyLiteral()),
        RowType.of(new BigIntType(), new IntType(false), new VarCharType(200)),
        RowType.of(
                new LogicalType[] { new BigIntType(), new IntType(false), new VarCharType(200) },
                new String[] { "f1", "f2", "f3" }),
        RowType.of(
                new TimestampType(false, TimestampKind.ROWTIME, 3),
                new TimestampType(false, TimestampKind.REGULAR, 3),
                new ZonedTimestampType(false, TimestampKind.ROWTIME, 3),
                new ZonedTimestampType(false, TimestampKind.REGULAR, 3),
                new LocalZonedTimestampType(false, TimestampKind.ROWTIME, 3),
                new LocalZonedTimestampType(false, TimestampKind.PROCTIME, 3),
                new LocalZonedTimestampType(false, TimestampKind.REGULAR, 3)),
        RowType.of(
                CharType.ofEmptyLiteral(),
                VarCharType.ofEmptyLiteral(),
                BinaryType.ofEmptyLiteral(),
                VarBinaryType.ofEmptyLiteral()),
        // registered structured type
        StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "structuredType"), PojoClass.class)
                .attributes(
                        Arrays.asList(
                                new StructuredType.StructuredAttribute("f0", new IntType(true)),
                                new StructuredType.StructuredAttribute("f1", new BigIntType(true)),
                                new StructuredType.StructuredAttribute("f2", new VarCharType(200), "desc")))
                .comparison(StructuredType.StructuredComparison.FULL)
                .setFinal(false)
                .setInstantiable(false)
                .superType(
                        StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "structuredType2"))
                                .attributes(
                                        Collections.singletonList(
                                                new StructuredType.StructuredAttribute("f0", new BigIntType(false))))
                                .build())
                .description("description for StructuredType")
                .build(),
        // unregistered structured type
        StructuredType.newBuilder(PojoClass.class)
                .attributes(
                        Arrays.asList(
                                new StructuredType.StructuredAttribute("f0", new IntType(true)),
                                new StructuredType.StructuredAttribute("f1", new BigIntType(true)),
                                new StructuredType.StructuredAttribute("f2", new VarCharType(200), "desc")))
                .build(),
        // registered distinct type
        DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", "distinctType"), new VarCharType(5)).build(),
        DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", "distinctType"), new VarCharType(false, 5)).build(),
        // custom RawType
        new RawType<>(LocalDateTime.class, LocalDateTimeSerializer.INSTANCE),
        // external RawType
        new RawType<>(Row.class, ExternalSerializer.of(DataTypes.ROW(DataTypes.INT(), DataTypes.STRING()))));
final List<LogicalType> mutableTypes = new ArrayList<>(types);
// RawType for MapView
addRawTypesForMapView(mutableTypes, new VarCharType(100), new VarCharType(100));
addRawTypesForMapView(mutableTypes, new VarCharType(100), new BigIntType());
addRawTypesForMapView(mutableTypes, new BigIntType(), new VarCharType(100));
addRawTypesForMapView(mutableTypes, new BigIntType(), new BigIntType());
// RawType for ListView
addRawTypesForListView(mutableTypes, new VarCharType(100));
addRawTypesForListView(mutableTypes, new BigIntType());
// RawType for custom MapView
mutableTypes.add(
        DataViewUtils.adjustDataViews(
                        MapView.newMapViewDataType(
                                DataTypes.STRING().toInternal(),
                                DataTypes.STRING().bridgedTo(byte[].class)),
                        false)
                .getLogicalType());
final List<LogicalType> allTypes = new ArrayList<>();
// consider nullable
for (LogicalType type : mutableTypes) {
allTypes.add(type.copy(true));
allTypes.add(type.copy(false));
}
// ignore nullable for NullType
allTypes.add(new NullType());
return allTypes;
}
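The nullable/non-nullable expansion at the end relies on LogicalType#copy(boolean), which every type in the list supports. A minimal sketch with one of the VarCharType entries (illustrative names, not part of the test):
void nullabilityCopySketch() {
    VarCharType base = new VarCharType(200);    // VARCHAR(200), nullable by default
    LogicalType notNull = base.copy(false);     // VARCHAR(200) NOT NULL
    LogicalType nullable = notNull.copy(true);  // equal to the original VARCHAR(200)
}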
Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.
The class CastRuleProviderTest, method testResolvePredefinedToString:
@Test
void testResolvePredefinedToString() {
assertThat(CastRuleProvider.resolve(INT, new VarCharType(10))).isSameAs(CharVarCharTrimPadCastRule.INSTANCE);
assertThat(CastRuleProvider.resolve(INT, new CharType(10))).isSameAs(CharVarCharTrimPadCastRule.INSTANCE);
assertThat(CastRuleProvider.resolve(INT, STRING_TYPE)).isSameAs(NumericToStringCastRule.INSTANCE);
}
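The distinction being tested is between a length-bounded target (CHAR(10) or VARCHAR(10)), which needs trimming or padding after the numeric-to-string conversion, and STRING_TYPE, which here presumably refers to an unbounded VARCHAR. A minimal sketch of the two kinds of target type, assuming STRING_TYPE is VarCharType.STRING_TYPE (illustrative method name, not part of the test):
void castTargetTypesSketch() {
    VarCharType bounded = new VarCharType(10);        // VARCHAR(10), result may need trimming
    VarCharType unbounded = VarCharType.STRING_TYPE;  // VARCHAR(2147483647), i.e. STRING
    // unbounded.getLength() == VarCharType.MAX_LENGTH
}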