Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class DataTypeExtractorTest, method getPojoWithCustomOrderDataType.
/**
 * Testing data type shared with the Scala tests.
 */
public static DataType getPojoWithCustomOrderDataType(Class<?> pojoClass) {
    final StructuredType.Builder builder = StructuredType.newBuilder(pojoClass);
    builder.attributes(
            Arrays.asList(
                    new StructuredAttribute("z", new BigIntType()),
                    new StructuredAttribute("y", new BooleanType()),
                    new StructuredAttribute("x", new IntType())));
    builder.setFinal(true);
    builder.setInstantiable(true);
    final StructuredType structuredType = builder.build();
    final List<DataType> fieldDataTypes =
            Arrays.asList(DataTypes.BIGINT(), DataTypes.BOOLEAN(), DataTypes.INT());
    return new FieldsDataType(structuredType, pojoClass, fieldDataTypes);
}
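Note how IntType shows up twice: as the logical type of attribute "x" and, via DataTypes.INT(), as the corresponding field data type. As a quick orientation, a minimal sketch of the IntType API itself; both constructors and asSummaryString() are part of the class:

import org.apache.flink.table.types.logical.IntType;

// IntType is nullable by default; the boolean constructor controls nullability.
IntType nullableInt = new IntType();
IntType notNullInt = new IntType(false);
// asSummaryString() renders the SQL-style form of the type:
// nullableInt.asSummaryString() -> "INT"
// notNullInt.asSummaryString()  -> "INT NOT NULL"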
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class RelDataTypeJsonSerdeTest, method testRelDataTypeSerde.
// --------------------------------------------------------------------------------------------
// Test data
// --------------------------------------------------------------------------------------------
public static List<RelDataType> testRelDataTypeSerde() {
    // the nullability of the values in the list does not matter here
    final List<RelDataType> types =
            Arrays.asList(
                    FACTORY.createSqlType(SqlTypeName.BOOLEAN),
                    FACTORY.createSqlType(SqlTypeName.TINYINT),
                    FACTORY.createSqlType(SqlTypeName.SMALLINT),
                    FACTORY.createSqlType(SqlTypeName.INTEGER),
                    FACTORY.createSqlType(SqlTypeName.BIGINT),
                    FACTORY.createSqlType(SqlTypeName.DECIMAL, 10, 3),
                    FACTORY.createSqlType(SqlTypeName.DECIMAL, 19, 0),
                    FACTORY.createSqlType(SqlTypeName.DECIMAL, 38, 19),
                    FACTORY.createSqlType(SqlTypeName.FLOAT),
                    FACTORY.createSqlType(SqlTypeName.DOUBLE),
                    FACTORY.createSqlType(SqlTypeName.DATE),
                    FACTORY.createSqlType(SqlTypeName.TIME),
                    FACTORY.createSqlType(SqlTypeName.TIMESTAMP),
                    FACTORY.createSqlType(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE),
                    FACTORY.createSqlIntervalType(
                            new SqlIntervalQualifier(
                                    TimeUnit.DAY,
                                    2,
                                    TimeUnit.MINUTE,
                                    RelDataType.PRECISION_NOT_SPECIFIED,
                                    SqlParserPos.ZERO)),
                    FACTORY.createSqlIntervalType(
                            new SqlIntervalQualifier(
                                    TimeUnit.DAY, 6, TimeUnit.SECOND, 9, SqlParserPos.ZERO)),
                    FACTORY.createSqlIntervalType(
                            new SqlIntervalQualifier(
                                    TimeUnit.HOUR,
                                    RelDataType.PRECISION_NOT_SPECIFIED,
                                    TimeUnit.SECOND,
                                    9,
                                    SqlParserPos.ZERO)),
                    FACTORY.createSqlIntervalType(
                            new SqlIntervalQualifier(
                                    TimeUnit.MINUTE,
                                    RelDataType.PRECISION_NOT_SPECIFIED,
                                    TimeUnit.SECOND,
                                    0,
                                    SqlParserPos.ZERO)),
                    FACTORY.createSqlIntervalType(
                            new SqlIntervalQualifier(
                                    TimeUnit.SECOND,
                                    RelDataType.PRECISION_NOT_SPECIFIED,
                                    TimeUnit.SECOND,
                                    6,
                                    SqlParserPos.ZERO)),
                    FACTORY.createSqlType(SqlTypeName.CHAR),
                    FACTORY.createSqlType(SqlTypeName.CHAR, 0),
                    FACTORY.createSqlType(SqlTypeName.CHAR, 32),
                    FACTORY.createSqlType(SqlTypeName.VARCHAR),
                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 0),
                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 10),
                    FACTORY.createSqlType(SqlTypeName.BINARY),
                    FACTORY.createSqlType(SqlTypeName.BINARY, 0),
                    FACTORY.createSqlType(SqlTypeName.BINARY, 100),
                    FACTORY.createSqlType(SqlTypeName.VARBINARY),
                    FACTORY.createSqlType(SqlTypeName.VARBINARY, 0),
                    FACTORY.createSqlType(SqlTypeName.VARBINARY, 1000),
                    FACTORY.createSqlType(SqlTypeName.NULL),
                    FACTORY.createSqlType(SqlTypeName.SYMBOL),
                    FACTORY.createMultisetType(FACTORY.createSqlType(SqlTypeName.VARCHAR), -1),
                    FACTORY.createArrayType(FACTORY.createSqlType(SqlTypeName.VARCHAR, 16), -1),
                    FACTORY.createArrayType(
                            FACTORY.createArrayType(
                                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 16), -1),
                            -1),
                    FACTORY.createMapType(
                            FACTORY.createSqlType(SqlTypeName.INTEGER),
                            FACTORY.createSqlType(SqlTypeName.VARCHAR, 10)),
                    FACTORY.createMapType(
                            FACTORY.createMapType(
                                    FACTORY.createSqlType(SqlTypeName.INTEGER),
                                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 10)),
                            FACTORY.createArrayType(
                                    FACTORY.createMapType(
                                            FACTORY.createSqlType(SqlTypeName.INTEGER),
                                            FACTORY.createSqlType(SqlTypeName.VARCHAR, 10)),
                                    -1)),
                    // simple struct type
                    FACTORY.createStructType(
                            StructKind.PEEK_FIELDS_NO_EXPAND,
                            Arrays.asList(
                                    FACTORY.createSqlType(SqlTypeName.INTEGER),
                                    FACTORY.createSqlType(SqlTypeName.DECIMAL, 10, 3)),
                            Arrays.asList("f1", "f2")),
                    // struct type with array type
                    FACTORY.createStructType(
                            StructKind.PEEK_FIELDS_NO_EXPAND,
                            Arrays.asList(
                                    FACTORY.createSqlType(SqlTypeName.VARCHAR),
                                    FACTORY.createArrayType(
                                            FACTORY.createSqlType(SqlTypeName.VARCHAR, 16), -1)),
                            Arrays.asList("f1", "f2")),
                    // nested struct type
                    FACTORY.createStructType(
                            StructKind.PEEK_FIELDS_NO_EXPAND,
                            Arrays.asList(
                                    FACTORY.createStructType(
                                            StructKind.PEEK_FIELDS_NO_EXPAND,
                                            Arrays.asList(
                                                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 5),
                                                    FACTORY.createSqlType(SqlTypeName.VARCHAR, 10)),
                                            Arrays.asList("f1", "f2")),
                                    FACTORY.createArrayType(
                                            FACTORY.createSqlType(SqlTypeName.VARCHAR, 16), -1)),
                            Arrays.asList("f3", "f4")),
                    FACTORY.createRowtimeIndicatorType(true, false),
                    FACTORY.createRowtimeIndicatorType(true, true),
                    FACTORY.createProctimeIndicatorType(true),
                    FACTORY.createFieldTypeFromLogicalType(
                            StructuredType.newBuilder(
                                            ObjectIdentifier.of("cat", "db", "structuredType"),
                                            DataTypeJsonSerdeTest.PojoClass.class)
                                    .attributes(
                                            Arrays.asList(
                                                    new StructuredType.StructuredAttribute(
                                                            "f0", new IntType(true)),
                                                    new StructuredType.StructuredAttribute(
                                                            "f1", new BigIntType(true)),
                                                    new StructuredType.StructuredAttribute(
                                                            "f2", new VarCharType(200), "desc")))
                                    .comparison(StructuredType.StructuredComparison.FULL)
                                    .setFinal(false)
                                    .setInstantiable(false)
                                    .description("description for StructuredType")
                                    .build()));
    final List<RelDataType> mutableTypes = new ArrayList<>(types.size() * 2);
    for (RelDataType type : types) {
        mutableTypes.add(FACTORY.createTypeWithNullability(type, true));
        mutableTypes.add(FACTORY.createTypeWithNullability(type, false));
    }
    mutableTypes.add(
            FACTORY.createTypeWithNullability(
                    FACTORY.createFieldTypeFromLogicalType(
                            new RawType<>(true, Void.class, VoidSerializer.INSTANCE)),
                    true));
    mutableTypes.add(
            FACTORY.createTypeWithNullability(
                    FACTORY.createFieldTypeFromLogicalType(
                            new RawType<>(false, Void.class, VoidSerializer.INSTANCE)),
                    false));
    mutableTypes.add(
            FACTORY.createTypeWithNullability(
                    FACTORY.createFieldTypeFromLogicalType(
                            new RawType<>(true, Void.class, VoidSerializer.INSTANCE)),
                    false));
    return mutableTypes;
}
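For orientation, a hedged sketch of how a list like this typically drives a round-trip assertion in a JUnit 5 parameterized test; toJson and fromJson below are assumed placeholders for the actual serde plumbing, not Flink API:

import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

@ParameterizedTest
@MethodSource("testRelDataTypeSerde")
void testRelDataTypeSerde(RelDataType relDataType) throws IOException {
    final String json = toJson(relDataType);     // assumed serialization helper
    final RelDataType restored = fromJson(json); // assumed deserialization helper
    // Calcite type factories canonize instances, so an identity comparison
    // against the original type is plausible here.
    assertThat(restored).isSameAs(relDataType);
}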
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class MapToMapAndMultisetToMultisetCastRule, method generateCodeBlockInternal.
/* Example generated code for MULTISET<INT> -> MULTISET<FLOAT>:

org.apache.flink.table.data.MapData _myInput = ((org.apache.flink.table.data.MapData)(_myInputObj));
boolean _myInputIsNull = _myInputObj == null;
boolean isNull$0;
org.apache.flink.table.data.MapData result$1;
float result$2;
isNull$0 = _myInputIsNull;
if (!isNull$0) {
    java.util.Map map$838 = new java.util.HashMap();
    for (int i$841 = 0; i$841 < _myInput.size(); i$841++) {
        java.lang.Float key$839 = null;
        java.lang.Integer value$840 = null;
        if (!_myInput.keyArray().isNullAt(i$841)) {
            result$2 = ((float)(_myInput.keyArray().getInt(i$841)));
            key$839 = result$2;
        }
        value$840 = _myInput.valueArray().getInt(i$841);
        map$838.put(key$839, value$840);
    }
    result$1 = new org.apache.flink.table.data.GenericMapData(map$838);
    isNull$0 = result$1 == null;
} else {
    result$1 = null;
}
return result$1;
*/
@Override
protected String generateCodeBlockInternal(
        CodeGeneratorCastRule.Context context,
        String inputTerm,
        String returnVariable,
        LogicalType inputLogicalType,
        LogicalType targetLogicalType) {
    final LogicalType innerInputKeyType;
    final LogicalType innerInputValueType;
    final LogicalType innerTargetKeyType;
    final LogicalType innerTargetValueType;
    if (inputLogicalType.is(LogicalTypeRoot.MULTISET)) {
        innerInputKeyType = ((MultisetType) inputLogicalType).getElementType();
        innerInputValueType = new IntType(false);
        innerTargetKeyType = ((MultisetType) targetLogicalType).getElementType();
        innerTargetValueType = new IntType(false);
    } else {
        innerInputKeyType = ((MapType) inputLogicalType).getKeyType();
        innerInputValueType = ((MapType) inputLogicalType).getValueType();
        innerTargetKeyType = ((MapType) targetLogicalType).getKeyType();
        innerTargetValueType = ((MapType) targetLogicalType).getValueType();
    }
    final String innerTargetKeyTypeTerm = boxedTypeTermForType(innerTargetKeyType);
    final String innerTargetValueTypeTerm = boxedTypeTermForType(innerTargetValueType);
    final String keyArrayTerm = methodCall(inputTerm, "keyArray");
    final String valueArrayTerm = methodCall(inputTerm, "valueArray");
    final String size = methodCall(inputTerm, "size");
    final String map = newName("map");
    final String key = newName("key");
    final String value = newName("value");
    return new CastRuleUtils.CodeWriter()
            .declStmt(className(Map.class), map, constructorCall(HashMap.class))
            .forStmt(
                    size,
                    (index, codeWriter) -> {
                        final CastCodeBlock keyCodeBlock =
                                CastRuleProvider.generateAlwaysNonNullCodeBlock(
                                        context,
                                        rowFieldReadAccess(index, keyArrayTerm, innerInputKeyType),
                                        innerInputKeyType,
                                        innerTargetKeyType);
                        assert keyCodeBlock != null;
                        final CastCodeBlock valueCodeBlock =
                                CastRuleProvider.generateAlwaysNonNullCodeBlock(
                                        context,
                                        rowFieldReadAccess(
                                                index, valueArrayTerm, innerInputValueType),
                                        innerInputValueType,
                                        innerTargetValueType);
                        assert valueCodeBlock != null;
                        codeWriter
                                .declStmt(innerTargetKeyTypeTerm, key, null)
                                .declStmt(innerTargetValueTypeTerm, value, null);
                        if (innerTargetKeyType.isNullable()) {
                            codeWriter.ifStmt(
                                    "!" + methodCall(keyArrayTerm, "isNullAt", index),
                                    thenWriter ->
                                            thenWriter
                                                    .append(keyCodeBlock)
                                                    .assignStmt(
                                                            key, keyCodeBlock.getReturnTerm()));
                        } else {
                            codeWriter
                                    .append(keyCodeBlock)
                                    .assignStmt(key, keyCodeBlock.getReturnTerm());
                        }
                        if (inputLogicalType.is(LogicalTypeRoot.MAP)
                                && innerTargetValueType.isNullable()) {
                            codeWriter.ifStmt(
                                    "!" + methodCall(valueArrayTerm, "isNullAt", index),
                                    thenWriter ->
                                            thenWriter
                                                    .append(valueCodeBlock)
                                                    .assignStmt(
                                                            value,
                                                            valueCodeBlock.getReturnTerm()));
                        } else {
                            codeWriter
                                    .append(valueCodeBlock)
                                    .assignStmt(value, valueCodeBlock.getReturnTerm());
                        }
                        codeWriter.stmt(methodCall(map, "put", key, value));
                    })
            .assignStmt(returnVariable, constructorCall(GenericMapData.class, map))
            .toString();
}
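The MULTISET branch above hard-codes new IntType(false) because Flink backs a multiset with the same MapData structure as a map, using a non-null INT multiplicity as the value; only the element (key) type actually takes part in the cast. A minimal sketch of that representation, built with the public GenericMapData constructor:

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.MapData;

// MULTISET<INT> as MapData: element -> occurrence count. The count column is
// the non-null INT that the cast rule models with new IntType(false).
Map<Object, Object> counts = new HashMap<>();
counts.put(1, 2); // element 1 occurs twice
counts.put(5, 1); // element 5 occurs once
MapData multiset = new GenericMapData(counts);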
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class RowDataSerializerTest, method testRowDataSerializerWithComplexTypes.
private static Object[] testRowDataSerializerWithComplexTypes() {
    InternalTypeInfo<RowData> typeInfo =
            InternalTypeInfo.ofFields(
                    new IntType(),
                    new DoubleType(),
                    VarCharType.STRING_TYPE,
                    new ArrayType(new IntType()),
                    new MapType(new IntType(), new IntType()));
    GenericRowData[] data =
            new GenericRowData[] {
                createRow(null, null, null, null, null),
                createRow(0, null, null, null, null),
                createRow(0, 0.0, null, null, null),
                createRow(0, 0.0, fromString("a"), null, null),
                createRow(1, 0.0, fromString("a"), null, null),
                createRow(1, 1.0, fromString("a"), null, null),
                createRow(1, 1.0, fromString("b"), null, null),
                createRow(1, 1.0, fromString("b"), createArray(1), createMap(new int[] {1}, new int[] {1})),
                createRow(1, 1.0, fromString("b"), createArray(1, 2), createMap(new int[] {1, 4}, new int[] {1, 2})),
                createRow(1, 1.0, fromString("b"), createArray(1, 2, 3), createMap(new int[] {1, 5}, new int[] {1, 3})),
                createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4), createMap(new int[] {1, 6}, new int[] {1, 4})),
                createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4, 5), createMap(new int[] {1, 7}, new int[] {1, 5})),
                createRow(1, 1.0, fromString("b"), createArray(1, 2, 3, 4, 5, 6), createMap(new int[] {1, 8}, new int[] {1, 6}))
            };
    RowDataSerializer serializer = typeInfo.toRowSerializer();
    return new Object[] {serializer, data};
}
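A hedged sketch of pushing one of these rows through the serializer and back, using the DataOutputSerializer and DataInputDeserializer utilities from flink-core (row index 7 is just an arbitrary pick from the array above):

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

Object[] serializerAndData = testRowDataSerializerWithComplexTypes();
RowDataSerializer serializer = (RowDataSerializer) serializerAndData[0];
GenericRowData[] rows = (GenericRowData[]) serializerAndData[1];

// Write the row with array and map fields to a byte buffer and read it back.
DataOutputSerializer out = new DataOutputSerializer(256);
serializer.serialize(rows[7], out);
DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
RowData copy = serializer.deserialize(in);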
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class RowTimeSortOperatorTest, method testOnlySortOnRowTime.
@Test
public void testOnlySortOnRowTime() throws Exception {
    InternalTypeInfo<RowData> inputRowType =
            InternalTypeInfo.ofFields(
                    new BigIntType(), new BigIntType(), VarCharType.STRING_TYPE, new IntType());
    int rowTimeIdx = 0;
    RowDataHarnessAssertor assertor =
            new RowDataHarnessAssertor(inputRowType.toRowFieldTypes());
    RowTimeSortOperator operator = createSortOperator(inputRowType, rowTimeIdx, null);
    OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
            createTestHarness(operator);
    testHarness.open();
    testHarness.processElement(insertRecord(3L, 2L, "Hello world", 3));
    testHarness.processElement(insertRecord(2L, 2L, "Hello", 2));
    testHarness.processElement(insertRecord(6L, 3L, "Luke Skywalker", 6));
    testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 5));
    testHarness.processElement(insertRecord(7L, 4L, "Comment#1", 7));
    testHarness.processElement(insertRecord(9L, 4L, "Comment#3", 9));
    testHarness.processElement(insertRecord(10L, 4L, "Comment#4", 10));
    testHarness.processElement(insertRecord(8L, 4L, "Comment#2", 8));
    testHarness.processElement(insertRecord(1L, 1L, "Hi", 2));
    testHarness.processElement(insertRecord(1L, 1L, "Hi", 1));
    testHarness.processElement(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
    testHarness.processWatermark(new Watermark(9L));
    List<Object> expectedOutput = new ArrayList<>();
    expectedOutput.add(insertRecord(1L, 1L, "Hi", 2));
    expectedOutput.add(insertRecord(1L, 1L, "Hi", 1));
    expectedOutput.add(insertRecord(2L, 2L, "Hello", 2));
    expectedOutput.add(insertRecord(3L, 2L, "Hello world", 3));
    expectedOutput.add(insertRecord(4L, 3L, "Helloworld, how are you?", 4));
    expectedOutput.add(insertRecord(5L, 3L, "I am fine.", 5));
    expectedOutput.add(insertRecord(6L, 3L, "Luke Skywalker", 6));
    expectedOutput.add(insertRecord(7L, 4L, "Comment#1", 7));
    expectedOutput.add(insertRecord(8L, 4L, "Comment#2", 8));
    expectedOutput.add(insertRecord(9L, 4L, "Comment#3", 9));
    expectedOutput.add(new Watermark(9L));
    // take a snapshot so the data can be recovered from state below
    OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
    testHarness.close();
    expectedOutput.clear();
    operator = createSortOperator(inputRowType, rowTimeIdx, null);
    testHarness = createTestHarness(operator);
    testHarness.initializeState(snapshot);
    testHarness.open();
    // late data will be dropped
    testHarness.processElement(insertRecord(5L, 3L, "I am fine.", 6));
    testHarness.processWatermark(new Watermark(10L));
    expectedOutput.add(insertRecord(10L, 4L, "Comment#4", 10));
    expectedOutput.add(new Watermark(10L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
    // these watermarks trigger no further records; they are only forwarded
    testHarness.processWatermark(new Watermark(11L));
    testHarness.processWatermark(new Watermark(12L));
    expectedOutput.add(new Watermark(11L));
    expectedOutput.add(new Watermark(12L));
    assertor.assertOutputEquals("output wrong.", expectedOutput, testHarness.getOutput());
}
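For reference, insertRecord above comes from Flink's StreamRecordUtils test helper; a rough hand-rolled equivalent for the first element, assuming the usual internal representations (VARCHAR fields must be StringData, and GenericRowData defaults to RowKind.INSERT):

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;

// Equivalent of insertRecord(3L, 2L, "Hello world", 3): wrap internal field
// values into a GenericRowData inside a StreamRecord.
StreamRecord<RowData> record =
        new StreamRecord<>(
                GenericRowData.of(3L, 2L, StringData.fromString("Hello world"), 3));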