Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
The class Int2HashJoinOperatorTest, method joinAndAssert.
@SuppressWarnings("unchecked")
static void joinAndAssert(
        Object operator,
        MutableObjectIterator<BinaryRowData> input1,
        MutableObjectIterator<BinaryRowData> input2,
        int expectOutSize,
        int expectOutKeySize,
        int expectOutVal,
        boolean semiJoin)
        throws Exception {
    InternalTypeInfo<RowData> typeInfo = InternalTypeInfo.ofFields(new IntType(), new IntType());
    InternalTypeInfo<RowData> rowDataTypeInfo =
            InternalTypeInfo.ofFields(new IntType(), new IntType(), new IntType(), new IntType());
    TwoInputStreamTaskTestHarness<BinaryRowData, BinaryRowData, JoinedRowData> testHarness =
            new TwoInputStreamTaskTestHarness<>(
                    TwoInputStreamTask::new,
                    2,
                    1,
                    new int[] { 1, 2 },
                    typeInfo,
                    (TypeInformation) typeInfo,
                    rowDataTypeInfo);
    testHarness.memorySize = 36 * 1024 * 1024;
    testHarness.getExecutionConfig().enableObjectReuse();
    testHarness.setupOutputForSingletonOperatorChain();
    if (operator instanceof StreamOperator) {
        testHarness.getStreamConfig().setStreamOperator((StreamOperator<?>) operator);
    } else {
        testHarness.getStreamConfig().setStreamOperatorFactory((StreamOperatorFactory<?>) operator);
    }
    testHarness.getStreamConfig().setOperatorID(new OperatorID());
    testHarness.getStreamConfig()
            .setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.OPERATOR, 0.99);
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    Random random = new Random();
    do {
        BinaryRowData row1 = null;
        BinaryRowData row2 = null;
        if (random.nextInt(2) == 0) {
            row1 = input1.next();
            if (row1 == null) {
                row2 = input2.next();
            }
        } else {
            row2 = input2.next();
            if (row2 == null) {
                row1 = input1.next();
            }
        }
        if (row1 == null && row2 == null) {
            break;
        }
        if (row1 != null) {
            testHarness.processElement(new StreamRecord<>(row1), 0, 0);
        } else {
            testHarness.processElement(new StreamRecord<>(row2), 1, 0);
        }
    } while (true);
    testHarness.endInput(0, 0);
    testHarness.endInput(1, 0);
    testHarness.waitForInputProcessing();
    testHarness.waitForTaskCompletion();
    Queue<Object> actual = testHarness.getOutput();
    Assert.assertEquals("Output was not correct.", expectOutSize, actual.size());
    // Don't verify the output value when expectOutVal is -1
    if (expectOutVal != -1) {
        if (semiJoin) {
            HashMap<Integer, Long> map = new HashMap<>(expectOutKeySize);
            for (Object o : actual) {
                StreamRecord<RowData> record = (StreamRecord<RowData>) o;
                RowData row = record.getValue();
                int key = row.getInt(0);
                int val = row.getInt(1);
                Long contained = map.get(key);
                if (contained == null) {
                    contained = (long) val;
                } else {
                    contained = valueOf(contained + val);
                }
                map.put(key, contained);
            }
            Assert.assertEquals("Wrong number of keys", expectOutKeySize, map.size());
            for (Map.Entry<Integer, Long> entry : map.entrySet()) {
                long val = entry.getValue();
                int key = entry.getKey();
                Assert.assertEquals("Wrong number of values in per-key cross product for key " + key, expectOutVal, val);
            }
        } else {
            // create the map for validating the results
            HashMap<Integer, Long> map = new HashMap<>(expectOutKeySize);
            for (Object o : actual) {
                StreamRecord<RowData> record = (StreamRecord<RowData>) o;
                RowData row = record.getValue();
                int key = row.isNullAt(0) ? row.getInt(2) : row.getInt(0);
                int val1 = 0;
                int val2 = 0;
                if (!row.isNullAt(1)) {
                    val1 = row.getInt(1);
                }
                if (!row.isNullAt(3)) {
                    val2 = row.getInt(3);
                }
                int val = val1 + val2;
                Long contained = map.get(key);
                if (contained == null) {
                    contained = (long) val;
                } else {
                    contained = valueOf(contained + val);
                }
                map.put(key, contained);
            }
            Assert.assertEquals("Wrong number of keys", expectOutKeySize, map.size());
            for (Map.Entry<Integer, Long> entry : map.entrySet()) {
                long val = entry.getValue();
                int key = entry.getKey();
                Assert.assertEquals("Wrong number of values in per-key cross product for key " + key, expectOutVal, val);
            }
        }
    }
}
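For context, the two MutableObjectIterator inputs can be built from hand-made BinaryRowData rows. The following is a minimal sketch, not taken from the test above: the helper names (intRow, rowsOf) and the sample data are assumptions for illustration, while BinaryRowWriter and MutableObjectIterator are the same Flink APIs used elsewhere on this page.

import java.util.Arrays;
import java.util.Iterator;

import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;
import org.apache.flink.util.MutableObjectIterator;

final class JoinInputs {

    // Build a two-field (INT key, INT value) BinaryRowData, matching the typeInfo above.
    static BinaryRowData intRow(int key, int value) {
        BinaryRowData row = new BinaryRowData(2);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.writeInt(0, key);
        writer.writeInt(1, value);
        writer.complete();
        return row;
    }

    // Expose a fixed set of rows through the MutableObjectIterator contract the harness expects.
    static MutableObjectIterator<BinaryRowData> rowsOf(BinaryRowData... rows) {
        Iterator<BinaryRowData> it = Arrays.asList(rows).iterator();
        return new MutableObjectIterator<BinaryRowData>() {
            @Override
            public BinaryRowData next(BinaryRowData reuse) {
                return next();
            }

            @Override
            public BinaryRowData next() {
                return it.hasNext() ? it.next() : null;
            }
        };
    }
}

A caller could then pass rowsOf(intRow(1, 1), intRow(2, 2)) and rowsOf(intRow(1, 10)) as input1 and input2; the real tests generate much larger inputs.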
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
The class DataFormatConvertersTest, method testTypes.
@Test
public void testTypes() {
    for (int i = 0; i < simpleTypes.length; i++) {
        test(simpleTypes[i], simpleValues[i]);
    }
    test(new RowTypeInfo(simpleTypes), new Row(simpleTypes.length));
    test(new RowTypeInfo(simpleTypes), Row.ofKind(RowKind.DELETE, simpleValues));
    test(InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()), GenericRowData.of(StringData.fromString("hehe"), 111));
    test(InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()), GenericRowData.of(null, null));
    test(new DecimalDataTypeInfo(10, 5), null);
    test(new DecimalDataTypeInfo(10, 5), DecimalDataUtils.castFrom(5.555, 10, 5));
    test(Types.BIG_DEC, null);
    {
        DataFormatConverter converter = getConverter(Types.BIG_DEC);
        Assert.assertTrue(
                Arrays.deepEquals(
                        new Object[] {
                            converter.toInternal(
                                    converter.toExternal(DecimalDataUtils.castFrom(5, 19, 18)))
                        },
                        new Object[] { DecimalDataUtils.castFrom(5, 19, 18) }));
    }
    test(new ListTypeInfo<>(Types.STRING), null);
    test(new ListTypeInfo<>(Types.STRING), Arrays.asList("ahah", "xx"));
    test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] { 1D, 5D });
    test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] { null, null });
    test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { null, null });
    test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { "haha", "hehe" });
    test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { "haha", "hehe" }, new String[] { "aa", "bb" });
    test(new MapTypeInfo<>(Types.STRING, Types.INT), null);
    HashMap<String, Integer> map = new HashMap<>();
    map.put("haha", 1);
    map.put("hah1", 5);
    map.put(null, null);
    test(new MapTypeInfo<>(Types.STRING, Types.INT), map);
    Tuple2 tuple2 = new Tuple2<>(5, 10);
    TupleTypeInfo tupleTypeInfo = new TupleTypeInfo<>(tuple2.getClass(), Types.INT, Types.INT);
    test(tupleTypeInfo, tuple2);
    test(TypeExtractor.createTypeInfo(MyPojo.class), new MyPojo(1, 3));
}
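The private test(...) helper is not shown here; what it checks is an internal/external round trip through DataFormatConverters. Below is a minimal sketch of that round trip, with illustrative field names and values; getConverterForDataType returns a raw converter, hence the suppressed unchecked assignment.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.types.DataType;
import org.apache.flink.types.Row;

public class RowRoundTrip {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        DataType rowType =
                DataTypes.ROW(DataTypes.FIELD("s", DataTypes.STRING()), DataTypes.FIELD("i", DataTypes.INT()));
        DataFormatConverters.DataFormatConverter<RowData, Row> converter =
                DataFormatConverters.getConverterForDataType(rowType);
        RowData internal = converter.toInternal(Row.of("hehe", 111)); // external Row -> internal RowData
        Row external = converter.toExternal(internal);                // internal RowData -> external Row
        System.out.println(internal.getInt(1) + " / " + external);
    }
}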
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
The class RowDataTest, method getBinaryRow.
private BinaryRowData getBinaryRow() {
    BinaryRowData row = new BinaryRowData(18);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeBoolean(0, true);
    writer.writeByte(1, (byte) 1);
    writer.writeShort(2, (short) 2);
    writer.writeInt(3, 3);
    writer.writeLong(4, 4);
    writer.writeFloat(5, 5);
    writer.writeDouble(6, 6);
    writer.writeString(8, str);
    writer.writeRawValue(9, generic, genericSerializer);
    writer.writeDecimal(10, decimal1, 5);
    writer.writeDecimal(11, decimal2, 20);
    writer.writeArray(12, array, new ArrayDataSerializer(DataTypes.INT().getLogicalType()));
    writer.writeMap(13, map, new MapDataSerializer(DataTypes.INT().getLogicalType(), DataTypes.INT().getLogicalType()));
    writer.writeRow(14, underRow, new RowDataSerializer(RowType.of(new IntType(), new IntType())));
    writer.writeBinary(15, bytes);
    writer.writeTimestamp(16, timestamp1, 3);
    writer.writeTimestamp(17, timestamp2, 9);
    return row;
}
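The companion assertions (not shown here) read these positions back through the RowData accessors. As a quick reference, a sketch of the matching reads follows; the decimal scale of 0 is an assumption, and the precision and field-count arguments must mirror the writer calls above.

import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.DecimalData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.RawValueData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;

final class BinaryRowReads {
    static void readBack(RowData row) {
        boolean f0 = row.getBoolean(0);
        byte f1 = row.getByte(1);
        short f2 = row.getShort(2);
        int f3 = row.getInt(3);
        long f4 = row.getLong(4);
        float f5 = row.getFloat(5);
        double f6 = row.getDouble(6);
        StringData f8 = row.getString(8);
        RawValueData<?> f9 = row.getRawValue(9);
        DecimalData f10 = row.getDecimal(10, 5, 0);   // precision 5 as written above, scale assumed 0
        DecimalData f11 = row.getDecimal(11, 20, 0);  // precision 20 as written above, scale assumed 0
        ArrayData f12 = row.getArray(12);
        MapData f13 = row.getMap(13);
        RowData f14 = row.getRow(14, 2);              // nested row with two INT fields
        byte[] f15 = row.getBinary(15);
        TimestampData f16 = row.getTimestamp(16, 3);  // millisecond precision
        TimestampData f17 = row.getTimestamp(17, 9);  // nanosecond precision
    }
}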
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
The class BinaryRowDataTest, method testNested.
@Test
public void testNested() {
    BinaryRowData row = new BinaryRowData(2);
    BinaryRowWriter writer = new BinaryRowWriter(row);
    writer.writeRow(
            0,
            GenericRowData.of(fromString("1"), 1),
            new RowDataSerializer(RowType.of(VarCharType.STRING_TYPE, new IntType())));
    writer.setNullAt(1);
    writer.complete();
    RowData nestedRow = row.getRow(0, 2);
    assertEquals("1", nestedRow.getString(0).toString());
    assertEquals(1, nestedRow.getInt(1));
    assertTrue(row.isNullAt(1));
}
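A related sketch, not taken from the test: the same RowDataSerializer used above can turn a GenericRowData into its binary form directly, which is essentially what writeRow does for the nested field. The class and variable names are illustrative.

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class NestedRowToBinary {
    public static void main(String[] args) throws Exception {
        RowDataSerializer serializer =
                new RowDataSerializer(RowType.of(VarCharType.STRING_TYPE, new IntType()));
        BinaryRowData binary =
                serializer.toBinaryRow(GenericRowData.of(StringData.fromString("1"), 1));
        System.out.println(binary.getString(0) + ", " + binary.getInt(1)); // prints "1, 1"
    }
}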
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
The class MapAndMultisetToStringCastRule, method generateCodeBlockInternal.
/* Example generated code for MAP<STRING, INTERVAL MONTH> -> CHAR(12):

isNull$0 = _myInputIsNull;
if (!isNull$0) {
    org.apache.flink.table.data.ArrayData keys$2 = _myInput.keyArray();
    org.apache.flink.table.data.ArrayData values$3 = _myInput.valueArray();
    builder$1.setLength(0);
    builder$1.append("{");
    for (int i$5 = 0; i$5 < _myInput.size(); i$5++) {
        if (builder$1.length() > 12) {
            break;
        }
        if (i$5 != 0) {
            builder$1.append(", ");
        }
        org.apache.flink.table.data.binary.BinaryStringData key$6 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
        boolean keyIsNull$7 = keys$2.isNullAt(i$5);
        int value$8 = -1;
        boolean valueIsNull$9 = values$3.isNullAt(i$5);
        if (!keyIsNull$7) {
            key$6 = ((org.apache.flink.table.data.binary.BinaryStringData) keys$2.getString(i$5));
            builder$1.append(key$6);
        } else {
            builder$1.append("NULL");
        }
        builder$1.append("=");
        if (!valueIsNull$9) {
            value$8 = values$3.getInt(i$5);
            isNull$2 = valueIsNull$9;
            if (!isNull$2) {
                result$3 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + value$8);
                isNull$2 = result$3 == null;
            } else {
                result$3 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
            }
            builder$1.append(result$3);
        } else {
            builder$1.append("NULL");
        }
    }
    builder$1.append("}");
    java.lang.String resultString$4;
    resultString$4 = builder$1.toString();
    if (builder$1.length() > 12) {
        resultString$4 = builder$1.substring(0, java.lang.Math.min(builder$1.length(), 12));
    } else {
        if (resultString$4.length() < 12) {
            int padLength$10;
            padLength$10 = 12 - resultString$4.length();
            java.lang.StringBuilder sbPadding$11;
            sbPadding$11 = new java.lang.StringBuilder();
            for (int i$12 = 0; i$12 < padLength$10; i$12++) {
                sbPadding$11.append(" ");
            }
            resultString$4 = resultString$4 + sbPadding$11.toString();
        }
    }
    result$1 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$4);
    isNull$0 = result$1 == null;
} else {
    result$1 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
}
*/
@Override
protected String generateCodeBlockInternal(
        CodeGeneratorCastRule.Context context,
        String inputTerm,
        String returnVariable,
        LogicalType inputLogicalType,
        LogicalType targetLogicalType) {
    final LogicalType keyType =
            inputLogicalType.is(LogicalTypeRoot.MULTISET)
                    ? ((MultisetType) inputLogicalType).getElementType()
                    : ((MapType) inputLogicalType).getKeyType();
    final LogicalType valueType =
            inputLogicalType.is(LogicalTypeRoot.MULTISET)
                    ? new IntType(false)
                    : ((MapType) inputLogicalType).getValueType();
    final String builderTerm = newName("builder");
    context.declareClassField(
            className(StringBuilder.class), builderTerm, constructorCall(StringBuilder.class));
    final String keyArrayTerm = newName("keys");
    final String valueArrayTerm = newName("values");
    final String resultStringTerm = newName("resultString");
    final int length = LogicalTypeChecks.getLength(targetLogicalType);
    CastRuleUtils.CodeWriter writer =
            new CastRuleUtils.CodeWriter()
                    .declStmt(ArrayData.class, keyArrayTerm, methodCall(inputTerm, "keyArray"))
                    .declStmt(ArrayData.class, valueArrayTerm, methodCall(inputTerm, "valueArray"))
                    .stmt(methodCall(builderTerm, "setLength", 0))
                    .stmt(methodCall(builderTerm, "append", strLiteral("{")))
                    .forStmt(
                            methodCall(inputTerm, "size"),
                            (indexTerm, loopBodyWriter) -> {
                                String keyTerm = newName("key");
                                String keyIsNullTerm = newName("keyIsNull");
                                String valueTerm = newName("value");
                                String valueIsNullTerm = newName("valueIsNull");
                                // Null check is done at the key array access level
                                CastCodeBlock keyCast =
                                        CastRuleProvider.generateAlwaysNonNullCodeBlock(
                                                context, keyTerm, keyType, STRING_TYPE);
                                // Null check is done at the value array access level
                                CastCodeBlock valueCast =
                                        CastRuleProvider.generateAlwaysNonNullCodeBlock(
                                                context, valueTerm, valueType, STRING_TYPE);
                                Consumer<CastRuleUtils.CodeWriter> appendNonNullValue =
                                        bodyWriter ->
                                                bodyWriter
                                                        .assignStmt(valueTerm, rowFieldReadAccess(indexTerm, valueArrayTerm, valueType))
                                                        .append(valueCast)
                                                        .stmt(methodCall(builderTerm, "append", valueCast.getReturnTerm()));
                                if (!context.legacyBehaviour() && couldTrim(length)) {
                                    loopBodyWriter.ifStmt(
                                            stringExceedsLength(builderTerm, length),
                                            CastRuleUtils.CodeWriter::breakStmt);
                                }
                                loopBodyWriter
                                        .ifStmt(
                                                indexTerm + " != 0",
                                                thenBodyWriter ->
                                                        thenBodyWriter.stmt(methodCall(builderTerm, "append", strLiteral(", "))))
                                        .declPrimitiveStmt(keyType, keyTerm)
                                        .declStmt(boolean.class, keyIsNullTerm, methodCall(keyArrayTerm, "isNullAt", indexTerm))
                                        .declPrimitiveStmt(valueType, valueTerm)
                                        .declStmt(boolean.class, valueIsNullTerm, methodCall(valueArrayTerm, "isNullAt", indexTerm))
                                        .ifStmt(
                                                "!" + keyIsNullTerm,
                                                thenBodyWriter ->
                                                        thenBodyWriter
                                                                .assignStmt(keyTerm, rowFieldReadAccess(indexTerm, keyArrayTerm, keyType))
                                                                .append(keyCast)
                                                                .stmt(methodCall(builderTerm, "append", keyCast.getReturnTerm())),
                                                elseBodyWriter ->
                                                        elseBodyWriter.stmt(methodCall(builderTerm, "append", nullLiteral(context.legacyBehaviour()))))
                                        .stmt(methodCall(builderTerm, "append", strLiteral("=")));
                                if (inputLogicalType.is(LogicalTypeRoot.MULTISET)) {
                                    appendNonNullValue.accept(loopBodyWriter);
                                } else {
                                    loopBodyWriter.ifStmt(
                                            "!" + valueIsNullTerm,
                                            appendNonNullValue,
                                            elseBodyWriter ->
                                                    elseBodyWriter.stmt(methodCall(builderTerm, "append", nullLiteral(context.legacyBehaviour()))));
                                }
                            })
                    .stmt(methodCall(builderTerm, "append", strLiteral("}")));
    return CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded(
                    writer,
                    targetLogicalType,
                    context.legacyBehaviour(),
                    length,
                    resultStringTerm,
                    builderTerm)
            .assignStmt(
                    returnVariable,
                    CastRuleUtils.staticCall(BINARY_STRING_DATA_FROM_STRING(), resultStringTerm))
            .toString();
}
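To make the trim/pad semantics of the generated code concrete, here is a hand-written, plain-Java equivalent of a MAP<STRING, INT>-like input cast to CHAR(12). It only illustrates the behaviour sketched in the comment above (stop rendering entries once the builder exceeds the target length, then substring or right-pad to exactly 12 characters); the method name and input are made up, and it ignores the legacy-behaviour NULL/null distinction.

import java.util.LinkedHashMap;
import java.util.Map;

public class MapToCharSketch {

    // Render a map as {k=v, ...} and force the result to exactly 12 characters.
    static String castToChar12(Map<String, Integer> map) {
        final int length = 12;
        StringBuilder builder = new StringBuilder("{");
        boolean first = true;
        for (Map.Entry<String, Integer> e : map.entrySet()) {
            if (builder.length() > length) {
                break; // the result will be truncated anyway, so stop rendering entries
            }
            if (!first) {
                builder.append(", ");
            }
            first = false;
            builder.append(e.getKey() == null ? "NULL" : e.getKey())
                    .append("=")
                    .append(e.getValue() == null ? "NULL" : e.getValue());
        }
        builder.append("}");
        String result = builder.toString();
        if (result.length() > length) {
            return result.substring(0, length); // trim to CHAR(12)
        }
        return String.format("%-" + length + "s", result); // right-pad with spaces to CHAR(12)
    }

    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("a", 1);
        map.put("b", null);
        System.out.println("[" + castToChar12(map) + "]"); // prints [{a=1, b=NULL]
    }
}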