use of org.apache.flink.table.data.GenericRowData in project flink by apache.
the class RowDataSerializerTest method testLargeRowDataSerializer.
private static Object[] testLargeRowDataSerializer() {
    InternalTypeInfo<RowData> typeInfo =
            InternalTypeInfo.ofFields(
                    new IntType(), new IntType(), new IntType(), new IntType(),
                    new IntType(), new IntType(), new IntType(), new IntType(),
                    new IntType(), new IntType(), new IntType(), new IntType(),
                    VarCharType.STRING_TYPE);
    GenericRowData row = new GenericRowData(13);
    row.setField(0, 2);
    row.setField(1, null);
    row.setField(2, null);
    row.setField(3, null);
    row.setField(4, null);
    row.setField(5, null);
    row.setField(6, null);
    row.setField(7, null);
    row.setField(8, null);
    row.setField(9, null);
    row.setField(10, null);
    row.setField(11, null);
    row.setField(12, fromString("Test"));
    RowDataSerializer serializer = typeInfo.toRowSerializer();
    return new Object[] { serializer, new RowData[] { row } };
}
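A serializer obtained this way can be round-tripped with Flink's in-memory data views. The following is a minimal sketch, not part of the test above: the variable names and the initial buffer size are illustrative, and it assumes the typeInfo and row built in the method.

DataOutputSerializer out = new DataOutputSerializer(128); // illustrative initial size, grows as needed
serializer.serialize(row, out);                           // throws IOException
DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
RowData copy = serializer.deserialize(in);
// fields that were set to null (and any field never set at all) read back as null
boolean field1IsNull = copy.isNullAt(1);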
use of org.apache.flink.table.data.GenericRowData in project flink by apache.
the class TestCsvDeserializationSchema method deserialize.
@SuppressWarnings("unchecked")
@Override
public RowData deserialize(byte[] message) throws IOException {
    GenericRowData row = new GenericRowData(physicalFieldCount);
    int startIndex = 0;
    for (int csvColumn = 0; csvColumn < indexMapping.length; csvColumn++) {
        startIndex =
                fieldParsers[csvColumn].resetErrorStateAndParse(
                        message, startIndex, message.length, new byte[] { ',' }, null);
        if (indexMapping[csvColumn] != -1) {
            row.setField(
                    indexMapping[csvColumn],
                    csvRowToRowDataConverters[csvColumn].toInternal(
                            fieldParsers[csvColumn].getLastResult()));
        }
    }
    return row;
}
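The indexMapping array is what lets the schema drop or reorder CSV columns: -1 means the parsed column is discarded, any other value is the target position in the produced row. A standalone sketch of that mapping idea, with a made-up mapping, made-up parsed values, and a hypothetical (STRING, INT) physical row type:

int[] indexMapping = { 1, -1, 0 };                  // CSV col 0 -> row pos 1, col 1 dropped, col 2 -> row pos 0
String[] parsedColumns = { "42", "ignored", "ok" }; // values the field parsers might have produced
GenericRowData row = new GenericRowData(2);
for (int csvColumn = 0; csvColumn < indexMapping.length; csvColumn++) {
    if (indexMapping[csvColumn] == -1) {
        continue; // this column is not selected in the physical schema
    }
    Object internal =
            csvColumn == 2
                    ? StringData.fromString(parsedColumns[csvColumn]) // string field at row pos 0
                    : Integer.valueOf(parsedColumns[csvColumn]);      // int field at row pos 1
    row.setField(indexMapping[csvColumn], internal);
}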
use of org.apache.flink.table.data.GenericRowData in project flink by apache.
the class CodeSplitTest method testJoinCondition.
@Test
public void testJoinCondition() {
    int numFields = 200;
    FlinkTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
    RexBuilder builder = new RexBuilder(typeFactory);
    RelDataType intType = typeFactory.createFieldTypeFromLogicalType(new IntType());
    RexNode[] conditions = new RexNode[numFields];
    for (int i = 0; i < numFields; i++) {
        conditions[i] =
                builder.makeCall(
                        SqlStdOperatorTable.LESS_THAN,
                        new RexInputRef(i, intType),
                        new RexInputRef(numFields + i, intType));
    }
    RexNode joinCondition = builder.makeCall(SqlStdOperatorTable.AND, conditions);
    RowType rowType = getIntRowType(numFields);
    GenericRowData rowData1 = new GenericRowData(numFields);
    GenericRowData rowData2 = new GenericRowData(numFields);
    Random random = new Random();
    for (int i = 0; i < numFields; i++) {
        rowData1.setField(i, 0);
        rowData2.setField(i, 1);
    }
    boolean result = random.nextBoolean();
    if (!result) {
        rowData1.setField(random.nextInt(numFields), 1);
    }
    Consumer<TableConfig> consumer =
            tableConfig -> {
                JoinCondition instance =
                        JoinUtil.generateConditionFunction(tableConfig, joinCondition, rowType, rowType)
                                .newInstance(classLoader);
                for (int i = 0; i < 100; i++) {
                    Assert.assertEquals(result, instance.apply(rowData1, rowData2));
                }
            };
    runTest(consumer);
}
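The generated JoinCondition here is a 200-way AND of less-than comparisons between input refs i and numFields + i, so it is true exactly when every field of the left row is strictly less than the matching field of the right row. A plain-Java restatement of the expected value, illustrative only and reusing the variables from the test:

boolean expected = true;
for (int i = 0; i < numFields; i++) {
    expected = expected && rowData1.getInt(i) < rowData2.getInt(i);
}
// all-zero vs. all-one rows satisfy every comparison; flipping one field of rowData1 to 1
// breaks exactly one comparison, which is how the test steers `result` above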
use of org.apache.flink.table.data.GenericRowData in project flink by apache.
the class CodeSplitTest method testHashFunction.
@Test
public void testHashFunction() {
    int numFields = 1000;
    RowType rowType = getIntRowType(numFields);
    int[] hashFields = new int[numFields];
    for (int i = 0; i < numFields; i++) {
        hashFields[i] = i;
    }
    GenericRowData rowData = new GenericRowData(numFields);
    for (int i = 0; i < numFields; i++) {
        rowData.setField(i, i);
    }
    Consumer<TableConfig> consumer =
            tableConfig -> {
                HashFunction instance =
                        HashCodeGenerator.generateRowHash(
                                        new CodeGeneratorContext(tableConfig), rowType, "", hashFields)
                                .newInstance(classLoader);
                for (int i = 0; i < 100; i++) {
                    Assert.assertEquals(-1433414860, instance.hashCode(rowData));
                }
            };
    runTest(consumer);
}
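The same generator call can hash a subset of the row by passing a shorter index array. A hedged variation on the test above, reusing its tableConfig, rowType, rowData and classLoader; the field selection and the "partialHash" name are made up, and the resulting value is not the -1433414860 asserted above:

int[] firstThreeFields = { 0, 1, 2 };
HashFunction partialHash =
        HashCodeGenerator.generateRowHash(
                        new CodeGeneratorContext(tableConfig), rowType, "partialHash", firstThreeFields)
                .newInstance(classLoader);
int hash = partialHash.hashCode(rowData); // deterministic for the same row and field selection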
use of org.apache.flink.table.data.GenericRowData in project flink by apache.
the class RawFormatDeserializationSchema method open.
@Override
public void open(InitializationContext context) throws Exception {
    reuse = new GenericRowData(1);
    converter.open();
}
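The single-field row allocated in open() is intended to be reused across records instead of creating a new GenericRowData per message. How deserialize() fills it depends on the configured physical type and the converter; purely as a hypothetical sketch for a STRING-typed raw format (this is not the actual deserialize() of RawFormatDeserializationSchema):

@Override
public RowData deserialize(byte[] message) {
    // hypothetical: interpret the raw bytes as UTF-8 text and reuse the one-field row
    reuse.setField(0, StringData.fromString(new String(message, StandardCharsets.UTF_8)));
    return reuse;
}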