Usage of org.apache.flink.table.types.logical.IntType in the Apache Flink project.
From the class SortCodeGeneratorTest, method testInner:
// Checks that the generated sort code (NormalizedKeyComputer + RecordComparator)
// orders BinaryRowData records exactly like the reference Java comparator defined
// inline below. `inputType` and `sortSpec` are fields of the test class,
// presumably prepared by the surrounding harness — TODO confirm.
private void testInner() throws Exception {
// Back the sort buffer with 100 heap memory segments of 32 KB each.
List<MemorySegment> segments = new ArrayList<>();
for (int i = 0; i < 100; i++) {
segments.add(MemorySegmentFactory.wrap(new byte[32768]));
}
// f0 = generated normalized-key computer, f1 = generated record comparator.
Tuple2<NormalizedKeyComputer, RecordComparator> tuple2 = getSortBaseWithNulls(this.getClass().getSimpleName(), inputType, sortSpec);
BinaryRowDataSerializer serializer = new BinaryRowDataSerializer(inputType.getFieldCount());
BinaryInMemorySortBuffer sortBuffer = BinaryInMemorySortBuffer.createBuffer(tuple2.f0, (AbstractRowDataSerializer) serializer, serializer, tuple2.f1, new ListMemorySegmentPool(segments));
BinaryRowData[] dataArray = getTestData();
// Two independent views over cloned arrays: `data` becomes the expected order
// (sorted below by the reference comparator), while `binaryRows` is shuffled
// and written into the sort buffer to produce the actual order.
List<BinaryRowData> data = Arrays.asList(dataArray.clone());
List<BinaryRowData> binaryRows = Arrays.asList(dataArray.clone());
Collections.shuffle(binaryRows);
for (BinaryRowData row : binaryRows) {
// write() returns false when the buffer runs out of memory; the test data
// must fit entirely, so treat that as a hard failure.
if (!sortBuffer.write(row)) {
throw new RuntimeException();
}
}
new QuickSort().sort(sortBuffer);
MutableObjectIterator<BinaryRowData> iter = sortBuffer.getIterator();
List<BinaryRowData> result = new ArrayList<>();
BinaryRowData row = serializer.createInstance();
// The iterator reuses the passed-in row instance, so copy each record out.
while ((row = iter.next(row)) != null) {
result.add(row.copy());
}
int[] keys = sortSpec.getFieldIndices();
LogicalType[] keyTypes = sortSpec.getFieldTypes(inputType);
boolean[] orders = sortSpec.getAscendingOrders();
// Reference comparator: compares each sort key in order. Nulls sort first
// under ascending order; descending order negates every outcome.
data.sort((o1, o2) -> {
for (int i = 0; i < keys.length; i++) {
LogicalType t = inputType.getTypeAt(keys[i]);
boolean order = orders[i];
Object first = null;
Object second = null;
RowData.FieldGetter fieldGetter = RowData.createFieldGetter(keyTypes[i], keys[i]);
if (!o1.isNullAt(keys[i])) {
first = fieldGetter.getFieldOrNull(o1);
}
if (!o2.isNullAt(keys[i])) {
second = fieldGetter.getFieldOrNull(o2);
}
// Both null: this key ties, fall through to the next sort key.
if (first != null || second != null) {
if (first == null) {
return order ? -1 : 1;
}
if (second == null) {
return order ? 1 : -1;
}
if (first instanceof Comparable) {
int ret = ((Comparable) first).compareTo(second);
if (ret != 0) {
return order ? ret : -ret;
}
} else if (t.getTypeRoot() == LogicalTypeRoot.ARRAY) {
// Element-wise comparison; the test arrays carry byte elements (see
// getByte below). A null element sorts before a non-null one.
BinaryArrayData leftArray = (BinaryArrayData) first;
BinaryArrayData rightArray = (BinaryArrayData) second;
int minLength = Math.min(leftArray.size(), rightArray.size());
for (int j = 0; j < minLength; j++) {
boolean isNullLeft = leftArray.isNullAt(j);
boolean isNullRight = rightArray.isNullAt(j);
if (isNullLeft && isNullRight) {
// Do nothing.
} else if (isNullLeft) {
return order ? -1 : 1;
} else if (isNullRight) {
return order ? 1 : -1;
} else {
int comp = Byte.compare(leftArray.getByte(j), rightArray.getByte(j));
if (comp != 0) {
return order ? comp : -comp;
}
}
}
// Shared prefix equal: the shorter array sorts first.
if (leftArray.size() < rightArray.size()) {
return order ? -1 : 1;
} else if (leftArray.size() > rightArray.size()) {
return order ? 1 : -1;
}
} else if (t.getTypeRoot() == LogicalTypeRoot.VARBINARY) {
// Lexicographic byte-array comparison via the runtime's SortUtil.
int comp = org.apache.flink.table.runtime.operators.sort.SortUtil.compareBinary((byte[]) first, (byte[]) second);
if (comp != 0) {
return order ? comp : -comp;
}
} else if (t.getTypeRoot() == LogicalTypeRoot.ROW) {
// Nested rows: pick the converter/comparator pair by the row's first
// field type (plain int-row fixture vs. nested-row fixture).
RowType rowType = (RowType) t;
int comp;
if (rowType.getFields().get(0).getType() instanceof IntType) {
comp = INT_ROW_COMP.compare(INT_ROW_CONV.toExternal(first), INT_ROW_CONV.toExternal(second));
} else {
comp = NEST_ROW_COMP.compare(NEST_ROW_CONV.toExternal(first), NEST_ROW_CONV.toExternal(second));
}
if (comp != 0) {
return order ? comp : -comp;
}
} else if (t.getTypeRoot() == LogicalTypeRoot.RAW) {
// RAW values in this test wrap Integers (deserialized with IntSerializer).
Integer i1 = ((RawValueData<Integer>) first).toObject(IntSerializer.INSTANCE);
Integer i2 = ((RawValueData<Integer>) second).toObject(IntSerializer.INSTANCE);
int comp = Integer.compare(i1, i2);
if (comp != 0) {
return order ? comp : -comp;
}
} else {
// A sort-key type this reference comparator does not know how to order.
throw new RuntimeException();
}
}
}
// All sort keys tied.
return 0;
});
// Build a diagnostic message listing every expected/actual row pair, used in
// all assertions below so failures show the full picture.
StringBuilder builder = new StringBuilder();
for (int i = 0; i < data.size(); i++) {
builder.append("\n").append("expect: ").append(DataFormatTestUtil.rowDataToString(data.get(i), inputType)).append("; actual: ").append(DataFormatTestUtil.rowDataToString(result.get(i), inputType));
}
// NOTE(review): getChildren() already returns a List, so Arrays.asList wraps it
// in a singleton list — harmless for a debug message, but looks unintended.
builder.append("\n").append("types: ").append(Arrays.asList(inputType.getChildren()));
builder.append("\n").append("keys: ").append(Arrays.toString(keys));
String msg = builder.toString();
// Verify expected vs. actual row-by-row, comparing only the sort-key fields.
for (int i = 0; i < data.size(); i++) {
for (int j = 0; j < keys.length; j++) {
boolean isNull1 = data.get(i).isNullAt(keys[j]);
boolean isNull2 = result.get(i).isNullAt(keys[j]);
Assert.assertEquals(msg, isNull1, isNull2);
if (!isNull1 || !isNull2) {
RowData.FieldGetter fieldGetter = RowData.createFieldGetter(keyTypes[j], keys[j]);
Object o1 = fieldGetter.getFieldOrNull(data.get(i));
Object o2 = fieldGetter.getFieldOrNull(result.get(i));
// byte[] and RawValueData need content-based equality, not equals().
if (keyTypes[j] instanceof VarBinaryType) {
Assert.assertArrayEquals(msg, (byte[]) o1, (byte[]) o2);
} else if (keyTypes[j] instanceof TypeInformationRawType) {
assertThat(msg, (RawValueData) o1, equivalent((RawValueData) o2, new RawValueDataSerializer<>(IntSerializer.INSTANCE)));
} else {
Assert.assertEquals(msg, o1, o2);
}
}
}
}
}
Usage of org.apache.flink.table.types.logical.IntType in the Apache Flink project.
From the class LongHashJoinGeneratorTest, method newOperator:
// Builds a generated long-key hash join operator over single-INT join keys.
// Both the build and probe sides are (INT, INT) rows joined on field 0.
@Override
public Object newOperator(long memorySize, HashJoinType type, boolean reverseJoinFunction) {
RowType joinKeyType = RowType.of(new IntType());
// The long-key generator must accept a single non-null INT key for this join type.
Assert.assertTrue(LongHashJoinGenerator.support(type, joinKeyType, new boolean[] { true }));
RowType buildType = RowType.of(new IntType(), new IntType());
RowType probeType = RowType.of(new IntType(), new IntType());
int[] buildKeys = new int[] { 0 };
int[] probeKeys = new int[] { 0 };
GeneratedJoinCondition condition = new GeneratedJoinCondition(MyJoinCondition.class.getCanonicalName(), "", new Object[0]);
return LongHashJoinGenerator.gen(new TableConfig(), type, joinKeyType, buildType, probeType, buildKeys, probeKeys, 20, 10000, reverseJoinFunction, condition);
}
Usage of org.apache.flink.table.types.logical.IntType in the Apache Flink project.
From the class CodeSplitTest, method getIntRowType:
// Builds a RowType consisting of numFields INT columns, all sharing one
// IntType instance (matching the previous Arrays.fill behavior).
private RowType getIntRowType(int numFields) {
LogicalType intType = new IntType();
LogicalType[] fieldTypes = new LogicalType[numFields];
for (int i = 0; i < numFields; i++) {
fieldTypes[i] = intType;
}
return RowType.of(fieldTypes);
}
Aggregations