Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class CodeSplitTest, the method testJoinCondition:
@Test
public void testJoinCondition() {
    int numFields = 200;
    FlinkTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
    RexBuilder builder = new RexBuilder(typeFactory);
    RelDataType intType = typeFactory.createFieldTypeFromLogicalType(new IntType());
    RexNode[] conditions = new RexNode[numFields];
    for (int i = 0; i < numFields; i++) {
        conditions[i] =
                builder.makeCall(
                        SqlStdOperatorTable.LESS_THAN,
                        new RexInputRef(i, intType),
                        new RexInputRef(numFields + i, intType));
    }
    RexNode joinCondition = builder.makeCall(SqlStdOperatorTable.AND, conditions);
    RowType rowType = getIntRowType(numFields);

    GenericRowData rowData1 = new GenericRowData(numFields);
    GenericRowData rowData2 = new GenericRowData(numFields);
    Random random = new Random();
    for (int i = 0; i < numFields; i++) {
        rowData1.setField(i, 0);
        rowData2.setField(i, 1);
    }
    boolean result = random.nextBoolean();
    if (!result) {
        rowData1.setField(random.nextInt(numFields), 1);
    }

    Consumer<TableConfig> consumer =
            tableConfig -> {
                JoinCondition instance =
                        JoinUtil.generateConditionFunction(tableConfig, joinCondition, rowType, rowType)
                                .newInstance(classLoader);
                for (int i = 0; i < 100; i++) {
                    Assert.assertEquals(result, instance.apply(rowData1, rowData2));
                }
            };
    runTest(consumer);
}
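The getIntRowType(numFields) helper used above is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming the test only needs a row of numFields INT columns with the default field names f0..f(n-1); this is an illustration, not the original Flink helper:

import java.util.Arrays;

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;

// Hypothetical helper: a row type made of `numFields` INT columns.
private static RowType getIntRowType(int numFields) {
    LogicalType[] fieldTypes = new LogicalType[numFields];
    Arrays.fill(fieldTypes, new IntType());
    return RowType.of(fieldTypes);
}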
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class BinaryArrayDataTest, the method testNested:
@Test
public void testNested() {
    BinaryArrayData array = new BinaryArrayData();
    BinaryArrayWriter writer = new BinaryArrayWriter(array, 2, 8);
    writer.writeRow(
            0,
            GenericRowData.of(fromString("1"), 1),
            new RowDataSerializer(RowType.of(VarCharType.STRING_TYPE, new IntType())));
    writer.setNullAt(1);
    writer.complete();

    RowData nestedRow = array.getRow(0, 2);
    assertEquals("1", nestedRow.getString(0).toString());
    assertEquals(1, nestedRow.getInt(1));
    assertTrue(array.isNullAt(1));
}
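The writer above uses an element size of 8 bytes because each element is a nested-row reference. For a flat INT array the same API applies with a 4-byte element size, which can be derived from IntType itself. A minimal sketch, assuming BinaryArrayWriter.calculateFixLengthPartSize from flink-table-runtime; this is illustrative and not part of the test class:

import org.apache.flink.table.data.binary.BinaryArrayData;
import org.apache.flink.table.data.writer.BinaryArrayWriter;
import org.apache.flink.table.types.logical.IntType;

// Write a flat INT array; the fixed-length element size is computed from IntType (4 bytes).
public static BinaryArrayData writeIntArray(int[] values) {
    BinaryArrayData array = new BinaryArrayData();
    int elementSize = BinaryArrayWriter.calculateFixLengthPartSize(new IntType());
    BinaryArrayWriter writer = new BinaryArrayWriter(array, values.length, elementSize);
    for (int i = 0; i < values.length; i++) {
        writer.writeInt(i, values[i]);
    }
    writer.complete();
    return array; // array.getInt(i) now returns values[i]
}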
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class AbstractJdbcRowConverterTest, the method testExternalLocalDateTimeToTimestamp:
@Test
public void testExternalLocalDateTimeToTimestamp() throws Exception {
    RowType rowType = RowType.of(new IntType(), new TimestampType(3));
    JdbcRowConverter rowConverter =
            new AbstractJdbcRowConverter(rowType) {
                private static final long serialVersionUID = 1L;

                @Override
                public String converterName() {
                    return "test";
                }
            };

    ResultSet resultSet = Mockito.mock(ResultSet.class);
    Mockito.when(resultSet.getObject(1)).thenReturn(123);
    Mockito.when(resultSet.getObject(2)).thenReturn(LocalDateTime.parse("2021-04-07T00:00:05.999"));

    RowData res = rowConverter.toInternal(resultSet);
    assertEquals(123, res.getInt(0));
    assertEquals(
            LocalDateTime.parse("2021-04-07T00:00:05.999"),
            res.getTimestamp(1, 3).toLocalDateTime());
}
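The two assertions check the internal representation that the converter produces for an INT and a TIMESTAMP(3) column. For reference, an equivalent internal row could be built directly; a small sketch, assuming GenericRowData and TimestampData from flink-table-common (illustration only):

import java.time.LocalDateTime;

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.TimestampData;

// INT is stored as a Java int, TIMESTAMP(3) as TimestampData.
GenericRowData expected =
        GenericRowData.of(
                123,
                TimestampData.fromLocalDateTime(LocalDateTime.parse("2021-04-07T00:00:05.999")));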
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class OrcBulkRowDataWriterTest, the method initInput:
@Before
public void initInput() {
    input = new ArrayList<>();
    fieldTypes = new LogicalType[4];
    fieldTypes[0] = new VarCharType();
    fieldTypes[1] = new IntType();
    List<RowType.RowField> arrayRowFieldList =
            Collections.singletonList(new RowType.RowField("_col2_col0", new VarCharType()));
    fieldTypes[2] = new ArrayType(new RowType(arrayRowFieldList));
    List<RowType.RowField> mapRowFieldList =
            Arrays.asList(
                    new RowType.RowField("_col3_col0", new VarCharType()),
                    new RowType.RowField("_col3_col1", new TimestampType()));
    fieldTypes[3] = new MapType(new VarCharType(), new RowType(mapRowFieldList));

    {
        GenericRowData rowData = new GenericRowData(4);
        rowData.setField(0, new BinaryStringData("_col_0_string_1"));
        rowData.setField(1, 1);

        GenericRowData arrayValue1 = new GenericRowData(1);
        arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_1"));
        GenericRowData arrayValue2 = new GenericRowData(1);
        arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_1"));
        GenericArrayData arrayData = new GenericArrayData(new Object[] {arrayValue1, arrayValue2});
        rowData.setField(2, arrayData);

        GenericRowData mapValue1 = new GenericRowData(2);
        mapValue1.setField(0, new BinaryStringData("_col_3_map_value_string_1"));
        mapValue1.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
        Map<StringData, RowData> mapDataMap = new HashMap<>();
        mapDataMap.put(new BinaryStringData("_col_3_map_key_1"), mapValue1);
        GenericMapData mapData = new GenericMapData(mapDataMap);
        rowData.setField(3, mapData);

        input.add(rowData);
    }
    {
        GenericRowData rowData = new GenericRowData(4);
        rowData.setField(0, new BinaryStringData("_col_0_string_2"));
        rowData.setField(1, 2);

        GenericRowData arrayValue1 = new GenericRowData(1);
        arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_2"));
        GenericRowData arrayValue2 = new GenericRowData(1);
        arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_2"));
        GenericArrayData arrayData = new GenericArrayData(new Object[] {arrayValue1, arrayValue2});
        rowData.setField(2, arrayData);

        GenericRowData mapValue1 = new GenericRowData(2);
        mapValue1.setField(0, new BinaryStringData("_col_3_map_value_string_2"));
        mapValue1.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
        Map<StringData, RowData> mapDataMap = new HashMap<>();
        mapDataMap.put(new BinaryStringData("_col_3_map_key_2"), mapValue1);
        GenericMapData mapData = new GenericMapData(mapDataMap);
        rowData.setField(3, mapData);

        input.add(rowData);
    }
}
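Downstream, the fieldTypes array is typically combined into a single RowType describing the full schema of the rows added to input. A minimal sketch, assuming column names _col0 through _col3 (the names are illustrative, not taken from the test):

import org.apache.flink.table.types.logical.RowType;

// Assemble the four logical types (VARCHAR, INT, ARRAY<ROW>, MAP<VARCHAR, ROW>) into one row type.
RowType rowType =
        RowType.of(fieldTypes, new String[] {"_col0", "_col1", "_col2", "_col3"});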
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
From the class ParquetSplitReaderUtil, the method createVectorFromConstant:
public static ColumnVector createVectorFromConstant(LogicalType type, Object value, int batchSize) {
    switch (type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
        case BINARY:
        case VARBINARY:
            HeapBytesVector bsv = new HeapBytesVector(batchSize);
            if (value == null) {
                bsv.fillWithNulls();
            } else {
                bsv.fill(
                        value instanceof byte[]
                                ? (byte[]) value
                                : value.toString().getBytes(StandardCharsets.UTF_8));
            }
            return bsv;
        case BOOLEAN:
            HeapBooleanVector bv = new HeapBooleanVector(batchSize);
            if (value == null) {
                bv.fillWithNulls();
            } else {
                bv.fill((boolean) value);
            }
            return bv;
        case TINYINT:
            HeapByteVector byteVector = new HeapByteVector(batchSize);
            if (value == null) {
                byteVector.fillWithNulls();
            } else {
                byteVector.fill(((Number) value).byteValue());
            }
            return byteVector;
        case SMALLINT:
            HeapShortVector sv = new HeapShortVector(batchSize);
            if (value == null) {
                sv.fillWithNulls();
            } else {
                sv.fill(((Number) value).shortValue());
            }
            return sv;
        case INTEGER:
            HeapIntVector iv = new HeapIntVector(batchSize);
            if (value == null) {
                iv.fillWithNulls();
            } else {
                iv.fill(((Number) value).intValue());
            }
            return iv;
        case BIGINT:
            HeapLongVector lv = new HeapLongVector(batchSize);
            if (value == null) {
                lv.fillWithNulls();
            } else {
                lv.fill(((Number) value).longValue());
            }
            return lv;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            int precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            DecimalData decimal =
                    value == null
                            ? null
                            : Preconditions.checkNotNull(
                                    DecimalData.fromBigDecimal((BigDecimal) value, precision, scale));
            ColumnVector internalVector;
            if (ParquetSchemaConverter.is32BitDecimal(precision)) {
                internalVector =
                        createVectorFromConstant(
                                new IntType(),
                                decimal == null ? null : (int) decimal.toUnscaledLong(),
                                batchSize);
            } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
                internalVector =
                        createVectorFromConstant(
                                new BigIntType(),
                                decimal == null ? null : decimal.toUnscaledLong(),
                                batchSize);
            } else {
                internalVector =
                        createVectorFromConstant(
                                new VarBinaryType(),
                                decimal == null ? null : decimal.toUnscaledBytes(),
                                batchSize);
            }
            return new ParquetDecimalVector(internalVector);
        case FLOAT:
            HeapFloatVector fv = new HeapFloatVector(batchSize);
            if (value == null) {
                fv.fillWithNulls();
            } else {
                fv.fill(((Number) value).floatValue());
            }
            return fv;
        case DOUBLE:
            HeapDoubleVector dv = new HeapDoubleVector(batchSize);
            if (value == null) {
                dv.fillWithNulls();
            } else {
                dv.fill(((Number) value).doubleValue());
            }
            return dv;
        case DATE:
            if (value instanceof LocalDate) {
                value = Date.valueOf((LocalDate) value);
            }
            return createVectorFromConstant(
                    new IntType(), value == null ? null : toInternal((Date) value), batchSize);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            HeapTimestampVector tv = new HeapTimestampVector(batchSize);
            if (value == null) {
                tv.fillWithNulls();
            } else {
                tv.fill(TimestampData.fromLocalDateTime((LocalDateTime) value));
            }
            return tv;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
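A short usage sketch for the INTEGER branch: filling a batch with a constant INT value and reading it back through the returned vector. This is illustrative rather than taken from the Flink sources, and the column-vector import paths differ between Flink versions:

import org.apache.flink.table.data.columnar.vector.ColumnVector;
import org.apache.flink.table.data.columnar.vector.IntColumnVector;
// Note: older Flink versions place these interfaces under org.apache.flink.table.data.vector.

// Fill a 1024-row batch with the constant INT value 42 and read it back.
ColumnVector vector = ParquetSplitReaderUtil.createVectorFromConstant(new IntType(), 42, 1024);
int first = ((IntColumnVector) vector).getInt(0); // 42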