Search in sources:

Example 26 with IntType

Use of org.apache.flink.table.types.logical.IntType in the Apache Flink project (flink).

From the class CodeSplitTest, method testJoinCondition.

@Test
public void testJoinCondition() {
    // Number of int fields per input row; the generated condition is wide enough
    // to exercise the code-splitting path.
    final int fieldCount = 200;
    FlinkTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(typeFactory);
    RelDataType intRelType = typeFactory.createFieldTypeFromLogicalType(new IntType());

    // One strict "left.i < right.i" comparison per field pair; the right-hand
    // input's fields are addressed with an offset of fieldCount.
    RexNode[] comparisons = new RexNode[fieldCount];
    for (int i = 0; i < fieldCount; i++) {
        comparisons[i] =
                rexBuilder.makeCall(
                        SqlStdOperatorTable.LESS_THAN,
                        new RexInputRef(i, intRelType),
                        new RexInputRef(fieldCount + i, intRelType));
    }
    RexNode joinCondition = rexBuilder.makeCall(SqlStdOperatorTable.AND, comparisons);
    RowType rowType = getIntRowType(fieldCount);

    GenericRowData leftRow = new GenericRowData(fieldCount);
    GenericRowData rightRow = new GenericRowData(fieldCount);
    Random random = new Random();
    for (int i = 0; i < fieldCount; i++) {
        leftRow.setField(i, 0);
        rightRow.setField(i, 1);
    }
    // Randomly pick the expected outcome; when the condition should be false,
    // violate exactly one inequality by raising a single left-row field to 1.
    boolean expected = random.nextBoolean();
    if (!expected) {
        leftRow.setField(random.nextInt(fieldCount), 1);
    }

    Consumer<TableConfig> consumer =
            tableConfig -> {
                JoinCondition instance =
                        JoinUtil.generateConditionFunction(tableConfig, joinCondition, rowType, rowType)
                                .newInstance(classLoader);
                // Apply repeatedly to make sure the generated code is stable.
                for (int i = 0; i < 100; i++) {
                    Assert.assertEquals(expected, instance.apply(leftRow, rightRow));
                }
            };
    runTest(consumer);
}
Also used : Arrays(java.util.Arrays) FlinkMatchers(org.apache.flink.core.testutils.FlinkMatchers) IntType(org.apache.flink.table.types.logical.IntType) Random(java.util.Random) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RowType(org.apache.flink.table.types.logical.RowType) ArrayList(java.util.ArrayList) HashFunction(org.apache.flink.table.runtime.generated.HashFunction) TableConfigOptions(org.apache.flink.table.api.config.TableConfigOptions) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) GenericRowData(org.apache.flink.table.data.GenericRowData) RexNode(org.apache.calcite.rex.RexNode) OutputStream(java.io.OutputStream) PrintStream(java.io.PrintStream) RelDataType(org.apache.calcite.rel.type.RelDataType) TableConfig(org.apache.flink.table.api.TableConfig) RecordComparator(org.apache.flink.table.runtime.generated.RecordComparator) RexBuilder(org.apache.calcite.rex.RexBuilder) Test(org.junit.Test) IOException(java.io.IOException) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ComparatorCodeGenerator(org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator) RexInputRef(org.apache.calcite.rex.RexInputRef) Consumer(java.util.function.Consumer) JoinUtil(org.apache.flink.table.planner.plan.utils.JoinUtil) JoinCondition(org.apache.flink.table.runtime.generated.JoinCondition) List(java.util.List) MatcherAssert(org.hamcrest.MatcherAssert) LogicalType(org.apache.flink.table.types.logical.LogicalType) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) Assert(org.junit.Assert) Collections(java.util.Collections) SortSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.SortSpec) Projection(org.apache.flink.table.runtime.generated.Projection) RowType(org.apache.flink.table.types.logical.RowType) RelDataType(org.apache.calcite.rel.type.RelDataType) IntType(org.apache.flink.table.types.logical.IntType) JoinCondition(org.apache.flink.table.runtime.generated.JoinCondition) 
FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) Random(java.util.Random) RexBuilder(org.apache.calcite.rex.RexBuilder) RexInputRef(org.apache.calcite.rex.RexInputRef) GenericRowData(org.apache.flink.table.data.GenericRowData) TableConfig(org.apache.flink.table.api.TableConfig) RexNode(org.apache.calcite.rex.RexNode) Test(org.junit.Test)

Example 27 with IntType

Use of org.apache.flink.table.types.logical.IntType in the Apache Flink project (flink).

From the class BinaryArrayDataTest, method testNested.

@Test
public void testNested() {
    // An array of two elements, each slot 8 bytes wide (fixed-length part).
    BinaryArrayData binaryArray = new BinaryArrayData();
    BinaryArrayWriter arrayWriter = new BinaryArrayWriter(binaryArray, 2, 8);

    // Element 0 is a nested (VARCHAR, INT) row; element 1 is left null.
    RowDataSerializer nestedSerializer =
            new RowDataSerializer(RowType.of(VarCharType.STRING_TYPE, new IntType()));
    arrayWriter.writeRow(0, GenericRowData.of(fromString("1"), 1), nestedSerializer);
    arrayWriter.setNullAt(1);
    arrayWriter.complete();

    // Read the nested row back and verify both of its fields plus the null slot.
    RowData nestedRow = binaryArray.getRow(0, 2);
    assertEquals("1", nestedRow.getString(0).toString());
    assertEquals(1, nestedRow.getInt(1));
    assertTrue(binaryArray.isNullAt(1));
}
Also used : BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) BinaryArrayWriter(org.apache.flink.table.data.writer.BinaryArrayWriter) BinaryArrayData(org.apache.flink.table.data.binary.BinaryArrayData) RowDataSerializer(org.apache.flink.table.runtime.typeutils.RowDataSerializer) IntType(org.apache.flink.table.types.logical.IntType) Test(org.junit.Test)

Example 28 with IntType

Use of org.apache.flink.table.types.logical.IntType in the Apache Flink project (flink).

From the class AbstractJdbcRowConverterTest, method testExternalLocalDateTimeToTimestamp.

@Test
public void testExternalLocalDateTimeToTimestamp() throws Exception {
    // Schema under test: (INT, TIMESTAMP(3)).
    RowType rowType = RowType.of(new IntType(), new TimestampType(3));
    // Minimal concrete subclass — the base converter supplies all conversions.
    JdbcRowConverter converter = new AbstractJdbcRowConverter(rowType) {

        private static final long serialVersionUID = 1L;

        @Override
        public String converterName() {
            return "test";
        }
    };

    LocalDateTime externalTimestamp = LocalDateTime.parse("2021-04-07T00:00:05.999");
    ResultSet resultSet = Mockito.mock(ResultSet.class);
    Mockito.when(resultSet.getObject(1)).thenReturn(123);
    Mockito.when(resultSet.getObject(2)).thenReturn(externalTimestamp);

    // The external LocalDateTime must round-trip through the internal TimestampData.
    RowData converted = converter.toInternal(resultSet);
    assertEquals(123, converted.getInt(0));
    assertEquals(externalTimestamp, converted.getTimestamp(1, 3).toLocalDateTime());
}
Also used : RowData(org.apache.flink.table.data.RowData) ResultSet(java.sql.ResultSet) RowType(org.apache.flink.table.types.logical.RowType) TimestampType(org.apache.flink.table.types.logical.TimestampType) IntType(org.apache.flink.table.types.logical.IntType) Test(org.junit.Test)

Example 29 with IntType

Use of org.apache.flink.table.types.logical.IntType in the Apache Flink project (flink).

From the class OrcBulkRowDataWriterTest, method initInput.

@Before
public void initInput() {
    // Builds two input rows over the schema
    // (VARCHAR, INT, ARRAY<ROW<VARCHAR>>, MAP<VARCHAR, ROW<VARCHAR, TIMESTAMP>>).
    input = new ArrayList<>();
    fieldTypes = new LogicalType[4];
    fieldTypes[0] = new VarCharType();
    fieldTypes[1] = new IntType();
    List<RowType.RowField> arrayRowFieldList =
            Collections.singletonList(new RowType.RowField("_col2_col0", new VarCharType()));
    fieldTypes[2] = new ArrayType(new RowType(arrayRowFieldList));
    List<RowType.RowField> mapRowFieldList =
            Arrays.asList(
                    new RowType.RowField("_col3_col0", new VarCharType()),
                    new RowType.RowField("_col3_col1", new TimestampType()));
    fieldTypes[3] = new MapType(new VarCharType(), new RowType(mapRowFieldList));

    // The two rows are identical except for the numeric id woven into every value.
    input.add(createTestRow(1));
    input.add(createTestRow(2));
}

/**
 * Creates one 4-field test row whose string values and int field embed {@code id},
 * so the two generated rows are distinguishable when read back.
 *
 * <p>Field layout: [0] varchar, [1] int (= id), [2] array of two single-field
 * nested rows, [3] map with one entry whose value is a (varchar, timestamp) row.
 */
private static GenericRowData createTestRow(int id) {
    GenericRowData rowData = new GenericRowData(4);
    rowData.setField(0, new BinaryStringData("_col_0_string_" + id));
    rowData.setField(1, id);

    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_" + id));
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_" + id));
    rowData.setField(2, new GenericArrayData(new Object[] { arrayValue1, arrayValue2 }));

    GenericRowData mapValue = new GenericRowData(2);
    mapValue.setField(0, new BinaryStringData("_col_3_map_value_string_" + id));
    // Fixed timestamp: one hour after the epoch.
    mapValue.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
    Map<StringData, RowData> mapDataMap = new HashMap<>();
    mapDataMap.put(new BinaryStringData("_col_3_map_key_" + id), mapValue);
    rowData.setField(3, new GenericMapData(mapDataMap));
    return rowData;
}
Also used : GenericMapData(org.apache.flink.table.data.GenericMapData) GenericArrayData(org.apache.flink.table.data.GenericArrayData) RowType(org.apache.flink.table.types.logical.RowType) Timestamp(java.sql.Timestamp) MapType(org.apache.flink.table.types.logical.MapType) IntType(org.apache.flink.table.types.logical.IntType) ArrayType(org.apache.flink.table.types.logical.ArrayType) TimestampType(org.apache.flink.table.types.logical.TimestampType) GenericRowData(org.apache.flink.table.data.GenericRowData) VarCharType(org.apache.flink.table.types.logical.VarCharType) BinaryStringData(org.apache.flink.table.data.binary.BinaryStringData) Map(java.util.Map) HashMap(java.util.HashMap) Before(org.junit.Before)

Example 30 with IntType

Use of org.apache.flink.table.types.logical.IntType in the Apache Flink project (flink).

From the class ParquetSplitReaderUtil, method createVectorFromConstant.

/**
 * Creates a heap-backed column vector of {@code batchSize} slots, each holding the same
 * constant {@code value} converted to the internal representation of the given logical
 * {@code type}. A {@code null} value yields an all-null vector of the matching type.
 *
 * @param type logical type that selects the physical vector implementation
 * @param value external constant to replicate, or {@code null}
 * @param batchSize number of slots in the resulting vector
 * @throws UnsupportedOperationException for type roots without a vector mapping
 */
public static ColumnVector createVectorFromConstant(LogicalType type, Object value, int batchSize) {
    switch (type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
        case BINARY:
        case VARBINARY:
            HeapBytesVector bytesVector = new HeapBytesVector(batchSize);
            if (value != null) {
                // Binary constants are taken as-is; anything else is UTF-8 encoded text.
                bytesVector.fill(
                        value instanceof byte[]
                                ? (byte[]) value
                                : value.toString().getBytes(StandardCharsets.UTF_8));
            } else {
                bytesVector.fillWithNulls();
            }
            return bytesVector;
        case BOOLEAN:
            HeapBooleanVector booleanVector = new HeapBooleanVector(batchSize);
            if (value != null) {
                booleanVector.fill((boolean) value);
            } else {
                booleanVector.fillWithNulls();
            }
            return booleanVector;
        case TINYINT:
            HeapByteVector tinyIntVector = new HeapByteVector(batchSize);
            if (value != null) {
                tinyIntVector.fill(((Number) value).byteValue());
            } else {
                tinyIntVector.fillWithNulls();
            }
            return tinyIntVector;
        case SMALLINT:
            HeapShortVector smallIntVector = new HeapShortVector(batchSize);
            if (value != null) {
                smallIntVector.fill(((Number) value).shortValue());
            } else {
                smallIntVector.fillWithNulls();
            }
            return smallIntVector;
        case INTEGER:
            HeapIntVector intVector = new HeapIntVector(batchSize);
            if (value != null) {
                intVector.fill(((Number) value).intValue());
            } else {
                intVector.fillWithNulls();
            }
            return intVector;
        case BIGINT:
            HeapLongVector longVector = new HeapLongVector(batchSize);
            if (value != null) {
                longVector.fill(((Number) value).longValue());
            } else {
                longVector.fillWithNulls();
            }
            return longVector;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            int precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            DecimalData decimal =
                    value == null
                            ? null
                            : Preconditions.checkNotNull(
                                    DecimalData.fromBigDecimal((BigDecimal) value, precision, scale));
            // Decimals delegate to the narrowest physical vector the precision fits in:
            // int, long, or raw unscaled bytes.
            ColumnVector delegate;
            if (ParquetSchemaConverter.is32BitDecimal(precision)) {
                delegate =
                        createVectorFromConstant(
                                new IntType(),
                                decimal == null ? null : (int) decimal.toUnscaledLong(),
                                batchSize);
            } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
                delegate =
                        createVectorFromConstant(
                                new BigIntType(),
                                decimal == null ? null : decimal.toUnscaledLong(),
                                batchSize);
            } else {
                delegate =
                        createVectorFromConstant(
                                new VarBinaryType(),
                                decimal == null ? null : decimal.toUnscaledBytes(),
                                batchSize);
            }
            return new ParquetDecimalVector(delegate);
        case FLOAT:
            HeapFloatVector floatVector = new HeapFloatVector(batchSize);
            if (value != null) {
                floatVector.fill(((Number) value).floatValue());
            } else {
                floatVector.fillWithNulls();
            }
            return floatVector;
        case DOUBLE:
            HeapDoubleVector doubleVector = new HeapDoubleVector(batchSize);
            if (value != null) {
                doubleVector.fill(((Number) value).doubleValue());
            } else {
                doubleVector.fillWithNulls();
            }
            return doubleVector;
        case DATE:
            // Normalize java.time.LocalDate to java.sql.Date, then store the internal
            // epoch-day int by delegating to the INTEGER branch.
            if (value instanceof LocalDate) {
                value = Date.valueOf((LocalDate) value);
            }
            return createVectorFromConstant(
                    new IntType(), value == null ? null : toInternal((Date) value), batchSize);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            HeapTimestampVector timestampVector = new HeapTimestampVector(batchSize);
            if (value != null) {
                timestampVector.fill(TimestampData.fromLocalDateTime((LocalDateTime) value));
            } else {
                timestampVector.fillWithNulls();
            }
            return timestampVector;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used : HeapShortVector(org.apache.flink.table.data.columnar.vector.heap.HeapShortVector) HeapLongVector(org.apache.flink.table.data.columnar.vector.heap.HeapLongVector) LocalDateTime(java.time.LocalDateTime) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) HeapByteVector(org.apache.flink.table.data.columnar.vector.heap.HeapByteVector) HeapDoubleVector(org.apache.flink.table.data.columnar.vector.heap.HeapDoubleVector) HeapTimestampVector(org.apache.flink.table.data.columnar.vector.heap.HeapTimestampVector) HeapBytesVector(org.apache.flink.table.data.columnar.vector.heap.HeapBytesVector) HeapIntVector(org.apache.flink.table.data.columnar.vector.heap.HeapIntVector) BigIntType(org.apache.flink.table.types.logical.BigIntType) LocalDate(java.time.LocalDate) HeapBooleanVector(org.apache.flink.table.data.columnar.vector.heap.HeapBooleanVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) WritableColumnVector(org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector) IntType(org.apache.flink.table.types.logical.IntType) BigIntType(org.apache.flink.table.types.logical.BigIntType) DecimalData(org.apache.flink.table.data.DecimalData) DecimalType(org.apache.flink.table.types.logical.DecimalType) HeapFloatVector(org.apache.flink.table.data.columnar.vector.heap.HeapFloatVector)

Aggregations

IntType (org.apache.flink.table.types.logical.IntType)43 BigIntType (org.apache.flink.table.types.logical.BigIntType)23 LogicalType (org.apache.flink.table.types.logical.LogicalType)18 RowData (org.apache.flink.table.data.RowData)13 ArrayList (java.util.ArrayList)11 Test (org.junit.Test)11 GenericRowData (org.apache.flink.table.data.GenericRowData)10 BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData)10 DoubleType (org.apache.flink.table.types.logical.DoubleType)10 SmallIntType (org.apache.flink.table.types.logical.SmallIntType)10 TimestampType (org.apache.flink.table.types.logical.TimestampType)10 TinyIntType (org.apache.flink.table.types.logical.TinyIntType)10 VarCharType (org.apache.flink.table.types.logical.VarCharType)10 BooleanType (org.apache.flink.table.types.logical.BooleanType)8 DecimalType (org.apache.flink.table.types.logical.DecimalType)7 MapType (org.apache.flink.table.types.logical.MapType)7 RowType (org.apache.flink.table.types.logical.RowType)7 StructuredType (org.apache.flink.table.types.logical.StructuredType)7 DataType (org.apache.flink.table.types.DataType)6 FloatType (org.apache.flink.table.types.logical.FloatType)6