
Example 1 with GenericArrayData

Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.

From the class OrcBulkRowDataWriterTest, method readList.

/**
 * Reads a ListColumnVector with the specified schema {@literal array<struct<_col2_col0:string>>}.
 */
private static ArrayData readList(ListColumnVector listVector, int row) {
    // Start of this row's entries inside the flattened child vector.
    int offset = (int) listVector.offsets[row];
    StructColumnVector structChild = (StructColumnVector) listVector.child;
    BytesColumnVector valueChild = (BytesColumnVector) structChild.fields[0];
    // The test data holds two struct elements per list, at offset and offset + 1.
    StringData value1 = readStringData(valueChild, offset);
    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, value1);
    StringData value2 = readStringData(valueChild, offset + 1);
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, value2);
    return new GenericArrayData(new Object[] { arrayValue1, arrayValue2 });
}
Also used: GenericArrayData (org.apache.flink.table.data.GenericArrayData), StructColumnVector (org.apache.hadoop.hive.ql.exec.vector.StructColumnVector), BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector), GenericRowData (org.apache.flink.table.data.GenericRowData), BinaryStringData (org.apache.flink.table.data.binary.BinaryStringData), StringData (org.apache.flink.table.data.StringData)
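
As a supplement (not part of the Flink test above), a minimal sketch of how the ArrayData produced by readList can be consumed; the helper name firstElementValue is hypothetical, while ArrayData#getRow, RowData#getString and StringData are the real Flink table.data APIs:

import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.RowData;

private static String firstElementValue(ArrayData list) {
    // Each element is a struct<_col2_col0:string>, so getRow needs numFields = 1.
    RowData struct = list.getRow(0, 1);
    // Field 0 holds the string column; toString() yields the Java String.
    return struct.getString(0).toString();
}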

Example 2 with GenericArrayData

Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.

From the class ArrowReaderWriterTest, method getTestData.

@Override
public RowData[] getTestData() {
    RowData row1 = StreamRecordUtils.row((byte) 1, (short) 2, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(), DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000, TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000), new GenericArrayData(new StringData[] { StringData.fromString("hello"), StringData.fromString("中文"), null }), GenericRowData.of(1, StringData.fromString("hello"), new GenericArrayData(new StringData[] { StringData.fromString("hello") }), TimestampData.fromEpochMillis(3600000), GenericRowData.of(1, StringData.fromString("hello"))));
    BinaryRowData row2 = StreamRecordUtils.binaryrow((byte) 1, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(), DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000, Tuple2.of(TimestampData.fromEpochMillis(3600000), 0), Tuple2.of(TimestampData.fromEpochMillis(3600000), 2), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8), Tuple2.of(TimestampData.fromEpochMillis(3600000), 0), Tuple2.of(TimestampData.fromEpochMillis(3600000), 2), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8), Tuple2.of(new GenericArrayData(new String[] { null, null, null }), new ArrayDataSerializer(new VarCharType())), Tuple2.of(GenericRowData.of(1, null, new GenericArrayData(new StringData[] { StringData.fromString("hello") }), null, GenericRowData.of(1, StringData.fromString("hello"))), new RowDataSerializer(rowFieldType)));
    RowData row3 = StreamRecordUtils.row(null, (short) 2, 3, 4L, false, 1.0f, 1.0, "中文", "中文".getBytes(), DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000, TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000), TimestampData.fromEpochMillis(3600000, 100000), TimestampData.fromEpochMillis(3600000, 100000), new GenericArrayData(new String[] { null, null, null }), GenericRowData.of(1, null, new GenericArrayData(new StringData[] { StringData.fromString("hello") }), null, null));
    BinaryRowData row4 = StreamRecordUtils.binaryrow((byte) 1, null, 3, 4L, true, 1.0f, 1.0, "hello", "hello".getBytes(), DecimalData.fromUnscaledLong(1, 10, 3), 100, 3600000, 3600000, 3600000, 3600000, Tuple2.of(TimestampData.fromEpochMillis(3600000), 0), Tuple2.of(TimestampData.fromEpochMillis(3600000), 2), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8), Tuple2.of(TimestampData.fromEpochMillis(3600000), 0), Tuple2.of(TimestampData.fromEpochMillis(3600000), 2), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 4), Tuple2.of(TimestampData.fromEpochMillis(3600000, 100000), 8), Tuple2.of(new GenericArrayData(new StringData[] { StringData.fromString("hello"), StringData.fromString("中文"), null }), new ArrayDataSerializer(new VarCharType())), Tuple2.of(GenericRowData.of(1, null, new GenericArrayData(new StringData[] { StringData.fromString("hello") }), null, null), new RowDataSerializer(rowFieldType)));
    RowData row5 = StreamRecordUtils.row(new Object[fieldTypes.size()]);
    BinaryRowData row6 = StreamRecordUtils.binaryrow(new Object[fieldTypes.size()]);
    return new RowData[] { row1, row2, row3, row4, row5, row6 };
}
Also used: GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), GenericArrayData (org.apache.flink.table.data.GenericArrayData), VarCharType (org.apache.flink.table.types.logical.VarCharType), StringData (org.apache.flink.table.data.StringData), ArrayDataSerializer (org.apache.flink.table.runtime.typeutils.ArrayDataSerializer), RowDataSerializer (org.apache.flink.table.runtime.typeutils.RowDataSerializer)
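
As a supplement, a minimal sketch (with the hypothetical method name checkNames) of how the StringData[] array fields built above behave at runtime; GenericArrayData, StringData, size(), isNullAt() and getString() are the real Flink APIs:

import org.apache.flink.table.data.GenericArrayData;
import org.apache.flink.table.data.StringData;

private static void checkNames() {
    GenericArrayData names = new GenericArrayData(
            new StringData[] { StringData.fromString("hello"), StringData.fromString("中文"), null });
    int size = names.size();                 // 3 elements
    boolean lastIsNull = names.isNullAt(2);  // true, null elements are allowed
    StringData first = names.getString(0);   // "hello"
}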

Example 3 with GenericArrayData

Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.

From the class PostgresRowConverter, method createPostgresArrayConverter.

private JdbcDeserializationConverter createPostgresArrayConverter(ArrayType arrayType) {
    // PG's bytea[] is wrapped in PGobject, rather than primitive byte arrays
    if (arrayType.getElementType().is(LogicalTypeFamily.BINARY_STRING)) {
        final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
        final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType());
        return val -> {
            PgArray pgArray = (PgArray) val;
            Object[] in = (Object[]) pgArray.getArray();
            final Object[] array = (Object[]) Array.newInstance(elementClass, in.length);
            for (int i = 0; i < in.length; i++) {
                array[i] = elementConverter.deserialize(((PGobject) in[i]).getValue().getBytes());
            }
            return new GenericArrayData(array);
        };
    } else {
        final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
        final JdbcDeserializationConverter elementConverter = createNullableInternalConverter(arrayType.getElementType());
        return val -> {
            PgArray pgArray = (PgArray) val;
            Object[] in = (Object[]) pgArray.getArray();
            final Object[] array = (Object[]) Array.newInstance(elementClass, in.length);
            for (int i = 0; i < in.length; i++) {
                array[i] = elementConverter.deserialize(in[i]);
            }
            return new GenericArrayData(array);
        };
    }
}
Also used: PGobject (org.postgresql.util.PGobject), Array (java.lang.reflect.Array), LogicalType (org.apache.flink.table.types.logical.LogicalType), LogicalTypeFamily (org.apache.flink.table.types.logical.LogicalTypeFamily), GenericArrayData (org.apache.flink.table.data.GenericArrayData), AbstractJdbcRowConverter (org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter), ArrayType (org.apache.flink.table.types.logical.ArrayType), RowType (org.apache.flink.table.types.logical.RowType), LogicalTypeRoot (org.apache.flink.table.types.logical.LogicalTypeRoot), LogicalTypeUtils (org.apache.flink.table.types.logical.utils.LogicalTypeUtils), PgArray (org.postgresql.jdbc.PgArray)
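
As a supplement, a minimal sketch of the element-conversion pattern shared by both branches above, using a hypothetical text[] input and a String-to-StringData stand-in for the element converter; Array.newInstance and GenericArrayData are the real APIs, everything else is illustrative:

import java.lang.reflect.Array;
import org.apache.flink.table.data.GenericArrayData;
import org.apache.flink.table.data.StringData;

private static GenericArrayData convertTextArray(Object[] in) {
    // Allocate an array of the internal element class (StringData for VARCHAR/CHAR).
    Object[] array = (Object[]) Array.newInstance(StringData.class, in.length);
    for (int i = 0; i < in.length; i++) {
        // Stand-in for elementConverter.deserialize(...): keep nulls, map String to StringData.
        array[i] = in[i] == null ? null : StringData.fromString((String) in[i]);
    }
    return new GenericArrayData(array);
}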

Example 4 with GenericArrayData

Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.

From the class CanalJsonSerializationSchema, method serialize.

@Override
public byte[] serialize(RowData row) {
    try {
        StringData opType = rowKind2String(row.getRowKind());
        ArrayData arrayData = new GenericArrayData(new RowData[] { row });
        reuse.setField(0, arrayData);
        reuse.setField(1, opType);
        return jsonSerializer.serialize(reuse);
    } catch (Throwable t) {
        throw new RuntimeException("Could not serialize row '" + row + "'.", t);
    }
}
Also used: GenericArrayData (org.apache.flink.table.data.GenericArrayData), StringData (org.apache.flink.table.data.StringData), ArrayData (org.apache.flink.table.data.ArrayData)
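
As a supplement, a minimal sketch of the envelope that serialize builds before handing it to the JSON serializer: field 0 holds the data array wrapping the single row, field 1 the op type. The method name buildEnvelope and its arguments are hypothetical; GenericRowData, GenericArrayData and StringData are the real APIs:

import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.GenericArrayData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;

private static GenericRowData buildEnvelope(RowData row, StringData opType) {
    ArrayData data = new GenericArrayData(new RowData[] { row });
    GenericRowData envelope = new GenericRowData(2);
    envelope.setField(0, data);   // field 0: array with the single wrapped row
    envelope.setField(1, opType); // field 1: the op type string
    return envelope;
}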

Example 5 with GenericArrayData

Use of org.apache.flink.table.data.GenericArrayData in project flink by apache.

From the class AvroToRowDataConverters, method createArrayConverter.

private static AvroToRowDataConverter createArrayConverter(ArrayType arrayType) {
    final AvroToRowDataConverter elementConverter = createNullableConverter(arrayType.getElementType());
    final Class<?> elementClass = LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
    return avroObject -> {
        final List<?> list = (List<?>) avroObject;
        final int length = list.size();
        final Object[] array = (Object[]) Array.newInstance(elementClass, length);
        for (int i = 0; i < length; ++i) {
            array[i] = elementConverter.convert(list.get(i));
        }
        return new GenericArrayData(array);
    };
}
Also used: ChronoField (java.time.temporal.ChronoField), Array (java.lang.reflect.Array), GenericArrayData (org.apache.flink.table.data.GenericArrayData), HashMap (java.util.HashMap), RowType (org.apache.flink.table.types.logical.RowType), ByteBuffer (java.nio.ByteBuffer), GenericRowData (org.apache.flink.table.data.GenericRowData), DecimalType (org.apache.flink.table.types.logical.DecimalType), GenericMapData (org.apache.flink.table.data.GenericMapData), Map (java.util.Map), LocalTime (java.time.LocalTime), LogicalTypeUtils (org.apache.flink.table.types.logical.utils.LogicalTypeUtils), IndexedRecord (org.apache.avro.generic.IndexedRecord), GenericRecord (org.apache.avro.generic.GenericRecord), RowData (org.apache.flink.table.data.RowData), GenericFixed (org.apache.avro.generic.GenericFixed), TimestampData (org.apache.flink.table.data.TimestampData), DataTypes (org.apache.flink.table.api.DataTypes), DecimalData (org.apache.flink.table.data.DecimalData), ArrayType (org.apache.flink.table.types.logical.ArrayType), Instant (java.time.Instant), AvroSchemaConverter.extractValueTypeToAvroMap (org.apache.flink.formats.avro.typeutils.AvroSchemaConverter.extractValueTypeToAvroMap), Serializable (java.io.Serializable), StringData (org.apache.flink.table.data.StringData), List (java.util.List), LogicalType (org.apache.flink.table.types.logical.LogicalType), LocalDate (java.time.LocalDate), Internal (org.apache.flink.annotation.Internal)
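
As a supplement, a minimal sketch of the List-to-GenericArrayData pattern used above, with a hypothetical String-to-StringData stand-in for the schema-derived element converter; List, Array.newInstance, GenericArrayData and StringData are the real APIs:

import java.lang.reflect.Array;
import java.util.List;
import org.apache.flink.table.data.GenericArrayData;
import org.apache.flink.table.data.StringData;

private static GenericArrayData convertStringList(List<String> list) {
    // Allocate an array of the internal element class, as createArrayConverter does.
    Object[] array = (Object[]) Array.newInstance(StringData.class, list.size());
    for (int i = 0; i < list.size(); i++) {
        String element = list.get(i);
        // Stand-in for elementConverter.convert(...): nulls stay null.
        array[i] = element == null ? null : StringData.fromString(element);
    }
    return new GenericArrayData(array);
}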

Aggregations

GenericArrayData (org.apache.flink.table.data.GenericArrayData): 11 usages
GenericRowData (org.apache.flink.table.data.GenericRowData): 7 usages
StringData (org.apache.flink.table.data.StringData): 6 usages
ArrayType (org.apache.flink.table.types.logical.ArrayType): 6 usages
RowType (org.apache.flink.table.types.logical.RowType): 6 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 5 usages
Array (java.lang.reflect.Array): 4 usages
RowData (org.apache.flink.table.data.RowData): 4 usages
LogicalTypeUtils (org.apache.flink.table.types.logical.utils.LogicalTypeUtils): 4 usages
Serializable (java.io.Serializable): 3 usages
LocalTime (java.time.LocalTime): 3 usages
HashMap (java.util.HashMap): 3 usages
Map (java.util.Map): 3 usages
Internal (org.apache.flink.annotation.Internal): 3 usages
DecimalData (org.apache.flink.table.data.DecimalData): 3 usages
GenericMapData (org.apache.flink.table.data.GenericMapData): 3 usages
TimestampData (org.apache.flink.table.data.TimestampData): 3 usages
DecimalType (org.apache.flink.table.types.logical.DecimalType): 3 usages
IOException (java.io.IOException): 2 usages
BigDecimal (java.math.BigDecimal): 2 usages