Example 1 with ColumnVector

Use of org.apache.flink.table.data.columnar.vector.ColumnVector in project flink by apache.

From the class ParquetColumnarRowInputFormat, method createPartitionedFormat:

/**
 * Creates a partitioned {@link ParquetColumnarRowInputFormat}; the partition columns can be
 * generated from the {@link Path}.
 */
public static <SplitT extends FileSourceSplit> ParquetColumnarRowInputFormat<SplitT> createPartitionedFormat(
        Configuration hadoopConfig,
        RowType producedRowType,
        TypeInformation<RowData> producedTypeInfo,
        List<String> partitionKeys,
        PartitionFieldExtractor<SplitT> extractor,
        int batchSize,
        boolean isUtcTimestamp,
        boolean isCaseSensitive) {
    // TODO FLINK-25113 all this partition keys code should be pruned from the parquet format,
    // because now FileSystemTableSource uses FileInfoExtractorBulkFormat for reading partition
    // keys.
    RowType projectedRowType =
            new RowType(
                    producedRowType.getFields().stream()
                            .filter(field -> !partitionKeys.contains(field.getName()))
                            .collect(Collectors.toList()));
    List<String> projectedNames = projectedRowType.getFieldNames();
    ColumnBatchFactory<SplitT> factory = (SplitT split, ColumnVector[] parquetVectors) -> {
        // create and initialize the row batch
        ColumnVector[] vectors = new ColumnVector[producedRowType.getFieldCount()];
        for (int i = 0; i < vectors.length; i++) {
            RowType.RowField field = producedRowType.getFields().get(i);
            vectors[i] =
                    partitionKeys.contains(field.getName())
                            ? createVectorFromConstant(
                                    field.getType(),
                                    extractor.extract(split, field.getName(), field.getType()),
                                    batchSize)
                            : parquetVectors[projectedNames.indexOf(field.getName())];
        }
        return new VectorizedColumnBatch(vectors);
    };
    return new ParquetColumnarRowInputFormat<>(hadoopConfig, projectedRowType, producedTypeInfo, factory, batchSize, isUtcTimestamp, isCaseSensitive);
}
Also used : VectorizedColumnBatch(org.apache.flink.table.data.columnar.vector.VectorizedColumnBatch) RowType(org.apache.flink.table.types.logical.RowType) WritableColumnVector(org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector)
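
To make the factory's merging step concrete, here is a minimal, hedged sketch (hypothetical values, not taken from the Flink sources) that builds one batch in which a constant partition column sits next to an ordinary heap vector, mirroring what the ColumnBatchFactory above does for each split. It assumes the usual setNumRows accessor on VectorizedColumnBatch and reuses the createVectorFromConstant helper shown in Example 4.

// Hypothetical, self-contained illustration of the merge performed by the factory above.
static VectorizedColumnBatch constantPlusDataBatch(int batchSize) {
    HeapIntVector dataVector = new HeapIntVector(batchSize);
    // fill(...) broadcasts one value into every slot, just like a constant partition column.
    dataVector.fill(7);
    ColumnVector partitionVector =
            ParquetSplitReaderUtil.createVectorFromConstant(new IntType(), 2024, batchSize);
    // Column 0 comes from "real" data, column 1 from the split's constant partition value.
    VectorizedColumnBatch batch =
            new VectorizedColumnBatch(new ColumnVector[] {dataVector, partitionVector});
    batch.setNumRows(batchSize);
    return batch;
}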

Example 2 with ColumnVector

Use of org.apache.flink.table.data.columnar.vector.ColumnVector in project flink by apache.

From the class ParquetSplitReaderUtil, method genPartColumnarRowReader:

/**
 * Utility for generating a partitioned {@link ParquetColumnarRowSplitReader}.
 */
public static ParquetColumnarRowSplitReader genPartColumnarRowReader(
        boolean utcTimestamp,
        boolean caseSensitive,
        Configuration conf,
        String[] fullFieldNames,
        DataType[] fullFieldTypes,
        Map<String, Object> partitionSpec,
        int[] selectedFields,
        int batchSize,
        Path path,
        long splitStart,
        long splitLength)
        throws IOException {
    List<String> nonPartNames =
            Arrays.stream(fullFieldNames)
                    .filter(n -> !partitionSpec.containsKey(n))
                    .collect(Collectors.toList());
    List<String> selNonPartNames =
            Arrays.stream(selectedFields)
                    .mapToObj(i -> fullFieldNames[i])
                    .filter(nonPartNames::contains)
                    .collect(Collectors.toList());
    int[] selParquetFields = selNonPartNames.stream().mapToInt(nonPartNames::indexOf).toArray();
    ParquetColumnarRowSplitReader.ColumnBatchGenerator gen = readVectors -> {
        // create and initialize the row batch
        ColumnVector[] vectors = new ColumnVector[selectedFields.length];
        for (int i = 0; i < vectors.length; i++) {
            String name = fullFieldNames[selectedFields[i]];
            LogicalType type = fullFieldTypes[selectedFields[i]].getLogicalType();
            vectors[i] =
                    partitionSpec.containsKey(name)
                            ? createVectorFromConstant(type, partitionSpec.get(name), batchSize)
                            : readVectors[selNonPartNames.indexOf(name)];
        }
        return new VectorizedColumnBatch(vectors);
    };
    return new ParquetColumnarRowSplitReader(
            utcTimestamp,
            caseSensitive,
            conf,
            Arrays.stream(selParquetFields)
                    .mapToObj(i -> fullFieldTypes[i].getLogicalType())
                    .toArray(LogicalType[]::new),
            selNonPartNames.toArray(new String[0]),
            gen,
            batchSize,
            new org.apache.hadoop.fs.Path(path.toUri()),
            splitStart,
            splitLength);
}
Also used : PrimitiveType(org.apache.parquet.schema.PrimitiveType) HeapTimestampVector(org.apache.flink.table.data.columnar.vector.heap.HeapTimestampVector) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) Preconditions.checkArgument(org.apache.parquet.Preconditions.checkArgument) FloatColumnReader(org.apache.flink.formats.parquet.vector.reader.FloatColumnReader) VectorizedColumnBatch(org.apache.flink.table.data.columnar.vector.VectorizedColumnBatch) PageReader(org.apache.parquet.column.page.PageReader) HeapLongVector(org.apache.flink.table.data.columnar.vector.heap.HeapLongVector) HeapFloatVector(org.apache.flink.table.data.columnar.vector.heap.HeapFloatVector) IntColumnReader(org.apache.flink.formats.parquet.vector.reader.IntColumnReader) BigDecimal(java.math.BigDecimal) BytesColumnReader(org.apache.flink.formats.parquet.vector.reader.BytesColumnReader) DecimalType(org.apache.flink.table.types.logical.DecimalType) Path(org.apache.flink.core.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) TimestampColumnReader(org.apache.flink.formats.parquet.vector.reader.TimestampColumnReader) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) ShortColumnReader(org.apache.flink.formats.parquet.vector.reader.ShortColumnReader) WritableColumnVector(org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) HeapDoubleVector(org.apache.flink.table.data.columnar.vector.heap.HeapDoubleVector) DateTimeUtils.toInternal(org.apache.flink.table.utils.DateTimeUtils.toInternal) HeapIntVector(org.apache.flink.table.data.columnar.vector.heap.HeapIntVector) ColumnDescriptor(org.apache.parquet.column.ColumnDescriptor) FixedLenBytesColumnReader(org.apache.flink.formats.parquet.vector.reader.FixedLenBytesColumnReader) LocalDate(java.time.LocalDate) BooleanColumnReader(org.apache.flink.formats.parquet.vector.reader.BooleanColumnReader) ColumnReader(org.apache.flink.formats.parquet.vector.reader.ColumnReader) IntType(org.apache.flink.table.types.logical.IntType) LocalDateTime(java.time.LocalDateTime) HeapBytesVector(org.apache.flink.table.data.columnar.vector.heap.HeapBytesVector) ParquetSchemaConverter(org.apache.flink.formats.parquet.utils.ParquetSchemaConverter) HeapByteVector(org.apache.flink.table.data.columnar.vector.heap.HeapByteVector) OriginalType(org.apache.parquet.schema.OriginalType) TimestampData(org.apache.flink.table.data.TimestampData) BigIntType(org.apache.flink.table.types.logical.BigIntType) ByteColumnReader(org.apache.flink.formats.parquet.vector.reader.ByteColumnReader) LongColumnReader(org.apache.flink.formats.parquet.vector.reader.LongColumnReader) DecimalData(org.apache.flink.table.data.DecimalData) IOException(java.io.IOException) Date(java.sql.Date) DoubleColumnReader(org.apache.flink.formats.parquet.vector.reader.DoubleColumnReader) HeapShortVector(org.apache.flink.table.data.columnar.vector.heap.HeapShortVector) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) HeapBooleanVector(org.apache.flink.table.data.columnar.vector.heap.HeapBooleanVector) VectorizedColumnBatch(org.apache.flink.table.data.columnar.vector.VectorizedColumnBatch) LogicalType(org.apache.flink.table.types.logical.LogicalType)
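
The index bookkeeping above is easiest to follow with concrete values. The following hedged, standalone fragment (field names are hypothetical; only java.util and java.util.stream are needed) replays the same stream pipeline: only non-partition columns are requested from Parquet, and selParquetFields records where each selected column sits among them.

// Hypothetical schema: "amount" and "name" live in the Parquet file, "dt" is a partition column.
String[] fullFieldNames = {"amount", "dt", "name"};
Map<String, Object> partitionSpec = Collections.singletonMap("dt", "2024-01-01");
// Project name, dt, amount, in that order.
int[] selectedFields = {2, 1, 0};

List<String> nonPartNames = Arrays.stream(fullFieldNames)
        .filter(n -> !partitionSpec.containsKey(n))
        .collect(Collectors.toList());          // [amount, name]
List<String> selNonPartNames = Arrays.stream(selectedFields)
        .mapToObj(i -> fullFieldNames[i])
        .filter(nonPartNames::contains)
        .collect(Collectors.toList());          // [name, amount]
int[] selParquetFields = selNonPartNames.stream()
        .mapToInt(nonPartNames::indexOf)
        .toArray();                             // [1, 0]
// "dt" never reaches Parquet; its value comes from partitionSpec inside the batch generator.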

Example 3 with ColumnVector

Use of org.apache.flink.table.data.columnar.vector.ColumnVector in project flink by apache.

From the class ArrowUtilsTest, method testCreateArrowReader:

@Test
public void testCreateArrowReader() {
    VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
    ArrowReader reader = ArrowUtils.createArrowReader(root, rowType);
    ColumnVector[] columnVectors = reader.getColumnVectors();
    for (int i = 0; i < columnVectors.length; i++) {
        assertEquals(testFields.get(i).f4, columnVectors[i].getClass());
    }
}
Also used : VectorSchemaRoot(org.apache.arrow.vector.VectorSchemaRoot) ArrowDecimalColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDecimalColumnVector) ArrowBigIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowBigIntColumnVector) ArrowVarBinaryColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowVarBinaryColumnVector) ArrowVarCharColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowVarCharColumnVector) ArrowTimestampColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTimestampColumnVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) ArrowIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowIntColumnVector) ArrowRowColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowRowColumnVector) ArrowSmallIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowSmallIntColumnVector) ArrowFloatColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowFloatColumnVector) ArrowDoubleColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDoubleColumnVector) ArrowTinyIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTinyIntColumnVector) ArrowArrayColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowArrayColumnVector) ArrowDateColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDateColumnVector) ArrowBooleanColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowBooleanColumnVector) ArrowTimeColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTimeColumnVector) Test(org.junit.Test)
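
For context, a minimal sketch of the fixture such a test plausibly builds on; the concrete rowType, allocator, and field layout here are assumptions for illustration, not copied from ArrowUtilsTest, and imports (RootAllocator, RowType, and so on) are omitted as in the snippets above.

// Hypothetical fixture: an Arrow allocator plus a two-field row type.
BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
RowType rowType = RowType.of(new IntType(), new BigIntType());
VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
ArrowReader reader = ArrowUtils.createArrowReader(root, rowType);
// One ColumnVector per field, e.g. an ArrowIntColumnVector for the INT field
// and an ArrowBigIntColumnVector for the BIGINT field.
ColumnVector[] columnVectors = reader.getColumnVectors();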

Example 4 with ColumnVector

Use of org.apache.flink.table.data.columnar.vector.ColumnVector in project flink by apache.

From the class ParquetSplitReaderUtil, method createVectorFromConstant:

public static ColumnVector createVectorFromConstant(LogicalType type, Object value, int batchSize) {
    switch(type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
        case BINARY:
        case VARBINARY:
            HeapBytesVector bsv = new HeapBytesVector(batchSize);
            if (value == null) {
                bsv.fillWithNulls();
            } else {
                bsv.fill(value instanceof byte[] ? (byte[]) value : value.toString().getBytes(StandardCharsets.UTF_8));
            }
            return bsv;
        case BOOLEAN:
            HeapBooleanVector bv = new HeapBooleanVector(batchSize);
            if (value == null) {
                bv.fillWithNulls();
            } else {
                bv.fill((boolean) value);
            }
            return bv;
        case TINYINT:
            HeapByteVector byteVector = new HeapByteVector(batchSize);
            if (value == null) {
                byteVector.fillWithNulls();
            } else {
                byteVector.fill(((Number) value).byteValue());
            }
            return byteVector;
        case SMALLINT:
            HeapShortVector sv = new HeapShortVector(batchSize);
            if (value == null) {
                sv.fillWithNulls();
            } else {
                sv.fill(((Number) value).shortValue());
            }
            return sv;
        case INTEGER:
            HeapIntVector iv = new HeapIntVector(batchSize);
            if (value == null) {
                iv.fillWithNulls();
            } else {
                iv.fill(((Number) value).intValue());
            }
            return iv;
        case BIGINT:
            HeapLongVector lv = new HeapLongVector(batchSize);
            if (value == null) {
                lv.fillWithNulls();
            } else {
                lv.fill(((Number) value).longValue());
            }
            return lv;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            int precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            DecimalData decimal = value == null ? null : Preconditions.checkNotNull(DecimalData.fromBigDecimal((BigDecimal) value, precision, scale));
            ColumnVector internalVector;
            if (ParquetSchemaConverter.is32BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new IntType(), decimal == null ? null : (int) decimal.toUnscaledLong(), batchSize);
            } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
                internalVector = createVectorFromConstant(new BigIntType(), decimal == null ? null : decimal.toUnscaledLong(), batchSize);
            } else {
                internalVector = createVectorFromConstant(new VarBinaryType(), decimal == null ? null : decimal.toUnscaledBytes(), batchSize);
            }
            return new ParquetDecimalVector(internalVector);
        case FLOAT:
            HeapFloatVector fv = new HeapFloatVector(batchSize);
            if (value == null) {
                fv.fillWithNulls();
            } else {
                fv.fill(((Number) value).floatValue());
            }
            return fv;
        case DOUBLE:
            HeapDoubleVector dv = new HeapDoubleVector(batchSize);
            if (value == null) {
                dv.fillWithNulls();
            } else {
                dv.fill(((Number) value).doubleValue());
            }
            return dv;
        case DATE:
            if (value instanceof LocalDate) {
                value = Date.valueOf((LocalDate) value);
            }
            return createVectorFromConstant(new IntType(), value == null ? null : toInternal((Date) value), batchSize);
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            HeapTimestampVector tv = new HeapTimestampVector(batchSize);
            if (value == null) {
                tv.fillWithNulls();
            } else {
                tv.fill(TimestampData.fromLocalDateTime((LocalDateTime) value));
            }
            return tv;
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
Also used : HeapShortVector(org.apache.flink.table.data.columnar.vector.heap.HeapShortVector) HeapLongVector(org.apache.flink.table.data.columnar.vector.heap.HeapLongVector) LocalDateTime(java.time.LocalDateTime) VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) HeapByteVector(org.apache.flink.table.data.columnar.vector.heap.HeapByteVector) HeapDoubleVector(org.apache.flink.table.data.columnar.vector.heap.HeapDoubleVector) HeapTimestampVector(org.apache.flink.table.data.columnar.vector.heap.HeapTimestampVector) HeapBytesVector(org.apache.flink.table.data.columnar.vector.heap.HeapBytesVector) HeapIntVector(org.apache.flink.table.data.columnar.vector.heap.HeapIntVector) BigIntType(org.apache.flink.table.types.logical.BigIntType) LocalDate(java.time.LocalDate) HeapBooleanVector(org.apache.flink.table.data.columnar.vector.heap.HeapBooleanVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) WritableColumnVector(org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector) IntType(org.apache.flink.table.types.logical.IntType) BigIntType(org.apache.flink.table.types.logical.BigIntType) DecimalData(org.apache.flink.table.data.DecimalData) DecimalType(org.apache.flink.table.types.logical.DecimalType) HeapFloatVector(org.apache.flink.table.data.columnar.vector.heap.HeapFloatVector)
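
A few hedged usage sketches (the values are illustrative) show how the branches above behave: a null constant yields an all-null vector, a non-null constant is broadcast to every slot, and for DECIMAL the precision decides whether the constant is stored as an int, a long, or unscaled bytes inside a ParquetDecimalVector.

// Non-null INT constant: all 1024 slots hold 42.
ColumnVector intConstant =
        ParquetSplitReaderUtil.createVectorFromConstant(new IntType(), 42, 1024);

// Null VARCHAR constant: the HeapBytesVector is filled with nulls.
ColumnVector nullStrings =
        ParquetSplitReaderUtil.createVectorFromConstant(new VarCharType(), null, 1024);

// DECIMAL(6, 2) fits into 32 bits, so the is32BitDecimal branch stores the unscaled
// value in a heap int vector wrapped by ParquetDecimalVector.
ColumnVector smallDecimal = ParquetSplitReaderUtil.createVectorFromConstant(
        new DecimalType(6, 2), new BigDecimal("1234.56"), 1024);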

Example 5 with ColumnVector

Use of org.apache.flink.table.data.columnar.vector.ColumnVector in project flink by apache.

From the class ArrowUtils, method createColumnVector:

public static ColumnVector createColumnVector(ValueVector vector, LogicalType fieldType) {
    if (vector instanceof TinyIntVector) {
        return new ArrowTinyIntColumnVector((TinyIntVector) vector);
    } else if (vector instanceof SmallIntVector) {
        return new ArrowSmallIntColumnVector((SmallIntVector) vector);
    } else if (vector instanceof IntVector) {
        return new ArrowIntColumnVector((IntVector) vector);
    } else if (vector instanceof BigIntVector) {
        return new ArrowBigIntColumnVector((BigIntVector) vector);
    } else if (vector instanceof BitVector) {
        return new ArrowBooleanColumnVector((BitVector) vector);
    } else if (vector instanceof Float4Vector) {
        return new ArrowFloatColumnVector((Float4Vector) vector);
    } else if (vector instanceof Float8Vector) {
        return new ArrowDoubleColumnVector((Float8Vector) vector);
    } else if (vector instanceof VarCharVector) {
        return new ArrowVarCharColumnVector((VarCharVector) vector);
    } else if (vector instanceof VarBinaryVector) {
        return new ArrowVarBinaryColumnVector((VarBinaryVector) vector);
    } else if (vector instanceof DecimalVector) {
        return new ArrowDecimalColumnVector((DecimalVector) vector);
    } else if (vector instanceof DateDayVector) {
        return new ArrowDateColumnVector((DateDayVector) vector);
    } else if (vector instanceof TimeSecVector || vector instanceof TimeMilliVector || vector instanceof TimeMicroVector || vector instanceof TimeNanoVector) {
        return new ArrowTimeColumnVector(vector);
    } else if (vector instanceof TimeStampVector && ((ArrowType.Timestamp) vector.getField().getType()).getTimezone() == null) {
        return new ArrowTimestampColumnVector(vector);
    } else if (vector instanceof ListVector) {
        ListVector listVector = (ListVector) vector;
        return new ArrowArrayColumnVector(listVector, createColumnVector(listVector.getDataVector(), ((ArrayType) fieldType).getElementType()));
    } else if (vector instanceof StructVector) {
        StructVector structVector = (StructVector) vector;
        ColumnVector[] fieldColumns = new ColumnVector[structVector.size()];
        for (int i = 0; i < fieldColumns.length; ++i) {
            fieldColumns[i] = createColumnVector(structVector.getVectorById(i), ((RowType) fieldType).getTypeAt(i));
        }
        return new ArrowRowColumnVector(structVector, fieldColumns);
    } else {
        throw new UnsupportedOperationException(String.format("Unsupported type %s.", fieldType));
    }
}
Also used : BitVector(org.apache.arrow.vector.BitVector) StructVector(org.apache.arrow.vector.complex.StructVector) Float4Vector(org.apache.arrow.vector.Float4Vector) TimeSecVector(org.apache.arrow.vector.TimeSecVector) ArrowArrayColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowArrayColumnVector) ArrowTinyIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTinyIntColumnVector) ArrowIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowIntColumnVector) TimeMilliVector(org.apache.arrow.vector.TimeMilliVector) RowType(org.apache.flink.table.types.logical.RowType) ArrowBooleanColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowBooleanColumnVector) VarBinaryVector(org.apache.arrow.vector.VarBinaryVector) ArrowDoubleColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDoubleColumnVector) ArrowDateColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDateColumnVector) ArrowSmallIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowSmallIntColumnVector) DateDayVector(org.apache.arrow.vector.DateDayVector) ArrowVarBinaryColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowVarBinaryColumnVector) ArrowDecimalColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowDecimalColumnVector) ArrowFloatColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowFloatColumnVector) DecimalVector(org.apache.arrow.vector.DecimalVector) ArrowTimeColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTimeColumnVector) TimeNanoVector(org.apache.arrow.vector.TimeNanoVector) SmallIntVector(org.apache.arrow.vector.SmallIntVector) BigIntVector(org.apache.arrow.vector.BigIntVector) TinyIntVector(org.apache.arrow.vector.TinyIntVector) IntVector(org.apache.arrow.vector.IntVector) Float8Vector(org.apache.arrow.vector.Float8Vector) VarCharVector(org.apache.arrow.vector.VarCharVector) ArrowVarCharColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowVarCharColumnVector) ArrowBigIntColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowBigIntColumnVector) ArrowTimestampColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowTimestampColumnVector) ColumnVector(org.apache.flink.table.data.columnar.vector.ColumnVector) ArrowRowColumnVector(org.apache.flink.table.runtime.arrow.vectors.ArrowRowColumnVector) TimeStampVector(org.apache.arrow.vector.TimeStampVector) TimeMicroVector(org.apache.arrow.vector.TimeMicroVector) ListVector(org.apache.arrow.vector.complex.ListVector)
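
As a minimal, hedged usage sketch (the field name and values are made up), a hand-built Arrow IntVector can be passed through this factory method and then read through Flink's columnar interfaces:

// Hypothetical usage: populate a raw Arrow IntVector, then wrap it for Flink.
BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
IntVector arrowVector = new IntVector("f0", allocator);
arrowVector.allocateNew(2);
arrowVector.set(0, 7);
arrowVector.setNull(1);
arrowVector.setValueCount(2);

// Dispatches on the vector class and returns an ArrowIntColumnVector.
ColumnVector flinkVector = ArrowUtils.createColumnVector(arrowVector, new IntType());
// Reads go through the columnar view: row 0 holds 7, row 1 is null (isNullAt(1) is true).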

Aggregations

ColumnVector (org.apache.flink.table.data.columnar.vector.ColumnVector): 5 usages
WritableColumnVector (org.apache.flink.table.data.columnar.vector.writable.WritableColumnVector): 3 usages
LocalDate (java.time.LocalDate): 2 usages
LocalDateTime (java.time.LocalDateTime): 2 usages
DecimalData (org.apache.flink.table.data.DecimalData): 2 usages
VectorizedColumnBatch (org.apache.flink.table.data.columnar.vector.VectorizedColumnBatch): 2 usages
HeapBooleanVector (org.apache.flink.table.data.columnar.vector.heap.HeapBooleanVector): 2 usages
HeapByteVector (org.apache.flink.table.data.columnar.vector.heap.HeapByteVector): 2 usages
HeapBytesVector (org.apache.flink.table.data.columnar.vector.heap.HeapBytesVector): 2 usages
HeapDoubleVector (org.apache.flink.table.data.columnar.vector.heap.HeapDoubleVector): 2 usages
HeapFloatVector (org.apache.flink.table.data.columnar.vector.heap.HeapFloatVector): 2 usages
HeapIntVector (org.apache.flink.table.data.columnar.vector.heap.HeapIntVector): 2 usages
HeapLongVector (org.apache.flink.table.data.columnar.vector.heap.HeapLongVector): 2 usages
HeapShortVector (org.apache.flink.table.data.columnar.vector.heap.HeapShortVector): 2 usages
HeapTimestampVector (org.apache.flink.table.data.columnar.vector.heap.HeapTimestampVector): 2 usages
ArrowArrayColumnVector (org.apache.flink.table.runtime.arrow.vectors.ArrowArrayColumnVector): 2 usages
ArrowBigIntColumnVector (org.apache.flink.table.runtime.arrow.vectors.ArrowBigIntColumnVector): 2 usages
IOException (java.io.IOException): 1 usage
BigDecimal (java.math.BigDecimal): 1 usage
StandardCharsets (java.nio.charset.StandardCharsets): 1 usage