Use of org.apache.flink.table.data.columnar.vector.writable.WritableLongVector in project flink by apache.
The class FixedLenBytesColumnReader, method readBatchFromDictionaryIds.
@Override
protected void readBatchFromDictionaryIds(
        int rowId, int num, VECTOR column, WritableIntVector dictionaryIds) {
    if (ParquetSchemaConverter.is32BitDecimal(precision)) {
        // Small-precision decimals are narrowed into an int vector.
        WritableIntVector intVector = (WritableIntVector) column;
        for (int i = rowId; i < rowId + num; ++i) {
            if (!intVector.isNullAt(i)) {
                Binary v = dictionary.decodeToBinary(dictionaryIds.getInt(i));
                intVector.setInt(i, (int) heapBinaryToLong(v));
            }
        }
    } else if (ParquetSchemaConverter.is64BitDecimal(precision)) {
        // Decimals that fit into 64 bits go into a long vector.
        WritableLongVector longVector = (WritableLongVector) column;
        for (int i = rowId; i < rowId + num; ++i) {
            if (!longVector.isNullAt(i)) {
                Binary v = dictionary.decodeToBinary(dictionaryIds.getInt(i));
                longVector.setLong(i, heapBinaryToLong(v));
            }
        }
    } else {
        // Larger precisions keep the raw fixed-length bytes.
        WritableBytesVector bytesVector = (WritableBytesVector) column;
        for (int i = rowId; i < rowId + num; ++i) {
            if (!bytesVector.isNullAt(i)) {
                byte[] v = dictionary.decodeToBinary(dictionaryIds.getInt(i)).getBytes();
                bytesVector.appendBytes(i, v, 0, v.length);
            }
        }
    }
}
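The first two branches rely on heapBinaryToLong, defined elsewhere in FixedLenBytesColumnReader, to turn a fixed-length binary into a numeric value: Parquet encodes such decimals as big-endian two's-complement byte arrays, and precisions up to 9 digits fit in an int while precisions up to 18 fit in a long, which is why the reader narrows the result accordingly. Below is a minimal standalone sketch of that decoding idea; the class name FixedDecimalDecodeSketch and the helper decodeFixedToLong are illustrative assumptions, not the reader's actual code.

import org.apache.parquet.io.api.Binary;

public class FixedDecimalDecodeSketch {

    // Sketch: interpret a fixed-length big-endian two's-complement byte array
    // (how Parquet stores DECIMAL as FIXED_LEN_BYTE_ARRAY) as a signed long.
    // The real reader uses its own heapBinaryToLong; this is only illustrative.
    static long decodeFixedToLong(Binary binary) {
        byte[] bytes = binary.getBytesUnsafe();
        // Start from the first byte so the sign bit is preserved, then shift in the rest.
        long value = bytes.length == 0 ? 0L : (long) bytes[0];
        for (int i = 1; i < bytes.length; i++) {
            value = (value << 8) | (bytes[i] & 0xFFL);
        }
        return value;
    }

    public static void main(String[] args) {
        // -2 encoded on 4 bytes, big-endian two's complement.
        Binary b = Binary.fromConstantByteArray(
                new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFE});
        System.out.println(decodeFixedToLong(b)); // prints -2
    }
}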