Example usage of org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData in the Apache Hive project: the returnData method of the SerDeEncodedDataReader class.
@Override
public void returnData(OrcEncodedColumnBatch ecb) {
  // Walk every column that carries data and release its streams: each stream whose
  // refcount drops to zero has its cache buffers unpinned and is recycled via the pool.
  for (int col = 0; col < ecb.getTotalColCount(); ++col) {
    if (!ecb.hasData(col)) {
      continue;
    }
    // TODO: reuse columnvector-s on hasBatch - save the array by column? take apart each list.
    for (ColumnStreamData stream : ecb.getColumnData(col)) {
      // Only the last reference holder performs the release.
      if (stream != null && stream.decRef() == 0) {
        if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) {
          for (MemoryBuffer buf : stream.getCacheBuffers()) {
            LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf);
          }
        }
        bufferManager.decRefBuffers(stream.getCacheBuffers());
        CSD_POOL.offer(stream);
      }
    }
  }
  // We can offer ECB even with some streams not discarded; reset() will clear the arrays.
  ECB_POOL.offer(ecb);
}
Example usage of org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData in the Apache Hive project: the returnData method of the OrcEncodedDataReader class.
@Override
public void returnData(OrcEncodedColumnBatch ecb) {
  // Return all column stream data to the pools. A stream is released only when its
  // refcount reaches zero; its cache buffers are then unpinned in the buffer manager.
  int colCount = ecb.getTotalColCount();
  for (int colIx = 0; colIx < colCount; ++colIx) {
    if (!ecb.hasData(colIx)) {
      continue;
    }
    ColumnStreamData[] streams = ecb.getColumnData(colIx);
    for (ColumnStreamData stream : streams) {
      // Skip empty slots and streams still referenced elsewhere.
      if (stream == null || stream.decRef() != 0) {
        continue;
      }
      if (LlapIoImpl.LOCKING_LOGGER.isTraceEnabled()) {
        for (MemoryBuffer buf : stream.getCacheBuffers()) {
          LlapIoImpl.LOCKING_LOGGER.trace("Unlocking {} at the end of processing", buf);
        }
      }
      bufferManager.decRefBuffers(stream.getCacheBuffers());
      CSD_POOL.offer(stream);
    }
  }
  // We can offer ECB even with some streams not discarded; reset() will clear the arrays.
  ECB_POOL.offer(ecb);
}
Aggregations