Use of org.apache.drill.exec.proto.UserBitShared.SerializedField in the Apache Drill project: the reconstructContainer method of the WritableBatch class.
/**
 * Rebuilds the value vectors of {@code container} from this batch's buffers.
 * All individual buffers are copied into one compound buffer allocated from
 * {@code allocator}; each vector is then loaded from its slice of that
 * compound buffer, as described by this batch's RecordBatchDef ({@code def}).
 * The batch's original buffers are released in the process.
 *
 * @param allocator allocator used to create the compound buffer
 * @param container container whose vectors are (re)loaded; its schema and
 *                  value count are rebuilt from {@code def}
 * @throws IllegalStateException if this batch was already cleared
 */
public void reconstructContainer(BufferAllocator allocator, VectorContainer container) {
Preconditions.checkState(!cleared, "Attempted to reconstruct a container from a WritableBatch after it had been cleared");
// If we have DrillBuf's associated with value vectors
if (buffers.length > 0) {
// Total size of the compound buffer is the sum of all buffer capacities.
int len = 0;
for (DrillBuf b : buffers) {
len += b.capacity();
}
DrillBuf newBuf = allocator.buffer(len);
try {
// Copy data from each buffer into the compound buffer
int offset = 0;
for (DrillBuf buf : buffers) {
newBuf.setBytes(offset, buf);
offset += buf.capacity();
// Source buffer is no longer needed once its bytes are copied.
buf.release();
}
List<SerializedField> fields = def.getFieldList();
int bufferOffset = 0;
// For each value vector slice up the appropriate size from the
// compound buffer and load it into the value vector
// NOTE(review): assumes fields and the container's vectors are in the
// same order and of equal length — relies on how def was built.
int vectorIndex = 0;
for (VectorWrapper<?> vv : container) {
SerializedField fmd = fields.get(vectorIndex);
ValueVector v = vv.getValueVector();
// Slice covers exactly this vector's serialized bytes.
DrillBuf bb = newBuf.slice(bufferOffset, fmd.getBufferLength());
v.load(fmd, bb);
vectorIndex++;
bufferOffset += fmd.getBufferLength();
}
} finally {
// Any vectors that loaded material from newBuf slices above will retain those.
newBuf.release(1);
}
}
// Selection-vector mode comes from the batch definition's SV2 flag.
SelectionVectorMode svMode;
if (def.hasCarriesTwoByteSelectionVector() && def.getCarriesTwoByteSelectionVector()) {
svMode = SelectionVectorMode.TWO_BYTE;
} else {
svMode = SelectionVectorMode.NONE;
}
container.buildSchema(svMode);
container.setValueCount(def.getRecordCount());
}
Use of org.apache.drill.exec.proto.UserBitShared.SerializedField in the Apache Drill project: the getBatchNoHV method of the WritableBatch class.
/**
 * Builds a WritableBatch (without a hyper-vector) from the given value
 * vectors. Ownership of every vector's underlying buffers is transferred to
 * the returned batch and each vector is cleared.
 *
 * @param recordCount number of records recorded in the batch definition
 * @param vectors     vectors whose metadata and buffers make up the batch
 * @param isSV2       whether the batch carries a two-byte selection vector
 * @return the assembled batch
 */
public static WritableBatch getBatchNoHV(int recordCount, Iterable<ValueVector> vectors, boolean isSV2) {
  final List<DrillBuf> buffers = new ArrayList<>();
  final List<SerializedField> metadata = new ArrayList<>();
  for (final ValueVector vector : vectors) {
    metadata.add(vector.getMetadata());
    if (recordCount != 0) {
      // Only harvest buffers when there are records; with zero records the
      // buffers may be dead buffers.
      for (final DrillBuf buffer : vector.getBuffers(true)) {
        buffers.add(buffer);
      }
    }
    // Either way, strip the vector's access to its buffers.
    vector.clear();
  }
  final RecordBatchDef batchDef = RecordBatchDef.newBuilder()
      .addAllField(metadata)
      .setRecordCount(recordCount)
      .setCarriesTwoByteSelectionVector(isSV2)
      .build();
  return new WritableBatch(batchDef, buffers);
}
Use of org.apache.drill.exec.proto.UserBitShared.SerializedField in the Apache Drill project: the getMetadata method of the AbstractRepeatedMapVector class.
/**
 * Serializes this vector's metadata: the field description plus total buffer
 * length and value count, with the offsets vector first among the children,
 * followed by every child vector's metadata.
 *
 * @return the serialized field describing this vector and its children
 */
@Override
public SerializedField getMetadata() {
  final SerializedField.Builder metadataBuilder = getField().getAsBuilder()
      .setBufferLength(getBufferSize())
      .setValueCount(getAccessor().getValueCount());
  // Offsets are serialized ahead of the map's children.
  metadataBuilder.addChild(offsets.getMetadata());
  for (final ValueVector childVector : getChildren()) {
    metadataBuilder.addChild(childVector.getMetadata());
  }
  return metadataBuilder.build();
}
Use of org.apache.drill.exec.proto.UserBitShared.SerializedField in the Apache Drill project: the load method of the MapVector class.
/**
 * Loads this map vector's children from serialized metadata and a backing
 * buffer. Child vectors missing from this map are created on the fly; each
 * child is loaded from its own slice of {@code buf}, laid out back-to-back
 * in metadata order.
 *
 * @param metadata serialized description of this map and its children
 * @param buf      buffer holding the children's data, concatenated
 */
@Override
public void load(SerializedField metadata, DrillBuf buf) {
  valueCount = metadata.getValueCount();
  int offset = 0;
  for (final SerializedField childField : metadata.getChildList()) {
    final MaterializedField childDef = MaterializedField.create(childField);
    ValueVector childVector = getChild(childDef.getName());
    if (childVector == null) {
      // No matching child vector exists yet; create one of the proper type.
      childVector = BasicTypeHelper.getNewVector(childDef, allocator);
      putChild(childDef.getName(), childVector);
    }
    final int childLength = childField.getBufferLength();
    if (childField.getValueCount() == 0) {
      childVector.clear();
    } else {
      childVector.load(childField, buf.slice(offset, childLength));
    }
    offset += childLength;
  }
  // The children must account for exactly the bytes written into the buffer.
  assert offset == buf.writerIndex();
}
Use of org.apache.drill.exec.proto.UserBitShared.SerializedField in the Apache Drill project: the readVectors method of the VectorAccessibleSerializable class.
/**
 * Deserializes the value vectors described by {@code batchDef} from the
 * input stream into a fresh VectorContainer, which becomes this object's
 * vector accessible ({@code va}).
 *
 * @param input    stream positioned at the serialized vector data
 * @param batchDef definition listing each vector's field and buffer length
 * @throws IOException if reading from the stream fails
 */
private void readVectors(InputStream input, RecordBatchDef batchDef) throws IOException {
  final List<ValueVector> vectors = Lists.newArrayList();
  for (final SerializedField serializedField : batchDef.getFieldList()) {
    final MaterializedField field = MaterializedField.create(serializedField);
    // Pull this vector's serialized bytes straight off the stream.
    final DrillBuf vectorData = allocator.read(serializedField.getBufferLength(), input);
    final ValueVector vector = TypeHelper.getNewVector(field, allocator);
    vector.load(serializedField, vectorData);
    // load() retained the buffer, so drop our reference to it.
    vectorData.release();
    vectors.add(vector);
  }
  final VectorContainer container = new VectorContainer();
  container.addCollection(vectors);
  container.buildSchema(svMode);
  container.setRecordCount(recordCount);
  va = container;
}
Aggregations