Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
The class ScanBatch, method close().
@Override
public void close() throws Exception {
  container.clear();
  // Release the buffers held by each implicit-column vector.
  for (final ValueVector v : implicitVectors.values()) {
    v.clear();
  }
  mutator.clear();
  currentReader.close();
}
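For context, a minimal sketch of the ValueVector lifecycle that close() winds down: allocate, populate, read, then clear() to return the underlying buffers to the allocator. This is a sketch assuming Drill 1.x APIs; the allocator setup, field name, and values are illustrative.

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.IntVector;

public class VectorLifecycleSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      MaterializedField field = MaterializedField.create("id", Types.required(MinorType.INT));
      IntVector vector = new IntVector(field, allocator);
      vector.allocateNew(4);
      for (int i = 0; i < 4; i++) {
        vector.getMutator().setSafe(i, i * 10);
      }
      vector.getMutator().setValueCount(4);
      System.out.println(vector.getAccessor().get(2)); // prints 20
      // clear() releases the vector's buffers back to the allocator, which is
      // what ScanBatch.close() does for each implicit vector above.
      vector.clear();
    }
  }
}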
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
The class FilterRecordBatch, method generateSV4Filterer().
protected Filterer generateSV4Filterer() throws SchemaChangeException {
  final ErrorCollector collector = new ErrorCollectorImpl();
  final List<TransferPair> transfers = Lists.newArrayList();
  final ClassGenerator<Filterer> cg = CodeGenerator.getRoot(
      Filterer.TEMPLATE_DEFINITION4, context.getFunctionRegistry(), context.getOptions());
  final LogicalExpression expr = ExpressionTreeMaterializer.materialize(
      popConfig.getExpr(), incoming, collector, context.getFunctionRegistry());
  if (collector.hasErrors()) {
    throw new SchemaChangeException(String.format(
        "Failure while trying to materialize incoming schema. Errors:\n %s.",
        collector.toErrorString()));
  }
  cg.addExpr(new ReturnValueExpression(expr), ClassGenerator.BlkCreateMode.FALSE);
  // Create a transfer pair for every incoming vector so the generated filter
  // can move matching records into the output container without copying.
  for (final VectorWrapper<?> vw : incoming) {
    for (final ValueVector vv : vw.getValueVectors()) {
      final TransferPair pair = vv.getTransferPair(oContext.getAllocator());
      container.add(pair.getTo());
      transfers.add(pair);
    }
  }
  // Allocate the outgoing SV4.
  container.buildSchema(SelectionVectorMode.FOUR_BYTE);
  try {
    final TransferPair[] tx = transfers.toArray(new TransferPair[transfers.size()]);
    final Filterer filter = context.getImplementationClass(cg);
    filter.setup(context, incoming, this, tx);
    return filter;
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}
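The ValueVector work here is getTransferPair(): it creates an empty target vector of the same type, and transfer() later moves buffer ownership from source to target without copying data. A minimal sketch of the mechanism, assuming Drill 1.x APIs (the field name and value are illustrative):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.vector.IntVector;

public class TransferPairSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      MaterializedField field = MaterializedField.create("n", Types.required(MinorType.INT));
      IntVector source = new IntVector(field, allocator);
      source.allocateNew(1);
      source.getMutator().setSafe(0, 7);
      source.getMutator().setValueCount(1);
      // getTransferPair() creates an empty vector of the same type; transfer()
      // moves the buffers from source to target instead of copying them.
      TransferPair pair = source.getTransferPair(allocator);
      pair.transfer();
      IntVector target = (IntVector) pair.getTo();
      System.out.println(target.getAccessor().get(0)); // prints 7
      target.clear();
      source.clear();
    }
  }
}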
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
The class TopNBatch, method buildSchema().
@Override
public void buildSchema() throws SchemaChangeException {
  VectorContainer c = new VectorContainer(oContext);
  IterOutcome outcome = next(incoming);
  switch (outcome) {
    case OK:
    case OK_NEW_SCHEMA:
      // Mirror the incoming vectors into a scratch container, canonicalize the
      // field order, then build an empty output schema from the result.
      for (VectorWrapper<?> w : incoming) {
        @SuppressWarnings("resource")
        ValueVector v = c.addOrGet(w.getField());
        if (v instanceof AbstractContainerVector) {
          w.getValueVector().makeTransferPair(v);
          v.clear();
        }
      }
      c = VectorContainer.canonicalize(c);
      for (VectorWrapper<?> w : c) {
        @SuppressWarnings("resource")
        ValueVector v = container.addOrGet(w.getField());
        if (v instanceof AbstractContainerVector) {
          w.getValueVector().makeTransferPair(v);
          v.clear();
        }
        v.allocateNew();
      }
      container.buildSchema(SelectionVectorMode.NONE);
      container.setRecordCount(0);
      return;
    case STOP:
      state = BatchState.STOP;
      return;
    case OUT_OF_MEMORY:
      state = BatchState.OUT_OF_MEMORY;
      return;
    case NONE:
      state = BatchState.DONE;
      // fall through
    default:
      return;
  }
}
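The core pattern here is a schema-only batch: each vector is created for its field, allocated, and left at a record count of zero. A stripped-down sketch of that pattern, assuming Drill 1.x APIs and a standalone VectorContainer (the field name is illustrative):

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.vector.IntVector;

public class EmptySchemaSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      VectorContainer container = new VectorContainer();
      IntVector id = new IntVector(
          MaterializedField.create("id", Types.required(MinorType.INT)), allocator);
      id.allocateNew();            // allocate storage; no records are written
      container.add(id);
      container.buildSchema(SelectionVectorMode.NONE);
      container.setRecordCount(0); // schema-only batch: zero rows
      System.out.println(container.getSchema());
      container.clear();           // releases the vectors it holds
    }
  }
}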
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
The class WritableBatch, method getBatchNoHV().
public static WritableBatch getBatchNoHV(int recordCount, Iterable<ValueVector> vectors, boolean isSV2) {
  List<DrillBuf> buffers = Lists.newArrayList();
  List<SerializedField> metadata = Lists.newArrayList();
  for (ValueVector vv : vectors) {
    metadata.add(vv.getMetadata());
    // Don't try to get the buffers if we don't have any records; they may be
    // dead buffers.
    if (recordCount == 0) {
      vv.clear();
      continue;
    }
    for (DrillBuf b : vv.getBuffers(true)) {
      buffers.add(b);
    }
    // Remove the vector's access to its buffers.
    vv.clear();
  }
  RecordBatchDef batchDef = RecordBatchDef.newBuilder()
      .addAllField(metadata)
      .setRecordCount(recordCount)
      .setCarriesTwoByteSelectionVector(isSV2)
      .build();
  WritableBatch b = new WritableBatch(batchDef, buffers);
  return b;
}
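Per vector, serialization splits into two parts: getMetadata() captures the SerializedField describing the vector, and getBuffers(true) hands ownership of the raw DrillBufs to the caller, after which clear() is safe and the caller must release the buffers. A minimal sketch, assuming Drill 1.x APIs (field and value are illustrative):

import io.netty.buffer.DrillBuf;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.IntVector;

public class MetadataAndBuffersSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      IntVector vector = new IntVector(
          MaterializedField.create("n", Types.required(MinorType.INT)), allocator);
      vector.allocateNew(1);
      vector.getMutator().setSafe(0, 42);
      vector.getMutator().setValueCount(1);
      SerializedField metadata = vector.getMetadata(); // type, value count, buffer length
      DrillBuf[] buffers = vector.getBuffers(true);    // true: transfer ownership to caller
      vector.clear();                                  // vector no longer references the buffers
      System.out.println(metadata.getBufferLength());
      for (DrillBuf b : buffers) {
        b.release();                                   // now the caller's responsibility
      }
    }
  }
}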
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
The class WritableBatch, method reconstructContainer().
public void reconstructContainer(BufferAllocator allocator, VectorContainer container) {
  Preconditions.checkState(!cleared,
      "Attempted to reconstruct a container from a WritableBatch after it had been cleared");
  if (buffers.length > 0) {
    /* If we have DrillBufs associated with value vectors */
    int len = 0;
    for (DrillBuf b : buffers) {
      len += b.capacity();
    }
    DrillBuf newBuf = allocator.buffer(len);
    try {
      /* Copy data from each buffer into the compound buffer */
      int offset = 0;
      for (DrillBuf buf : buffers) {
        newBuf.setBytes(offset, buf);
        offset += buf.capacity();
        buf.release();
      }
      List<SerializedField> fields = def.getFieldList();
      int bufferOffset = 0;
      /*
       * For each value vector, slice the appropriate size from the compound
       * buffer and load it into the value vector.
       */
      int vectorIndex = 0;
      for (VectorWrapper<?> vv : container) {
        SerializedField fmd = fields.get(vectorIndex);
        ValueVector v = vv.getValueVector();
        DrillBuf bb = newBuf.slice(bufferOffset, fmd.getBufferLength());
        v.load(fmd, bb);
        vectorIndex++;
        bufferOffset += fmd.getBufferLength();
      }
    } finally {
      // Any vectors that loaded material from newBuf slices above will retain those.
      newBuf.release(1);
    }
  }
  SelectionVectorMode svMode;
  if (def.hasCarriesTwoByteSelectionVector() && def.getCarriesTwoByteSelectionVector()) {
    svMode = SelectionVectorMode.TWO_BYTE;
  } else {
    svMode = SelectionVectorMode.NONE;
  }
  container.buildSchema(svMode);
  /* Set the record count in each value vector */
  for (VectorWrapper<?> v : container) {
    ValueVector.Mutator m = v.getValueVector().getMutator();
    m.setValueCount(def.getRecordCount());
  }
}
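load() is the inverse of the getMetadata()/getBuffers() split shown in getBatchNoHV(): given a SerializedField and a buffer, it repopulates the vector, retaining its own reference to the data, so the caller releases its copy afterwards (the same pattern as newBuf.release(1) above). A round-trip sketch under the same Drill 1.x assumptions (field and value are illustrative):

import io.netty.buffer.DrillBuf;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.IntVector;

public class LoadRoundTripSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      MaterializedField field = MaterializedField.create("n", Types.required(MinorType.INT));
      IntVector original = new IntVector(field, allocator);
      original.allocateNew(1);
      original.getMutator().setSafe(0, 99);
      original.getMutator().setValueCount(1);
      // Serialize: metadata plus the single data buffer of a required fixed-width vector.
      SerializedField fmd = original.getMetadata();
      DrillBuf buf = original.getBuffers(true)[0];
      original.clear();
      // Deserialize: load() takes what it needs per the metadata and retains
      // its own reference, so the caller releases its copy afterwards.
      IntVector restored = new IntVector(field, allocator);
      restored.load(fmd, buf);
      buf.release();
      System.out.println(restored.getAccessor().get(0)); // prints 99
      restored.clear();
    }
  }
}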