use of org.apache.drill.exec.exception.ClassTransformationException in project drill by apache.
the class OrderedPartitionRecordBatch method getPartitionVectors.
/**
 * This method is called when the first batch comes in. Incoming batches are collected until a threshold is met. At
 * that point, the records in the batches are sorted and sampled, and the sampled records are stored in the
 * distributed cache. Once a sufficient fraction of the fragments have shared their samples, each fragment grabs all
 * the samples, sorts all the records, builds a partition table, and attempts to push the partition table to the
 * distributed cache. Whichever table gets pushed first becomes the table used by all fragments for partitioning.
 *
 * @return True if successful, false if failed.
 */
private boolean getPartitionVectors() {
  try {
    if (!saveSamples()) {
      return false;
    }
    CachedVectorContainer finalTable = null;
    long val = minorFragmentSampleCount.incrementAndGet();
    logger.debug("Incremented mfsc, got {}", val);
    final long fragmentsBeforeProceed = (long) Math.ceil(sendingMajorFragmentWidth * completionFactor);
    final String finalTableKey = mapKey + "final";
    if (val == fragmentsBeforeProceed) {
      // we crossed the barrier, build table and get data.
      buildTable();
      finalTable = tableMap.get(finalTableKey);
    } else {
      if (val < fragmentsBeforeProceed) {
        if (!waitUntilTimeOut(10)) {
          return false;
        }
      }
      for (int i = 0; i < 100 && finalTable == null; i++) {
        finalTable = tableMap.get(finalTableKey);
        if (finalTable != null) {
          break;
        }
        if (!waitUntilTimeOut(10)) {
          return false;
        }
      }
      if (finalTable == null) {
        buildTable();
      }
      finalTable = tableMap.get(finalTableKey);
    }
    Preconditions.checkState(finalTable != null);
    // Extract the vectors from the cached wrapper and add them to partitionVectors; they are used for partitioning in
    // the rest of this operator.
    for (VectorWrapper<?> w : finalTable.get()) {
      partitionVectors.add(w.getValueVector());
    }
  } catch (final ClassTransformationException | IOException | SchemaChangeException ex) {
    kill(false);
    context.fail(ex);
    return false;
    // TODO InterruptedException
  }
  return true;
}
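The coordination described in the Javadoc above (count arriving fragments, let the fragment that crosses the threshold build the partition table, and have the others poll the cache with a fallback build) can be illustrated without any Drill classes. The sketch below is a simplified, hypothetical stand-in: an AtomicLong plays the role of minorFragmentSampleCount, a ConcurrentHashMap stands in for the distributed cache, and Thread.sleep replaces waitUntilTimeOut; none of it is Drill API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

/** Simplified stand-in for the barrier-and-poll pattern in getPartitionVectors(). */
public class PartitionTableBarrierSketch {

  private final AtomicLong fragmentSampleCount = new AtomicLong();       // minorFragmentSampleCount
  private final Map<String, int[]> tableMap = new ConcurrentHashMap<>(); // stand-in for the distributed cache

  private final int sendingMajorFragmentWidth;
  private final double completionFactor;

  PartitionTableBarrierSketch(int width, double completionFactor) {
    this.sendingMajorFragmentWidth = width;
    this.completionFactor = completionFactor;
  }

  /** Called once per fragment after it has published its samples. */
  int[] getPartitionTable(String finalTableKey) throws InterruptedException {
    long val = fragmentSampleCount.incrementAndGet();
    long fragmentsBeforeProceed = (long) Math.ceil(sendingMajorFragmentWidth * completionFactor);

    if (val == fragmentsBeforeProceed) {
      // This fragment crossed the barrier: build the table and publish it (first writer wins).
      tableMap.putIfAbsent(finalTableKey, buildTable());
      return tableMap.get(finalTableKey);
    }

    // Everyone else polls until the table shows up, then falls back to building it themselves.
    int[] table = null;
    for (int i = 0; i < 100 && table == null; i++) {
      table = tableMap.get(finalTableKey);
      if (table == null) {
        Thread.sleep(10);
      }
    }
    if (table == null) {
      tableMap.putIfAbsent(finalTableKey, buildTable());
      table = tableMap.get(finalTableKey);
    }
    return table;
  }

  private int[] buildTable() {
    return new int[] {10, 20, 30}; // placeholder partition boundaries
  }
}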
use of org.apache.drill.exec.exception.ClassTransformationException in project drill by apache.
the class OrderedPartitionRecordBatch method setupNewSchema.
/**
 * Sets up a projection that will transfer all of the columns in the batch, and also populate the partition column
 * based on which partition a record falls into in the partition table.
 *
 * @param batch
 * @throws SchemaChangeException
 */
protected void setupNewSchema(VectorAccessible batch) throws SchemaChangeException {
  container.clear();
  final ErrorCollector collector = new ErrorCollectorImpl();
  final List<TransferPair> transfers = Lists.newArrayList();
  final ClassGenerator<OrderedPartitionProjector> cg = CodeGenerator.getRoot(OrderedPartitionProjector.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  for (VectorWrapper<?> vw : batch) {
    TransferPair tp = vw.getValueVector().getTransferPair(oContext.getAllocator());
    transfers.add(tp);
    container.add(tp.getTo());
  }
  cg.setMappingSet(mainMapping);
  int count = 0;
  for (Ordering od : popConfig.getOrderings()) {
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
    }
    cg.setMappingSet(incomingMapping);
    ClassGenerator.HoldingContainer left = cg.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
    cg.setMappingSet(partitionMapping);
    ClassGenerator.HoldingContainer right = cg.addExpr(new ValueVectorReadExpression(new TypedFieldId(expr.getMajorType(), count++)), ClassGenerator.BlkCreateMode.FALSE);
    cg.setMappingSet(mainMapping);
    // next we wrap the two comparison sides and add the expression block for the comparison.
    LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
    ClassGenerator.HoldingContainer out = cg.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
    JConditional jc = cg.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
    if (od.getDirection() == Direction.ASCENDING) {
      jc._then()._return(out.getValue());
    } else {
      jc._then()._return(out.getValue().minus());
    }
  }
  cg.getEvalBlock()._return(JExpr.lit(0));
  container.add(this.partitionKeyVector);
  container.buildSchema(batch.getSchema().getSelectionVectorMode());
  try {
    this.projector = context.getImplementationClass(cg);
    projector.setup(context, batch, this, transfers, partitionVectors, partitions, popConfig.getRef());
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}
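The code generation in the loop above is easier to follow once you see the shape of the comparison it emits: for each ordering key it compares the value evaluated from the incoming record against the value read from the partition table, returns the first non-zero result (negated for DESCENDING keys), and returns 0 only when every key matches. The following is a plain-Java sketch of that logic, not the generated code itself, with primitive int keys standing in for the materialized value-vector reads and null ordering (nullsSortHigh) omitted.

/** Plain-Java illustration only; Drill generates the real comparison via ClassGenerator/JConditional. */
class OrderingComparisonSketch {

  /**
   * left  = ordering-key values evaluated from the incoming record,
   * right = the corresponding values from one row of the partition table.
   * Returns the first non-zero comparison (negated for DESCENDING keys), or 0 if all keys match.
   */
  static int compareToPartitionRow(int[] left, int[] right, boolean[] descending) {
    for (int i = 0; i < left.length; i++) {
      int out = Integer.compare(left[i], right[i]);
      if (out != 0) {
        // mirrors jc._then()._return(out.getValue()) vs. out.getValue().minus() above
        return descending[i] ? -out : out;
      }
    }
    return 0;
  }
}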
use of org.apache.drill.exec.exception.ClassTransformationException in project drill by apache.
the class ExternalSortBatch method createCopier.
private void createCopier(VectorAccessible batch, List<BatchGroup> batchGroupList, VectorContainer outputContainer, boolean spilling) throws SchemaChangeException {
  try {
    if (copier == null) {
      CodeGenerator<PriorityQueueCopier> cg = CodeGenerator.get(PriorityQueueCopier.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
      cg.plainJavaCapable(true);
      // Uncomment this line to debug the generated code.
      // cg.saveCodeForDebugging(true);
      ClassGenerator<PriorityQueueCopier> g = cg.getRoot();
      generateComparisons(g, batch);
      g.setMappingSet(COPIER_MAPPING_SET);
      CopyUtil.generateCopies(g, batch, true);
      g.setMappingSet(MAIN_MAPPING);
      copier = context.getImplementationClass(cg);
    } else {
      copier.close();
    }
    @SuppressWarnings("resource") BufferAllocator allocator = spilling ? copierAllocator : oAllocator;
    for (VectorWrapper<?> i : batch) {
      @SuppressWarnings("resource") ValueVector v = TypeHelper.getNewVector(i.getField(), allocator);
      outputContainer.add(v);
    }
    copier.setup(context, allocator, batch, batchGroupList, outputContainer);
  } catch (ClassTransformationException | IOException e) {
    throw new RuntimeException(e);
  }
}
use of org.apache.drill.exec.exception.ClassTransformationException in project drill by apache.
the class RemovingRecordBatch method getGenerated2Copier.
private Copier getGenerated2Copier() throws SchemaChangeException {
  Preconditions.checkArgument(incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE);
  for (VectorWrapper<?> vv : incoming) {
    vv.getValueVector().makeTransferPair(container.addOrGet(vv.getField(), callBack));
  }
  try {
    final CodeGenerator<Copier> cg = CodeGenerator.get(Copier.TEMPLATE_DEFINITION2, context.getFunctionRegistry(), context.getOptions());
    // plainJavaCapable must be set before the implementation class is generated to have any effect.
    cg.plainJavaCapable(true);
    CopyUtil.generateCopies(cg.getRoot(), incoming, false);
    Copier copier = context.getImplementationClass(cg);
    copier.setupRemover(context, incoming, this);
    return copier;
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}
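A recurring pattern across these snippets: context.getImplementationClass(cg) declares the checked ClassTransformationException and IOException, and callers either wrap them in SchemaChangeException (setupNewSchema, getGenerated2Copier) or rethrow them unchecked (createCopier). If that wrapping repeats in your own operator code, a small private helper along the lines below keeps it in one place; it uses only the fields and types already shown in the snippets and is purely hypothetical, not an existing Drill method.

// Hypothetical helper, not part of Drill: centralizes the try/catch that
// setupNewSchema() and getGenerated2Copier() above repeat around code generation.
private <T> T getImplementationOrFail(CodeGenerator<T> cg) throws SchemaChangeException {
  try {
    return context.getImplementationClass(cg);
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException("Failure while attempting to load generated class", e);
  }
}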
use of org.apache.drill.exec.exception.ClassTransformationException in project drill by apache.
the class WindowFrameRecordBatch method buildSchema.
@Override
protected void buildSchema() throws SchemaChangeException {
  logger.trace("buildSchema()");
  final IterOutcome outcome = next(incoming);
  switch (outcome) {
    case NONE:
      state = BatchState.DONE;
      container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
      return;
    case STOP:
      state = BatchState.STOP;
      return;
    case OUT_OF_MEMORY:
      state = BatchState.OUT_OF_MEMORY;
      return;
  }
  try {
    createFramers(incoming);
  } catch (IOException | ClassTransformationException e) {
    throw new SchemaChangeException("Exception when creating the schema", e);
  }
  if (incoming.getRecordCount() > 0) {
    batches.add(new WindowDataBatch(incoming, oContext));
  }
}