
Example 21 with ClassTransformationException

use of org.apache.drill.exec.exception.ClassTransformationException in project drill by axbaretto.

the class MergingRecordBatch method createMerger.

/**
 * Creates a generated class which implements the copy and compare methods.
 *
 * @return instance of a new merger based on generated code
 * @throws SchemaChangeException
 */
private MergingReceiverGeneratorBase createMerger() throws SchemaChangeException {
    try {
        final CodeGenerator<MergingReceiverGeneratorBase> cg = CodeGenerator.get(MergingReceiverGeneratorBase.TEMPLATE_DEFINITION, context.getOptions());
        cg.plainJavaCapable(true);
        // Uncomment this line to debug the generated code.
        // cg.saveCodeForDebugging(true);
        final ClassGenerator<MergingReceiverGeneratorBase> g = cg.getRoot();
        ExpandableHyperContainer batch = null;
        boolean first = true;
        for (final RecordBatchLoader loader : batchLoaders) {
            if (first) {
                batch = new ExpandableHyperContainer(loader);
                first = false;
            } else {
                batch.addBatch(loader);
            }
        }
        generateComparisons(g, batch);
        g.setMappingSet(COPIER_MAPPING_SET);
        CopyUtil.generateCopies(g, batch, true);
        g.setMappingSet(MAIN_MAPPING);
        final MergingReceiverGeneratorBase merger = context.getImplementationClass(cg);
        merger.doSetup(context, batch, outgoingContainer);
        return merger;
    } catch (ClassTransformationException | IOException e) {
        throw new SchemaChangeException(e);
    }
}
Also used : ExpandableHyperContainer(org.apache.drill.exec.record.ExpandableHyperContainer) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader) IOException(java.io.IOException)
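
The example distills to Drill's standard code-generation flow: obtain a CodeGenerator from a template definition, populate its root ClassGenerator, and ask the fragment context to compile and instantiate the result. ClassTransformationException (or IOException) signals that compilation or byte-code transformation failed, and the operators uniformly wrap it in SchemaChangeException. A minimal hedged sketch of that skeleton, reusing only the calls shown above (the method name is illustrative and the population step is elided):

private MergingReceiverGeneratorBase createMergerSkeleton() throws SchemaChangeException {
    try {
        // Obtain a code generator for the merger template and prefer plain-Java compilation.
        final CodeGenerator<MergingReceiverGeneratorBase> cg =
                CodeGenerator.get(MergingReceiverGeneratorBase.TEMPLATE_DEFINITION, context.getOptions());
        cg.plainJavaCapable(true);
        // ... populate cg.getRoot() with comparison and copy expressions, as above ...
        // Compile, load, and instantiate the generated class.
        return context.getImplementationClass(cg);
    } catch (ClassTransformationException | IOException e) {
        // Codegen failures are surfaced to callers as schema-change failures.
        throw new SchemaChangeException(e);
    }
}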

Example 22 with ClassTransformationException

use of org.apache.drill.exec.exception.ClassTransformationException in project drill by axbaretto.

the class OrderedPartitionRecordBatch method setupNewSchema.

/**
 * Sets up a projection that transfers all of the columns in the batch and also populates the partition
 * column based on which partition a record falls into in the partition table.
 *
 * @param batch
 * @throws SchemaChangeException
 */
protected void setupNewSchema(VectorAccessible batch) throws SchemaChangeException {
    container.clear();
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<OrderedPartitionProjector> cg = CodeGenerator.getRoot(OrderedPartitionProjector.TEMPLATE_DEFINITION, context.getOptions());
    for (VectorWrapper<?> vw : batch) {
        TransferPair tp = vw.getValueVector().getTransferPair(oContext.getAllocator());
        transfers.add(tp);
        container.add(tp.getTo());
    }
    cg.setMappingSet(mainMapping);
    int count = 0;
    for (Ordering od : popConfig.getOrderings()) {
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
        if (collector.hasErrors()) {
            throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
        }
        cg.setMappingSet(incomingMapping);
        ClassGenerator.HoldingContainer left = cg.addExpr(expr, ClassGenerator.BlkCreateMode.FALSE);
        cg.setMappingSet(partitionMapping);
        ClassGenerator.HoldingContainer right = cg.addExpr(new ValueVectorReadExpression(new TypedFieldId(expr.getMajorType(), count++)), ClassGenerator.BlkCreateMode.FALSE);
        cg.setMappingSet(mainMapping);
        // next we wrap the two comparison sides and add the expression block for the comparison.
        LogicalExpression fh = FunctionGenerationHelper.getOrderingComparator(od.nullsSortHigh(), left, right, context.getFunctionRegistry());
        ClassGenerator.HoldingContainer out = cg.addExpr(fh, ClassGenerator.BlkCreateMode.FALSE);
        JConditional jc = cg.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
        if (od.getDirection() == Direction.ASCENDING) {
            jc._then()._return(out.getValue());
        } else {
            jc._then()._return(out.getValue().minus());
        }
    }
    cg.getEvalBlock()._return(JExpr.lit(0));
    container.add(this.partitionKeyVector);
    container.buildSchema(batch.getSchema().getSelectionVectorMode());
    try {
        this.projector = context.getImplementationClass(cg);
        projector.setup(context, batch, this, transfers, partitionVectors, partitions, popConfig.getRef());
    } catch (ClassTransformationException | IOException e) {
        throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
}
Also used : TransferPair(org.apache.drill.exec.record.TransferPair) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) ErrorCollector(org.apache.drill.common.expression.ErrorCollector) IOException(java.io.IOException) ErrorCollectorImpl(org.apache.drill.common.expression.ErrorCollectorImpl) ValueVectorReadExpression(org.apache.drill.exec.expr.ValueVectorReadExpression) LogicalExpression(org.apache.drill.common.expression.LogicalExpression) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) ClassGenerator(org.apache.drill.exec.expr.ClassGenerator) TypedFieldId(org.apache.drill.exec.record.TypedFieldId) Ordering(org.apache.drill.common.logical.data.Order.Ordering) HoldingContainer(org.apache.drill.exec.expr.ClassGenerator.HoldingContainer) JConditional(com.sun.codemodel.JConditional)
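
The loop over popConfig.getOrderings() emits a lexicographic comparator: each key comparison returns early on a nonzero result, negated for descending keys, and the final block returns 0 only when all keys tie. An illustrative plain-Java analogue of what the generated eval block computes (the real code reads value vectors; the method name and long-typed keys here are hypothetical):

static int compareOrderingKeys(long[] left, long[] right, boolean[] descending) {
    for (int i = 0; i < left.length; i++) {
        final int out = Long.compare(left[i], right[i]);
        if (out != 0) {
            // Mirrors jc._then()._return(out.getValue().minus()) for DESCENDING orderings.
            return descending[i] ? -out : out;
        }
    }
    // Mirrors cg.getEvalBlock()._return(JExpr.lit(0)): all ordering keys are equal.
    return 0;
}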

Example 23 with ClassTransformationException

use of org.apache.drill.exec.exception.ClassTransformationException in project drill by axbaretto.

the class OrderedPartitionRecordBatch method getPartitionVectors.

/**
 * This method is called when the first batch comes in. Incoming batches are collected until a threshold is met. At
 * that point, the records in the batches are sorted and sampled, and the sampled records are stored in the
 * distributed cache. Once a sufficient fraction of the fragments have shared their samples, each fragment grabs all
 * the samples, sorts all the records, builds a partition table, and attempts to push the partition table to the
 * distributed cache. Whichever table gets pushed first becomes the table used by all fragments for partitioning.
 *
 * @return true if successful, false if failed
 */
private boolean getPartitionVectors() {
    try {
        if (!saveSamples()) {
            return false;
        }
        CachedVectorContainer finalTable = null;
        long val = minorFragmentSampleCount.incrementAndGet();
        logger.debug("Incremented mfsc, got {}", val);
        final long fragmentsBeforeProceed = (long) Math.ceil(sendingMajorFragmentWidth * completionFactor);
        final String finalTableKey = mapKey + "final";
        if (val == fragmentsBeforeProceed) {
            // we crossed the barrier, build table and get data.
            buildTable();
            finalTable = tableMap.get(finalTableKey);
        } else {
            if (val < fragmentsBeforeProceed) {
                if (!waitUntilTimeOut(10)) {
                    return false;
                }
            }
            for (int i = 0; i < 100 && finalTable == null; i++) {
                finalTable = tableMap.get(finalTableKey);
                if (finalTable != null) {
                    break;
                }
                if (!waitUntilTimeOut(10)) {
                    return false;
                }
            }
            if (finalTable == null) {
                buildTable();
            }
            finalTable = tableMap.get(finalTableKey);
        }
        Preconditions.checkState(finalTable != null);
        // extract the value vectors from the final table; they drive partitioning for the rest of this operator
        for (VectorWrapper<?> w : finalTable.get()) {
            partitionVectors.add(w.getValueVector());
        }
    } catch (final ClassTransformationException | IOException | SchemaChangeException ex) {
        kill(false);
        context.getExecutorState().fail(ex);
        return false;
    // TODO InterruptedException
    }
    return true;
}
Also used : SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) IOException(java.io.IOException) CachedVectorContainer(org.apache.drill.exec.cache.CachedVectorContainer)
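
The counter logic above implements a soft barrier: only a fraction of the sending fragments (completionFactor) must check in before one of them builds the partition table, and exactly the fragment whose increment hits the threshold does the build. A worked example of the arithmetic, with hypothetical values:

// Hypothetical values chosen to illustrate the threshold computation above.
final int sendingMajorFragmentWidth = 10;  // assumed number of sending fragments
final double completionFactor = 0.75;      // assumed fraction required before proceeding
final long fragmentsBeforeProceed =
        (long) Math.ceil(sendingMajorFragmentWidth * completionFactor);  // ceil(7.5) = 8
// The fragment whose incrementAndGet() returns exactly 8 builds the table; fragments
// with smaller values wait, then poll the distributed cache under mapKey + "final".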

Example 24 with ClassTransformationException

use of org.apache.drill.exec.exception.ClassTransformationException in project drill by axbaretto.

the class TopNBatch method innerNext.

@Override
public IterOutcome innerNext() {
    recordCount = 0;
    if (state == BatchState.DONE) {
        return IterOutcome.NONE;
    }
    if (schema != null) {
        if (getSelectionVector4().next()) {
            recordCount = sv4.getCount();
            return IterOutcome.OK;
        } else {
            recordCount = 0;
            return IterOutcome.NONE;
        }
    }
    try {
        outer: while (true) {
            Stopwatch watch = Stopwatch.createStarted();
            IterOutcome upstream;
            if (first) {
                upstream = IterOutcome.OK_NEW_SCHEMA;
                first = false;
            } else {
                upstream = next(incoming);
            }
            if (upstream == IterOutcome.OK && schema == null) {
                upstream = IterOutcome.OK_NEW_SCHEMA;
                container.clear();
            }
            logger.debug("Took {} us to get next", watch.elapsed(TimeUnit.MICROSECONDS));
            switch(upstream) {
                case NONE:
                    break outer;
                case NOT_YET:
                    throw new UnsupportedOperationException();
                case OUT_OF_MEMORY:
                case STOP:
                    return upstream;
                case OK_NEW_SCHEMA:
                    // only change in the case that the schema truly changes.  Artificial schema changes are ignored.
                    if (!incoming.getSchema().equals(schema)) {
                        if (schema != null) {
                            if (!unionTypeEnabled) {
                                throw new UnsupportedOperationException("Sort doesn't currently support sorts with changing schemas.");
                            } else {
                                this.schema = SchemaUtil.mergeSchemas(this.schema, incoming.getSchema());
                                purgeAndResetPriorityQueue();
                                this.schemaChanged = true;
                            }
                        } else {
                            this.schema = incoming.getSchema();
                        }
                    }
                // fall through.
                case OK:
                    if (incoming.getRecordCount() == 0) {
                        for (VectorWrapper<?> w : incoming) {
                            w.clear();
                        }
                        break;
                    }
                    countSincePurge += incoming.getRecordCount();
                    batchCount++;
                    RecordBatchData batch;
                    if (schemaChanged) {
                        batch = new RecordBatchData(SchemaUtil.coerceContainer(incoming, this.schema, oContext), oContext.getAllocator());
                    } else {
                        batch = new RecordBatchData(incoming, oContext.getAllocator());
                    }
                    boolean success = false;
                    try {
                        if (priorityQueue == null) {
                            assert !schemaChanged;
                            priorityQueue = createNewPriorityQueue(new ExpandableHyperContainer(batch.getContainer()), config.getLimit());
                        }
                        priorityQueue.add(batch);
                        if (countSincePurge > config.getLimit() && batchCount > batchPurgeThreshold) {
                            purge();
                            countSincePurge = 0;
                            batchCount = 0;
                        }
                        success = true;
                    } finally {
                        if (!success) {
                            batch.clear();
                        }
                    }
                    break;
                default:
                    throw new UnsupportedOperationException();
            }
        }
        if (schema == null || priorityQueue == null) {
            // the priority queue may be null at this point if the first incoming batch is empty
            state = BatchState.DONE;
            return IterOutcome.NONE;
        }
        priorityQueue.generate();
        this.sv4 = priorityQueue.getFinalSv4();
        container.clear();
        for (VectorWrapper<?> w : priorityQueue.getHyperBatch()) {
            container.add(w.getValueVectors());
        }
        container.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
        recordCount = sv4.getCount();
        return IterOutcome.OK_NEW_SCHEMA;
    } catch (SchemaChangeException | ClassTransformationException | IOException ex) {
        kill(false);
        logger.error("Failure during query", ex);
        context.getExecutorState().fail(ex);
        return IterOutcome.STOP;
    }
}
Also used : ExpandableHyperContainer(org.apache.drill.exec.record.ExpandableHyperContainer) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) RecordBatchData(org.apache.drill.exec.physical.impl.sort.RecordBatchData) VectorWrapper(org.apache.drill.exec.record.VectorWrapper) Stopwatch(com.google.common.base.Stopwatch) IOException(java.io.IOException)
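
TopNBatch defers trimming: incoming batches accumulate in the priority queue, and a purge runs only once both the surplus record count (countSincePurge > limit) and the batch count (batchCount > batchPurgeThreshold) exceed their thresholds, amortizing the cost of rebuilding the queue. A self-contained analogue of that deferred-purge policy using java.util.PriorityQueue (class name and threshold values are hypothetical; Drill's purge additionally rebuilds selection vectors):

import java.util.PriorityQueue;

// Standalone sketch of TopNBatch's deferred-purge strategy over integer records.
class TopNSketch {
    static final int LIMIT = 100;                // assumed N of the Top-N
    static final int BATCH_PURGE_THRESHOLD = 10; // assumed purge trigger
    final PriorityQueue<Integer> heap = new PriorityQueue<>(); // min-heap keeps the largest N
    int countSincePurge;
    int batchCount;

    void addBatch(int[] records) {
        for (int r : records) {
            heap.add(r);
        }
        countSincePurge += records.length;
        batchCount++;
        // Purge only after enough surplus accumulates, mirroring innerNext().
        if (countSincePurge > LIMIT && batchCount > BATCH_PURGE_THRESHOLD) {
            while (heap.size() > LIMIT) {
                heap.poll(); // evict the smallest, keeping the top LIMIT records
            }
            countSincePurge = 0;
            batchCount = 0;
        }
    }
}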

Example 25 with ClassTransformationException

use of org.apache.drill.exec.exception.ClassTransformationException in project drill by axbaretto.

the class ProjectRecordBatch method setupNewSchemaFromInput.

private void setupNewSchemaFromInput(RecordBatch incomingBatch) throws SchemaChangeException {
    if (allocationVectors != null) {
        for (final ValueVector v : allocationVectors) {
            v.clear();
        }
    }
    this.allocationVectors = Lists.newArrayList();
    if (complexWriters != null) {
        container.clear();
    } else {
        container.zeroVectors();
    }
    final List<NamedExpression> exprs = getExpressionList();
    final ErrorCollector collector = new ErrorCollectorImpl();
    final List<TransferPair> transfers = Lists.newArrayList();
    final ClassGenerator<Projector> cg = CodeGenerator.getRoot(Projector.TEMPLATE_DEFINITION, context.getOptions());
    cg.getCodeGenerator().plainJavaCapable(true);
    // Uncomment this line to debug the generated code.
    // cg.getCodeGenerator().saveCodeForDebugging(true);
    final IntHashSet transferFieldIds = new IntHashSet();
    final boolean isAnyWildcard = isAnyWildcard(exprs);
    final ClassifierResult result = new ClassifierResult();
    final boolean classify = isClassificationNeeded(exprs);
    for (NamedExpression namedExpression : exprs) {
        result.clear();
        if (classify && namedExpression.getExpr() instanceof SchemaPath) {
            classifyExpr(namedExpression, incomingBatch, result);
            if (result.isStar) {
                // The value indicates which wildcard we are processing now
                final Integer value = result.prefixMap.get(result.prefix);
                if (value != null && value == 1) {
                    int k = 0;
                    for (final VectorWrapper<?> wrapper : incomingBatch) {
                        final ValueVector vvIn = wrapper.getValueVector();
                        if (k > result.outputNames.size() - 1) {
                            assert false;
                        }
                        // get the renamed column names
                        final String name = result.outputNames.get(k++);
                        if (name.isEmpty()) {
                            continue;
                        }
                        if (isImplicitFileColumn(vvIn)) {
                            continue;
                        }
                        final FieldReference ref = new FieldReference(name);
                        final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getAsNamePart().getName(), vvIn.getField().getType()), callBack);
                        final TransferPair tp = vvIn.makeTransferPair(vvOut);
                        transfers.add(tp);
                    }
                } else if (value != null && value > 1) {
                    // subsequent wildcards should do a copy of the incoming value vectors
                    int k = 0;
                    for (final VectorWrapper<?> wrapper : incomingBatch) {
                        final ValueVector vvIn = wrapper.getValueVector();
                        final SchemaPath originalPath = SchemaPath.getSimplePath(vvIn.getField().getName());
                        if (k > result.outputNames.size() - 1) {
                            assert false;
                        }
                        // get the renamed column names
                        final String name = result.outputNames.get(k++);
                        if (name.isEmpty()) {
                            continue;
                        }
                        if (isImplicitFileColumn(vvIn)) {
                            continue;
                        }
                        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incomingBatch, collector, context.getFunctionRegistry());
                        if (collector.hasErrors()) {
                            throw new SchemaChangeException(String.format("Failure while trying to materialize incomingBatch schema.  Errors:\n %s.", collector.toErrorString()));
                        }
                        final MaterializedField outputField = MaterializedField.create(name, expr.getMajorType());
                        final ValueVector vv = container.addOrGet(outputField, callBack);
                        allocationVectors.add(vv);
                        final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getName()));
                        final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
                        final HoldingContainer hc = cg.addExpr(write, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
                    }
                }
                continue;
            }
        } else {
            // For the columns which do not need to be classified,
            // it is still necessary to ensure the output column name is unique.
            result.outputNames = Lists.newArrayList();
            final String outputName = getRef(namedExpression).getRootSegment().getPath();
            addToResultMaps(outputName, result, true);
        }
        String outputName = getRef(namedExpression).getRootSegment().getPath();
        if (result != null && result.outputNames != null && result.outputNames.size() > 0) {
            boolean isMatched = false;
            for (int j = 0; j < result.outputNames.size(); j++) {
                if (!result.outputNames.get(j).isEmpty()) {
                    outputName = result.outputNames.get(j);
                    isMatched = true;
                    break;
                }
            }
            if (!isMatched) {
                continue;
            }
        }
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incomingBatch, collector, context.getFunctionRegistry(), true, unionTypeEnabled);
        final MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
        if (collector.hasErrors()) {
            throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
        }
        // add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
        if (expr instanceof ValueVectorReadExpression && incomingBatch.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE && !((ValueVectorReadExpression) expr).hasReadPath() && !isAnyWildcard && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])) {
            final ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
            final TypedFieldId id = vectorRead.getFieldId();
            final ValueVector vvIn = incomingBatch.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
            Preconditions.checkNotNull(incomingBatch);
            final FieldReference ref = getRef(namedExpression);
            final ValueVector vvOut = container.addOrGet(MaterializedField.create(ref.getLastSegment().getNameSegment().getPath(), vectorRead.getMajorType()), callBack);
            final TransferPair tp = vvIn.makeTransferPair(vvOut);
            transfers.add(tp);
            transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
        } else if (expr instanceof DrillFuncHolderExpr && ((DrillFuncHolderExpr) expr).getHolder().isComplexWriterFuncHolder()) {
            // Lazy initialization of the list of complex writers, if not done yet.
            if (complexWriters == null) {
                complexWriters = Lists.newArrayList();
            } else {
                complexWriters.clear();
            }
            // The reference name will be passed to ComplexWriter, used as the name of the output vector from the writer.
            ((DrillFuncHolderExpr) expr).getFieldReference(namedExpression.getRef());
            cg.addExpr(expr, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
            if (complexFieldReferencesList == null) {
                complexFieldReferencesList = Lists.newArrayList();
            }
            // save the field reference for later for getting schema when input is empty
            complexFieldReferencesList.add(namedExpression.getRef());
        } else {
            // need to do evaluation.
            final ValueVector vector = container.addOrGet(outputField, callBack);
            allocationVectors.add(vector);
            final TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getName()));
            final boolean useSetSafe = !(vector instanceof FixedWidthVector);
            final ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
            final HoldingContainer hc = cg.addExpr(write, ClassGenerator.BlkCreateMode.TRUE_IF_BOUND);
            // We cannot do multiple transfers from the same vector. However we still need to instantiate the output vector.
            if (expr instanceof ValueVectorReadExpression) {
                final ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
                if (!vectorRead.hasReadPath()) {
                    final TypedFieldId id = vectorRead.getFieldId();
                    final ValueVector vvIn = incomingBatch.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
                    vvIn.makeTransferPair(vector);
                }
            }
            logger.debug("Added eval for project expression.");
        }
    }
    try {
        CodeGenerator<Projector> codeGen = cg.getCodeGenerator();
        codeGen.plainJavaCapable(true);
        // Uncomment this line to debug the generated code.
        // codeGen.saveCodeForDebugging(true);
        this.projector = context.getImplementationClass(codeGen);
        projector.setup(context, incomingBatch, this, transfers);
    } catch (ClassTransformationException | IOException e) {
        throw new SchemaChangeException("Failure while attempting to load generated class", e);
    }
}
Also used : TransferPair(org.apache.drill.exec.record.TransferPair) IntHashSet(com.carrotsearch.hppc.IntHashSet) ErrorCollector(org.apache.drill.common.expression.ErrorCollector) DrillFuncHolderExpr(org.apache.drill.exec.expr.DrillFuncHolderExpr) ErrorCollectorImpl(org.apache.drill.common.expression.ErrorCollectorImpl) LogicalExpression(org.apache.drill.common.expression.LogicalExpression) HoldingContainer(org.apache.drill.exec.expr.ClassGenerator.HoldingContainer) SchemaPath(org.apache.drill.common.expression.SchemaPath) TypedFieldId(org.apache.drill.exec.record.TypedFieldId) ValueVectorWriteExpression(org.apache.drill.exec.expr.ValueVectorWriteExpression) FieldReference(org.apache.drill.common.expression.FieldReference) ClassTransformationException(org.apache.drill.exec.exception.ClassTransformationException) FixedWidthVector(org.apache.drill.exec.vector.FixedWidthVector) VectorWrapper(org.apache.drill.exec.record.VectorWrapper) MaterializedField(org.apache.drill.exec.record.MaterializedField) IOException(java.io.IOException) ValueVector(org.apache.drill.exec.vector.ValueVector) ValueVectorReadExpression(org.apache.drill.exec.expr.ValueVectorReadExpression) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) NamedExpression(org.apache.drill.common.logical.data.NamedExpression)
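
The long condition guarding the first branch decides between a zero-copy transfer and generated evaluation code: a column qualifies for transfer only when it is a plain vector read with no nested read path, the incoming batch carries no selection vector, no wildcard is being projected, and the vector has not already been transferred. A hedged restatement of that test as a standalone predicate (the method name is illustrative; all conditions are taken from the example above):

private boolean canTransfer(LogicalExpression expr, RecordBatch incomingBatch,
                            boolean isAnyWildcard, IntHashSet transferFieldIds) {
    if (!(expr instanceof ValueVectorReadExpression)) {
        return false; // computed expressions always go through generated evaluation
    }
    final ValueVectorReadExpression read = (ValueVectorReadExpression) expr;
    return incomingBatch.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
        && !read.hasReadPath()          // plain column reference, no nested path
        && !isAnyWildcard               // wildcard projections copy instead of transferring
        && !transferFieldIds.contains(read.getFieldId().getFieldIds()[0]); // one transfer per vector
}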

Aggregations

ClassTransformationException (org.apache.drill.exec.exception.ClassTransformationException) 29
IOException (java.io.IOException) 27
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException) 21
ValueVector (org.apache.drill.exec.vector.ValueVector) 9
ErrorCollector (org.apache.drill.common.expression.ErrorCollector) 8
ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl) 8
LogicalExpression (org.apache.drill.common.expression.LogicalExpression) 8
TransferPair (org.apache.drill.exec.record.TransferPair) 6
MaterializedField (org.apache.drill.exec.record.MaterializedField) 5
TypedFieldId (org.apache.drill.exec.record.TypedFieldId) 5
VectorWrapper (org.apache.drill.exec.record.VectorWrapper) 5
HoldingContainer (org.apache.drill.exec.expr.ClassGenerator.HoldingContainer) 4
ValueVectorReadExpression (org.apache.drill.exec.expr.ValueVectorReadExpression) 4
ValueVectorWriteExpression (org.apache.drill.exec.expr.ValueVectorWriteExpression) 4
ExpandableHyperContainer (org.apache.drill.exec.record.ExpandableHyperContainer) 4
VectorContainer (org.apache.drill.exec.record.VectorContainer) 4
IntHashSet (com.carrotsearch.hppc.IntHashSet) 3
Stopwatch (com.google.common.base.Stopwatch) 3
FieldReference (org.apache.drill.common.expression.FieldReference) 3
NamedExpression (org.apache.drill.common.logical.data.NamedExpression) 3