Search in sources:

Example 46 with DrillRuntimeException

use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.

the class DrillSimpleFuncHolder method renderEnd.

@Override
public HoldingContainer renderEnd(ClassGenerator<?> classGenerator, HoldingContainer[] inputVariables, JVar[] workspaceJVars, FieldReference fieldReference) {
    // If a parameter is declared constant but the corresponding argument is not, raise an exception.
    for (int i = 0; i < inputVariables.length; i++) {
        if (getParameters()[i].isConstant() && !inputVariables[i].isConstant()) {
            throw new DrillRuntimeException(String.format("The argument '%s' of Function '%s' has to be constant!", getParameters()[i].getName(), this.getRegisteredNames()[0]));
        }
    }
    generateBody(classGenerator, BlockType.SETUP, setupBody(), inputVariables, workspaceJVars, true);
    HoldingContainer c = generateEvalBody(classGenerator, inputVariables, evalBody(), workspaceJVars, fieldReference);
    generateBody(classGenerator, BlockType.RESET, resetBody(), null, workspaceJVars, false);
    generateBody(classGenerator, BlockType.CLEANUP, cleanupBody(), null, workspaceJVars, false);
    return c;
}
Also used : HoldingContainer(org.apache.drill.exec.expr.ClassGenerator.HoldingContainer) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
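
To see the constant-argument check in isolation, here is a minimal, self-contained sketch of the same validation. FuncParam is a hypothetical stand-in for Drill's ValueReference, the boolean array stands in for the HoldingContainer constness flags, and a plain RuntimeException replaces DrillRuntimeException so the snippet compiles without Drill on the classpath.

// Hypothetical sketch: validates that every parameter declared constant
// receives a constant argument, mirroring the loop in renderEnd().
final class ConstantArgCheckSketch {

    static final class FuncParam {
        final String name;
        final boolean constant;

        FuncParam(String name, boolean constant) {
            this.name = name;
            this.constant = constant;
        }
    }

    static void validate(String funcName, FuncParam[] params, boolean[] argIsConstant) {
        for (int i = 0; i < params.length; i++) {
            if (params[i].constant && !argIsConstant[i]) {
                throw new RuntimeException(String.format(
                        "The argument '%s' of Function '%s' has to be constant!",
                        params[i].name, funcName));
            }
        }
    }
}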

Example 47 with DrillRuntimeException

use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.

the class BaseRawBatchBuffer method getNext.

@Override
public RawFragmentBatch getNext() throws IOException {
    if (outOfMemory.get() && bufferQueue.size() < 10) {
        outOfMemory.set(false);
    }
    RawFragmentBatch b;
    try {
        b = bufferQueue.poll();
        // If we didn't get a batch, block waiting on the queue.
        if (b == null && (!isTerminated() || !bufferQueue.isEmpty())) {
            b = bufferQueue.take();
        }
    } catch (final InterruptedException e) {
        // We expect that the interrupt means the fragment is canceled or failed, so we should kill this buffer
        if (!context.shouldContinue()) {
            kill(context);
        } else {
            throw new DrillRuntimeException("Interrupted but context.shouldContinue() is true", e);
        }
        // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
        // interruption and respond to it if it wants to.
        Thread.currentThread().interrupt();
        return null;
    }
    if (context.isOverMemoryLimit()) {
        outOfMemory.set(true);
    }
    if (b != null) {
        upkeep(b);
        if (b.getHeader().getIsLastBatch()) {
            logger.debug("Got last batch from {}:{}", b.getHeader().getSendingMajorFragmentId(), b.getHeader().getSendingMinorFragmentId());
            final int remainingStreams = decrementStreamCounter();
            if (remainingStreams == 0) {
                logger.debug("Streams finished");
                allStreamsFinished();
            }
        }
    } else {
        if (!bufferQueue.isEmpty()) {
            throw new IllegalStateException("Returning null when there are batches left in queue");
        }
        if (!isTerminated()) {
            throw new IllegalStateException("Returning null when not finished");
        }
    }
    assertAckSent(b);
    return b;
}
Also used : RawFragmentBatch(org.apache.drill.exec.record.RawFragmentBatch) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
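
The interrupt handling here follows the standard Java idiom: treat InterruptedException as a cancellation signal, and restore the thread's interrupt status before returning so code higher up the stack can still observe it. A dependency-free sketch of the same poll-then-take shape (the element type and termination flag are illustrative):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Sketch of the getNext() skeleton: fast non-blocking poll first, block
// only while more data may still arrive, restore the interrupt on cancel.
final class InterruptAwarePollSketch {
    private final BlockingQueue<String> queue = new LinkedBlockingQueue<>();
    private volatile boolean terminated;

    String next() {
        String item = queue.poll();
        // Mirrors (b == null && (!isTerminated() || !bufferQueue.isEmpty())):
        // block only if the stream is still live or data remains queued.
        if (item == null && (!terminated || !queue.isEmpty())) {
            try {
                item = queue.take();
            } catch (InterruptedException e) {
                // Preserve evidence of the interruption for callers.
                Thread.currentThread().interrupt();
                return null;
            }
        }
        return item;
    }
}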

Example 48 with DrillRuntimeException

use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.

the class MapUtility method writeToMapFromReader.

/*
   * Function to read a value from the field reader, detect the type, construct the appropriate value holder
   * and use the value holder to write to the Map.
   */
// TODO : This should be templatized and generated using freemarker
public static void writeToMapFromReader(FieldReader fieldReader, BaseWriter.MapWriter mapWriter) {
    try {
        MajorType valueMajorType = fieldReader.getType();
        MinorType valueMinorType = valueMajorType.getMinorType();
        boolean repeated = valueMajorType.getMode() == TypeProtos.DataMode.REPEATED;
        switch(valueMinorType) {
            case TINYINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).tinyInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.tinyInt(MappifyUtility.fieldValue));
                }
                break;
            case SMALLINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).smallInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.smallInt(MappifyUtility.fieldValue));
                }
                break;
            case BIGINT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bigInt());
                } else {
                    fieldReader.copyAsValue(mapWriter.bigInt(MappifyUtility.fieldValue));
                }
                break;
            case INT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).integer());
                } else {
                    fieldReader.copyAsValue(mapWriter.integer(MappifyUtility.fieldValue));
                }
                break;
            case UINT1:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt1());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt1(MappifyUtility.fieldValue));
                }
                break;
            case UINT2:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt2());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt2(MappifyUtility.fieldValue));
                }
                break;
            case UINT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt4());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt4(MappifyUtility.fieldValue));
                }
                break;
            case UINT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).uInt8());
                } else {
                    fieldReader.copyAsValue(mapWriter.uInt8(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL9:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal9());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal9(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL18:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal18());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal18(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL28SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal28Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal28Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DECIMAL38SPARSE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).decimal38Sparse());
                } else {
                    fieldReader.copyAsValue(mapWriter.decimal38Sparse(MappifyUtility.fieldValue));
                }
                break;
            case DATE:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).date());
                } else {
                    fieldReader.copyAsValue(mapWriter.date(MappifyUtility.fieldValue));
                }
                break;
            case TIME:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).time());
                } else {
                    fieldReader.copyAsValue(mapWriter.time(MappifyUtility.fieldValue));
                }
                break;
            case TIMESTAMP:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).timeStamp());
                } else {
                    fieldReader.copyAsValue(mapWriter.timeStamp(MappifyUtility.fieldValue));
                }
                break;
            case INTERVAL:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).interval());
                } else {
                    fieldReader.copyAsValue(mapWriter.interval(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALDAY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalDay());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalDay(MappifyUtility.fieldValue));
                }
                break;
            case INTERVALYEAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).intervalYear());
                } else {
                    fieldReader.copyAsValue(mapWriter.intervalYear(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT4:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float4());
                } else {
                    fieldReader.copyAsValue(mapWriter.float4(MappifyUtility.fieldValue));
                }
                break;
            case FLOAT8:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).float8());
                } else {
                    fieldReader.copyAsValue(mapWriter.float8(MappifyUtility.fieldValue));
                }
                break;
            case BIT:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).bit());
                } else {
                    fieldReader.copyAsValue(mapWriter.bit(MappifyUtility.fieldValue));
                }
                break;
            case VARCHAR:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varChar());
                } else {
                    fieldReader.copyAsValue(mapWriter.varChar(MappifyUtility.fieldValue));
                }
                break;
            case VARBINARY:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).varBinary());
                } else {
                    fieldReader.copyAsValue(mapWriter.varBinary(MappifyUtility.fieldValue));
                }
                break;
            case MAP:
                if (repeated) {
                    fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).map());
                } else {
                    fieldReader.copyAsValue(mapWriter.map(MappifyUtility.fieldValue));
                }
                break;
            case LIST:
                fieldReader.copyAsValue(mapWriter.list(MappifyUtility.fieldValue).list());
                break;
            default:
                throw new DrillRuntimeException(String.format("kvgen does not support input of type: %s", valueMinorType));
        }
    } catch (ClassCastException e) {
        final MaterializedField field = fieldReader.getField();
        throw new DrillRuntimeException(String.format(TYPE_MISMATCH_ERROR, field.getPath(), field.getType()));
    }
}
Also used : MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MinorType(org.apache.drill.common.types.TypeProtos.MinorType) MaterializedField(org.apache.drill.exec.record.MaterializedField) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
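
As the TODO notes, this switch is mechanical boilerplate that would normally be generated from a FreeMarker template. A hypothetical condensed sketch of the same dispatch shape, replacing the switch with an EnumMap of writer actions (the enum and the actions are illustrative, not Drill's API), shows why a table-driven form is attractive: adding a type becomes one map entry rather than another switch arm.

import java.util.EnumMap;
import java.util.Map;
import java.util.function.Consumer;

// Hypothetical table-driven version of the per-type dispatch above.
final class TypeDispatchSketch {
    enum MinorType { INT, BIGINT, VARCHAR }

    private static final Map<MinorType, Consumer<StringBuilder>> WRITERS =
            new EnumMap<>(MinorType.class);
    static {
        WRITERS.put(MinorType.INT, out -> out.append("write int\n"));
        WRITERS.put(MinorType.BIGINT, out -> out.append("write bigint\n"));
        WRITERS.put(MinorType.VARCHAR, out -> out.append("write varchar\n"));
    }

    static void write(MinorType type, StringBuilder out) {
        Consumer<StringBuilder> writer = WRITERS.get(type);
        if (writer == null) {
            // Same unsupported-type failure as the default arm above.
            throw new RuntimeException(String.format(
                    "kvgen does not support input of type: %s", type));
        }
        writer.accept(out);
    }
}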

Example 49 with DrillRuntimeException

use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.

the class BinaryTableGroupScan method init.

private void init() {
    logger.debug("Getting region locations");
    TableName tableName = TableName.valueOf(hbaseScanSpec.getTableName());
    try (Admin admin = formatPlugin.getConnection().getAdmin();
        RegionLocator locator = formatPlugin.getConnection().getRegionLocator(tableName)) {
        hTableDesc = admin.getTableDescriptor(tableName);
        // Fetch tableStats only once and cache it.
        if (tableStats == null) {
            tableStats = new MapRDBTableStats(getHBaseConf(), hbaseScanSpec.getTableName());
        }
        boolean foundStartRegion = false;
        regionsToScan = new TreeMap<TabletFragmentInfo, String>();
        List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
        for (HRegionLocation regionLocation : regionLocations) {
            HRegionInfo regionInfo = regionLocation.getRegionInfo();
            if (!foundStartRegion && hbaseScanSpec.getStartRow() != null && hbaseScanSpec.getStartRow().length != 0 && !regionInfo.containsRow(hbaseScanSpec.getStartRow())) {
                continue;
            }
            foundStartRegion = true;
            regionsToScan.put(new TabletFragmentInfo(regionInfo), regionLocation.getHostname());
            if (hbaseScanSpec.getStopRow() != null && hbaseScanSpec.getStopRow().length != 0 && regionInfo.containsRow(hbaseScanSpec.getStopRow())) {
                break;
            }
        }
    } catch (Exception e) {
        throw new DrillRuntimeException("Error getting region info for table: " + hbaseScanSpec.getTableName(), e);
    }
    verifyColumns();
}
Also used : MapRDBTableStats(org.apache.drill.exec.store.mapr.db.MapRDBTableStats) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Admin(org.apache.hadoop.hbase.client.Admin) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) IOException(java.io.IOException) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) TabletFragmentInfo(org.apache.drill.exec.store.mapr.db.TabletFragmentInfo)
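
init() shows a recurring shape: acquire closeable resources with try-with-resources so they are released on every exit path, and rethrow any checked exception as an unchecked exception that names the object being processed. A minimal file-based sketch of that shape (the file path and helper name are illustrative):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

// Sketch: resources close automatically; a checked IOException is wrapped
// in an unchecked exception carrying the context (here, the file path).
final class WrapCheckedSketch {
    static String firstLine(String path) {
        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
            return reader.readLine();
        } catch (IOException e) {
            throw new RuntimeException("Error reading file: " + path, e);
        }
    }
}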

Example 50 with DrillRuntimeException

use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.

the class SortRecordBatchBuilder method add.

public void add(RecordBatchData rbd) {
    long batchBytes = getSize(rbd.getContainer());
    if (batchBytes == 0 && batches.size() > 0) {
        return;
    }
    if (runningBatches >= Character.MAX_VALUE) {
        final String errMsg = String.format("Tried to add more than %d number of batches.", (int) Character.MAX_VALUE);
        logger.error(errMsg);
        throw new DrillRuntimeException(errMsg);
    }
    if (!reservation.add(rbd.getRecordCount() * 4)) {
        final String errMsg = String.format("Failed to pre-allocate memory for SV. " + "Existing recordCount*4 = %d, " + "incoming batch recordCount*4 = %d", recordCount * 4, rbd.getRecordCount() * 4);
        logger.error(errMsg);
        throw new DrillRuntimeException(errMsg);
    }
    if (rbd.getRecordCount() == 0 && batches.size() > 0) {
        rbd.getContainer().zeroVectors();
        SelectionVector2 sv2 = rbd.getSv2();
        if (sv2 != null) {
            sv2.clear();
        }
        return;
    }
    runningBatches++;
    batches.put(rbd.getContainer().getSchema(), rbd);
    recordCount += rbd.getRecordCount();
}
Also used : SelectionVector2(org.apache.drill.exec.record.selection.SelectionVector2) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
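
Both guard clauses in add() share a log-then-throw shape: format the message once so the log entry and the exception text are guaranteed to match. A dependency-free sketch of that shape (System.err stands in for the slf4j logger):

// Sketch of the guard-clause pattern: check the precondition up front,
// build the message once, log it, then throw it.
final class GuardClauseSketch {
    private int runningBatches;

    void addBatch() {
        if (runningBatches >= Character.MAX_VALUE) {
            final String errMsg = String.format(
                    "Tried to add more than %d batches.", (int) Character.MAX_VALUE);
            System.err.println(errMsg); // stands in for logger.error(errMsg)
            throw new RuntimeException(errMsg);
        }
        runningBatches++;
    }
}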

Aggregations

DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException) 69
IOException (java.io.IOException) 32
VersionMismatchException (org.apache.drill.exec.exception.VersionMismatchException) 9
Stopwatch (com.google.common.base.Stopwatch) 7
ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException) 7
UserException (org.apache.drill.common.exceptions.UserException) 6
KeeperException (org.apache.zookeeper.KeeperException) 6
NodeExistsException (org.apache.zookeeper.KeeperException.NodeExistsException) 6
NoSuchElementException (java.util.NoSuchElementException) 5
Path (org.apache.hadoop.fs.Path) 5
Bson (org.bson.conversions.Bson) 4
Registry (org.apache.drill.exec.proto.UserBitShared.Registry) 3
ValueVector (org.apache.drill.exec.vector.ValueVector) 3
Admin (org.apache.hadoop.hbase.client.Admin) 3
Document (org.bson.Document) 3
File (java.io.File) 2
UnsupportedEncodingException (java.io.UnsupportedEncodingException) 2
URI (java.net.URI) 2
RexNode (org.apache.calcite.rex.RexNode) 2
LogicalExpression (org.apache.drill.common.expression.LogicalExpression) 2