
Example 21 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class JobExecutor method assignLocation.

private String assignLocation(ActivityClusterGraph acg, Map<TaskId, LValueConstraintExpression> locationMap, TaskId tid, TaskAttempt taskAttempt) throws HyracksException {
    ActivityId aid = tid.getActivityId();
    ActivityCluster ac = acg.getActivityMap().get(aid);
    Set<ActivityId> blockers = ac.getBlocked2BlockerMap().get(aid);
    String nodeId = null;
    if (blockers != null) {
        for (ActivityId blocker : blockers) {
            nodeId = findTaskLocation(new TaskId(blocker, tid.getPartition()));
            if (nodeId != null) {
                break;
            }
        }
    }
    INodeManager nodeManager = ccs.getNodeManager();
    Collection<String> liveNodes = nodeManager.getAllNodeIds();
    if (nodeId == null) {
        LValueConstraintExpression pLocationExpr = locationMap.get(tid);
        Object location = solver.getValue(pLocationExpr);
        if (location == null) {
            // pick any live node at random
            nodeId = liveNodes.toArray(new String[liveNodes.size()])[random.nextInt(liveNodes.size())];
        } else if (location instanceof String) {
            nodeId = (String) location;
        } else if (location instanceof String[]) {
            for (String choice : (String[]) location) {
                if (liveNodes.contains(choice)) {
                    nodeId = choice;
                    break;
                }
            }
            if (nodeId == null) {
                throw new HyracksException("No satisfiable location found for " + taskAttempt.getTaskAttemptId());
            }
        } else {
            throw new HyracksException("Unknown type of value for " + pLocationExpr + ": " + location + "(" + location.getClass() + ")");
        }
    }
    if (nodeId == null) {
        throw new HyracksException("No satisfiable location found for " + taskAttempt.getTaskAttemptId());
    }
    if (!liveNodes.contains(nodeId)) {
        throw new HyracksException("Node " + nodeId + " not live");
    }
    return nodeId;
}
Also used : INodeManager(org.apache.hyracks.control.cc.cluster.INodeManager) LValueConstraintExpression(org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression) TaskId(org.apache.hyracks.api.dataflow.TaskId) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ActivityCluster(org.apache.hyracks.api.job.ActivityCluster)
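
The placement logic above resolves a node in three steps: reuse a blocker task's location, fall back to the constraint solver's value (a single node id or an array of candidates), and otherwise pick any live node, validating liveness at the end. A minimal standalone sketch of that fallback chain, assuming nothing from the Hyracks API (PlacementException and pickLocation are illustrative names only):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Random;

public class PlacementSketch {

    // Illustrative exception type; the real code throws HyracksException.
    static class PlacementException extends Exception {
        PlacementException(String message) {
            super(message);
        }
    }

    private static final Random RANDOM = new Random();

    // Resolve a node id: prefer a previously assigned location (e.g. a blocker's node),
    // then a constraint value (a single node id or an array of candidates),
    // then any live node at random; always validate liveness before returning.
    static String pickLocation(String previouslyAssigned, Object constraintValue,
            Collection<String> liveNodes) throws PlacementException {
        String nodeId = previouslyAssigned;
        if (nodeId == null) {
            if (constraintValue == null) {
                List<String> nodes = new ArrayList<>(liveNodes);
                nodeId = nodes.get(RANDOM.nextInt(nodes.size()));
            } else if (constraintValue instanceof String) {
                nodeId = (String) constraintValue;
            } else if (constraintValue instanceof String[]) {
                for (String choice : (String[]) constraintValue) {
                    if (liveNodes.contains(choice)) {
                        nodeId = choice;
                        break;
                    }
                }
            } else {
                throw new PlacementException("Unknown constraint value type: " + constraintValue.getClass());
            }
        }
        if (nodeId == null) {
            throw new PlacementException("No satisfiable location found");
        }
        if (!liveNodes.contains(nodeId)) {
            throw new PlacementException("Node " + nodeId + " is not live");
        }
        return nodeId;
    }

    public static void main(String[] args) throws PlacementException {
        Collection<String> live = Arrays.asList("nc1", "nc2");
        System.out.println(pickLocation(null, new String[] { "nc3", "nc2" }, live)); // prints nc2
    }
}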

Example 22 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class JobManager method finalComplete.

@Override
public void finalComplete(JobRun run) throws HyracksException {
    checkJob(run);
    JobId jobId = run.getJobId();
    HyracksException caughtException = null;
    CCServiceContext serviceCtx = ccs.getContext();
    if (serviceCtx != null) {
        try {
            serviceCtx.notifyJobFinish(jobId);
        } catch (HyracksException e) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
            caughtException = e;
        }
    }
    run.setStatus(run.getPendingStatus(), run.getPendingExceptions());
    run.setEndTime(System.currentTimeMillis());
    activeRunMap.remove(jobId);
    runMapArchive.put(jobId, run);
    runMapHistory.put(jobId, run.getExceptions());
    if (run.getActivityClusterGraph().isReportTaskDetails()) {
        // Log job details when profiling is enabled.
        try {
            ccs.getJobLogFile().log(createJobLogObject(run));
        } catch (Exception e) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
            if (caughtException == null) {
                caughtException = new HyracksException(e);
            } else {
                caughtException.addSuppressed(e);
            }
        }
    }
    // Releases the cluster capacity occupied by the job.
    JobSpecification job = run.getJobSpecification();
    jobCapacityController.release(job);
    // Picks the next job to execute.
    pickJobsToRun();
    // Rethrows the caught exception, if any.
    if (caughtException != null) {
        throw caughtException;
    }
}
Also used : HyracksException(org.apache.hyracks.api.exceptions.HyracksException) JobSpecification(org.apache.hyracks.api.job.JobSpecification) CCServiceContext(org.apache.hyracks.control.cc.application.CCServiceContext) JobId(org.apache.hyracks.api.job.JobId) InvocationTargetException(java.lang.reflect.InvocationTargetException)
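
finalComplete above also shows a useful aggregation idiom: remember the first exception, attach later ones with addSuppressed so they are not lost, finish all cleanup work, and only then rethrow. A minimal sketch of that collect-and-rethrow pattern, with a hypothetical Step interface standing in for the individual completion actions:

import java.util.Arrays;
import java.util.List;

public class SuppressedAggregationSketch {

    // Hypothetical stand-in for an individual completion action (notify, log, archive, ...).
    interface Step {
        void run() throws Exception;
    }

    // Run every step; remember the first failure, attach later failures as suppressed,
    // and rethrow only after all steps have been attempted.
    static void runAll(List<Step> steps) throws Exception {
        Exception first = null;
        for (Step step : steps) {
            try {
                step.run();
            } catch (Exception e) {
                if (first == null) {
                    first = e;
                } else {
                    first.addSuppressed(e);
                }
            }
        }
        if (first != null) {
            throw first;
        }
    }

    public static void main(String[] args) {
        List<Step> steps = Arrays.asList(
                () -> { throw new IllegalStateException("notify failed"); },
                () -> System.out.println("archive run"),
                () -> { throw new RuntimeException("log failed"); });
        try {
            runAll(steps);
        } catch (Exception e) {
            System.out.println(e + ", suppressed: " + Arrays.toString(e.getSuppressed()));
        }
    }
}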

Example 23 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class ResultWriterOperatorDescriptor method createPushRuntime.

@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) throws HyracksDataException {
    final IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
    final IFrame frame = new VSizeFrame(ctx);
    final FrameOutputStream frameOutputStream = new FrameOutputStream(ctx.getInitialFrameSize());
    frameOutputStream.reset(frame, true);
    PrintStream printStream = new PrintStream(frameOutputStream);
    final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
    final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc, printStream);
    final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(outRecordDesc);
    return new AbstractUnaryInputSinkOperatorNodePushable() {

        private IFrameWriter datasetPartitionWriter;

        private boolean failed = false;

        @Override
        public void open() throws HyracksDataException {
            try {
                datasetPartitionWriter = dpm.createDatasetPartitionWriter(ctx, rsId, ordered, asyncMode, partition, nPartitions);
                datasetPartitionWriter.open();
                resultSerializer.init();
            } catch (HyracksException e) {
                throw HyracksDataException.create(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            frameTupleAccessor.reset(buffer);
            for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                if (!frameOutputStream.appendTuple()) {
                    frameOutputStream.flush(datasetPartitionWriter);
                    resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                    frameOutputStream.appendTuple();
                }
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            failed = true;
            datasetPartitionWriter.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                if (!failed && frameOutputStream.getTupleCount() > 0) {
                    frameOutputStream.flush(datasetPartitionWriter);
                }
            } catch (Exception e) {
                datasetPartitionWriter.fail();
                throw e;
            } finally {
                datasetPartitionWriter.close();
            }
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("{ ");
            sb.append("\"rsId\": \"").append(rsId).append("\", ");
            sb.append("\"ordered\": ").append(ordered).append(", ");
            sb.append("\"asyncMode\": ").append(asyncMode).append(" }");
            return sb.toString();
        }
    };
}
Also used : PrintStream(java.io.PrintStream) IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) IFrame(org.apache.hyracks.api.comm.IFrame) IResultSerializer(org.apache.hyracks.api.dataflow.value.IResultSerializer) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ByteBuffer(java.nio.ByteBuffer) VSizeFrame(org.apache.hyracks.api.comm.VSizeFrame) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IOException(java.io.IOException) IDatasetPartitionManager(org.apache.hyracks.api.dataset.IDatasetPartitionManager) AbstractUnaryInputSinkOperatorNodePushable(org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable) FrameOutputStream(org.apache.hyracks.dataflow.common.comm.io.FrameOutputStream) FrameTupleAccessor(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor)
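
One detail worth noting in nextFrame above: when a serialized tuple no longer fits in the current output frame, the frame is flushed to the downstream writer and the same tuple is appended again to the now-empty frame; close flushes whatever remains. A minimal sketch of that flush-and-retry buffering pattern, using a hypothetical fixed-capacity Buffer instead of the Hyracks frame API:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FlushAndRetrySketch {

    // Tiny fixed-capacity buffer standing in for an output frame (illustrative only).
    static class Buffer {
        private final int capacity;
        private final List<String> tuples = new ArrayList<>();

        Buffer(int capacity) {
            this.capacity = capacity;
        }

        // Returns false when the buffer is full; the caller must flush and retry.
        boolean append(String tuple) {
            if (tuples.size() >= capacity) {
                return false;
            }
            tuples.add(tuple);
            return true;
        }

        List<String> drain() {
            List<String> out = new ArrayList<>(tuples);
            tuples.clear();
            return out;
        }
    }

    public static void main(String[] args) {
        Buffer frame = new Buffer(2);
        for (String tuple : Arrays.asList("a", "b", "c", "d", "e")) {
            if (!frame.append(tuple)) {
                System.out.println("flush: " + frame.drain()); // emit the full frame downstream
                frame.append(tuple);                           // retry the same tuple on the empty frame
            }
        }
        System.out.println("final flush: " + frame.drain());   // flush the partial frame on close
    }
}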

Example 24 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class APIFramework method chooseLocations.

// Chooses the location constraints, i.e., whether to use storage parallelism or a user-specified number
// of cores.
private static AlgebricksAbsolutePartitionConstraint chooseLocations(IClusterInfoCollector clusterInfoCollector, int parallelismHint, AlgebricksAbsolutePartitionConstraint storageLocations) throws AlgebricksException {
    try {
        Map<String, NodeControllerInfo> ncMap = clusterInfoCollector.getNodeControllerInfos();
        // Gets total number of cores in the cluster.
        int totalNumCores = getTotalNumCores(ncMap);
        // Uses the storage locations if the storage parallelism fits within the total number of cores;
        // otherwise, we will use all available cores.
        if (parallelismHint == CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE && storageLocations.getLocations().length <= totalNumCores) {
            return storageLocations;
        }
        return getComputationLocations(ncMap, parallelismHint);
    } catch (HyracksException e) {
        throw new AlgebricksException(e);
    }
}
Also used : NodeControllerInfo(org.apache.hyracks.api.client.NodeControllerInfo) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
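
The condition above reads as: if the compiler was asked to match storage parallelism and the number of storage partitions fits within the cluster's total cores, keep the storage locations; otherwise fall back to a computation-based assignment. A small standalone sketch of that decision, where PARALLELISM_AS_STORAGE is an illustrative sentinel mirroring CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE:

public class ParallelismChoiceSketch {

    // Illustrative sentinel; the real constant lives in CompilerProperties (assumption).
    static final int PARALLELISM_AS_STORAGE = -1;

    // Keep the storage locations only when the caller asked for storage parallelism
    // and the storage partition count does not exceed the cluster's core count.
    static boolean useStorageParallelism(int parallelismHint, int storagePartitions, int totalNumCores) {
        return parallelismHint == PARALLELISM_AS_STORAGE && storagePartitions <= totalNumCores;
    }

    public static void main(String[] args) {
        System.out.println(useStorageParallelism(PARALLELISM_AS_STORAGE, 4, 16));  // true: storage fits
        System.out.println(useStorageParallelism(PARALLELISM_AS_STORAGE, 32, 16)); // false: more partitions than cores
        System.out.println(useStorageParallelism(8, 4, 16));                       // false: explicit hint takes over
    }
}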

Example 25 with HyracksException

use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.

the class ExecuteStatementRequestMessage method handle.

@Override
public void handle(ICcApplicationContext ccAppCtx) throws HyracksDataException, InterruptedException {
    ICCServiceContext ccSrvContext = ccAppCtx.getServiceContext();
    ClusterControllerService ccSrv = (ClusterControllerService) ccSrvContext.getControllerService();
    CCApplication ccApp = (CCApplication) ccSrv.getApplication();
    CCMessageBroker messageBroker = (CCMessageBroker) ccSrvContext.getMessageBroker();
    CCExtensionManager ccExtMgr = (CCExtensionManager) ccAppCtx.getExtensionManager();
    ILangCompilationProvider compilationProvider = ccExtMgr.getCompilationProvider(lang);
    IStorageComponentProvider storageComponentProvider = ccAppCtx.getStorageComponentProvider();
    IStatementExecutorFactory statementExecutorFactory = ccApp.getStatementExecutorFactory();
    IStatementExecutorContext statementExecutorContext = ccApp.getStatementExecutorContext();
    ccSrv.getExecutor().submit(() -> {
        ExecuteStatementResponseMessage responseMsg = new ExecuteStatementResponseMessage(requestMessageId);
        try {
            final IClusterManagementWork.ClusterState clusterState = ClusterStateManager.INSTANCE.getState();
            if (clusterState != IClusterManagementWork.ClusterState.ACTIVE) {
                throw new IllegalStateException("Cannot execute request, cluster is " + clusterState);
            }
            IParser parser = compilationProvider.getParserFactory().createParser(statementsText);
            List<Statement> statements = parser.parse();
            StringWriter outWriter = new StringWriter(256);
            PrintWriter outPrinter = new PrintWriter(outWriter);
            SessionOutput.ResultDecorator resultPrefix = ResultUtil.createPreResultDecorator();
            SessionOutput.ResultDecorator resultPostfix = ResultUtil.createPostResultDecorator();
            SessionOutput.ResultAppender appendHandle = ResultUtil.createResultHandleAppender(handleUrl);
            SessionOutput.ResultAppender appendStatus = ResultUtil.createResultStatusAppender();
            SessionOutput sessionOutput = new SessionOutput(sessionConfig, outPrinter, resultPrefix, resultPostfix, appendHandle, appendStatus);
            IStatementExecutor.ResultMetadata outMetadata = new IStatementExecutor.ResultMetadata();
            MetadataManager.INSTANCE.init();
            IStatementExecutor translator = statementExecutorFactory.create(ccAppCtx, statements, sessionOutput, compilationProvider, storageComponentProvider);
            translator.compileAndExecute(ccAppCtx.getHcc(), null, delivery, outMetadata, new IStatementExecutor.Stats(), clientContextID, statementExecutorContext);
            outPrinter.close();
            responseMsg.setResult(outWriter.toString());
            responseMsg.setMetadata(outMetadata);
        } catch (AlgebricksException | HyracksException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
            // we trust that "our" exceptions are serializable and have a comprehensible error message
            GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
            responseMsg.setError(pe);
        } catch (Exception e) {
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
            responseMsg.setError(new Exception(e.toString()));
        }
        try {
            messageBroker.sendApplicationMessageToNC(responseMsg, requestNodeId);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, e.toString(), e);
        }
    });
}
Also used : ICCServiceContext(org.apache.hyracks.api.application.ICCServiceContext) CCApplication(org.apache.asterix.hyracks.bootstrap.CCApplication) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) StringWriter(java.io.StringWriter) ILangCompilationProvider(org.apache.asterix.compiler.provider.ILangCompilationProvider) IStatementExecutorFactory(org.apache.asterix.translator.IStatementExecutorFactory) PrintWriter(java.io.PrintWriter) IStorageComponentProvider(org.apache.asterix.common.context.IStorageComponentProvider) IStatementExecutorContext(org.apache.asterix.translator.IStatementExecutorContext) Statement(org.apache.asterix.lang.common.base.Statement) CCMessageBroker(org.apache.asterix.messaging.CCMessageBroker) IClusterManagementWork(org.apache.asterix.common.api.IClusterManagementWork) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) TokenMgrError(org.apache.asterix.lang.aql.parser.TokenMgrError) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CCExtensionManager(org.apache.asterix.app.cc.CCExtensionManager) IStatementExecutor(org.apache.asterix.translator.IStatementExecutor) SessionOutput(org.apache.asterix.translator.SessionOutput) ClusterControllerService(org.apache.hyracks.control.cc.ClusterControllerService) IParser(org.apache.asterix.lang.common.base.IParser)
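
The error handling above separates "our" exceptions (known serializable types with readable messages, forwarded to the client as-is) from unexpected ones, which are replaced with a plain Exception carrying only the string form so the response message stays serializable. A minimal sketch of that split, where ResponseStub, KnownException, and Work are illustrative stand-ins:

public class ErrorResponseSketch {

    // Stand-in for the response message sent back to the requesting node (illustrative only).
    static class ResponseStub {
        Exception error;
    }

    // Exceptions of this type are trusted to be serializable and to carry a readable message.
    static class KnownException extends Exception {
        KnownException(String message) {
            super(message);
        }
    }

    // Hypothetical unit of work standing in for parse + compile + execute.
    interface Work {
        void run() throws Exception;
    }

    static ResponseStub execute(Work work) {
        ResponseStub response = new ResponseStub();
        try {
            work.run();
        } catch (KnownException e) {
            // Trusted exception: forward it unchanged.
            response.error = e;
        } catch (Exception e) {
            // Unexpected exception: keep only its string form to avoid serialization surprises.
            response.error = new Exception(e.toString());
        }
        return response;
    }

    public static void main(String[] args) {
        System.out.println(execute(() -> { throw new KnownException("syntax error at line 3"); }).error);
        System.out.println(execute(() -> { throw new IllegalStateException("internal bug"); }).error);
    }
}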

Aggregations

HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 48
IOException (java.io.IOException): 10
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 8
JobId (org.apache.hyracks.api.job.JobId): 8
HashMap (java.util.HashMap): 7
Test (org.junit.Test): 6
ArrayList (java.util.ArrayList): 5
Map (java.util.Map): 5
IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController): 5
INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager): 5
JobRun (org.apache.hyracks.control.cc.job.JobRun): 5
URL (java.net.URL): 4
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 4
NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState): 4
File (java.io.File): 3
HashSet (java.util.HashSet): 3
NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo): 3
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 3
PrintWriter (java.io.PrintWriter): 2
StringWriter (java.io.StringWriter): 2