Search in sources :

Example 26 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in the project asterixdb by Apache.

From the class IndexingScheduler, method getLocationConstraints.

/**
     * Set location constraints for a file scan operator with a list of file
     * splits. It tries to assign splits to their local machines fairly
     * Locality is more important than fairness
     *
     * @throws HyracksDataException
     */
public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
    if (splits == null) {
        /** deal the case when the splits array is null */
        return new String[] {};
    }
    int[] workloads = new int[NCs.length];
    Arrays.fill(workloads, 0);
    String[] locations = new String[splits.length];
    Map<String, IntWritable> locationToNumOfSplits = new HashMap<String, IntWritable>();
    /**
         * upper bound is number of splits
         */
    int upperBoundSlots = splits.length;
    try {
        Random random = new Random(System.currentTimeMillis());
        boolean[] scheduled = new boolean[splits.length];
        Arrays.fill(scheduled, false);
        /**
             * scan the splits and build the popularity map
             * give the machines with less local splits more scheduling priority
             */
        buildPopularityMap(splits, locationToNumOfSplits);
        HashMap<String, Integer> locationToNumOfAssignement = new HashMap<String, Integer>();
        for (String location : locationToNumOfSplits.keySet()) {
            locationToNumOfAssignement.put(location, 0);
        }
        /**
             * push data-local upper-bounds slots to each machine
             */
        scheduleLocalSlots(splits, workloads, locations, upperBoundSlots, random, scheduled, locationToNumOfSplits, locationToNumOfAssignement);
        int dataLocalCount = 0;
        for (int i = 0; i < scheduled.length; i++) {
            if (scheduled[i] == true) {
                dataLocalCount++;
            }
        }
        LOGGER.info("Data local rate: " + (scheduled.length == 0 ? 0.0 : ((float) dataLocalCount / (float) (scheduled.length))));
        /**
             * push non-data-local upper-bounds slots to each machine
             */
        locationToNumOfAssignement.clear();
        for (String nc : NCs) {
            locationToNumOfAssignement.put(nc, 0);
        }
        for (int i = 0; i < scheduled.length; i++) {
            if (scheduled[i]) {
                locationToNumOfAssignement.put(locations[i], locationToNumOfAssignement.get(locations[i]) + 1);
            }
        }
        scheduleNonLocalSlots(splits, workloads, locations, upperBoundSlots, scheduled, locationToNumOfAssignement);
        return locations;
    } catch (IOException e) {
        throw new HyracksException(e);
    }
}
Also used : HashMap(java.util.HashMap) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) IOException(java.io.IOException) Random(java.util.Random) IntWritable(org.apache.hadoop.io.IntWritable)

Example 27 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in the project asterixdb by Apache.

From the class IndexingScheduler, method loadIPAddressToNCMap.

/**
 * Load the IP-address-to-NC map from the NCNameToNCInfoMap. Rebuilds
 * {@code NCs}, {@code ipToNcMapping} and {@code ncNameToIndex} from scratch
 * on every call.
 *
 * @param ncNameToNcInfos
 *            map from NC name to its controller info
 * @throws HyracksException
 *             if an NC's IP address cannot be resolved
 */
private void loadIPAddressToNCMap(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
    try {
        NCs = new String[ncNameToNcInfos.size()];
        ipToNcMapping.clear();
        ncNameToIndex.clear();
        int i = 0;
        /**
         * build the IP address to NC map; several NCs may share one IP address
         */
        for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
            String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().lookupIpAddress())
                    .getHostAddress();
            List<String> matchedNCs = ipToNcMapping.get(ipAddr);
            if (matchedNCs == null) {
                matchedNCs = new ArrayList<String>();
                ipToNcMapping.put(ipAddr, matchedNCs);
            }
            matchedNCs.add(entry.getKey());
            NCs[i] = entry.getKey();
            i++;
        }
        /**
         * set up the NC name to index mapping
         */
        for (i = 0; i < NCs.length; i++) {
            ncNameToIndex.put(NCs[i], i);
        }
    } catch (UnknownHostException e) {
        // Narrowed from catch (Exception): InetAddress.getByAddress is the only
        // checked-exception source in this block. Runtime failures now propagate unwrapped.
        throw new HyracksException(e);
    }
}
Also used : NodeControllerInfo(org.apache.hyracks.api.client.NodeControllerInfo) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) HashMap(java.util.HashMap) Map(java.util.Map) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException)

Example 28 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in the project asterixdb by Apache.

From the class PartitionManager, method registerPartitionRequest.

/**
 * Serves the first available partition for {@code partitionId} to
 * {@code writer}, or parks the request until the partition is produced.
 *
 * @param partitionId
 *            id of the requested partition
 * @param writer
 *            output channel that will receive the partition frames
 * @throws HyracksException
 *             wrapping any failure while writing the partition
 */
public synchronized void registerPartitionRequest(PartitionId partitionId, NetworkOutputChannel writer) throws HyracksException {
    try {
        List<IPartition> partitions = availablePartitionMap.get(partitionId);
        if (partitions == null || partitions.isEmpty()) {
            // Partition not produced yet: queue the request rather than failing,
            // so it can be served once the partition is registered.
            partitionRequests.put(partitionId, writer);
            return;
        }
        IPartition first = partitions.get(0);
        writer.setFrameSize(first.getTaskContext().getInitialFrameSize());
        first.writeTo(writer);
        // One-shot partitions are consumed by a single reader; drop them after serving.
        if (!first.isReusable()) {
            availablePartitionMap.remove(partitionId);
        }
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
}
Also used : IPartition(org.apache.hyracks.api.partitions.IPartition) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)

Example 29 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in the project asterixdb by Apache.

From the class DistributeJobWork, method run.

/**
 * Deserializes the distributed job's activity cluster graph and stores it on
 * this node; if that fails, best-effort notifies the cluster controller of
 * the distributed-job failure.
 */
@Override
public void run() {
    try {
        ncs.checkForDuplicateDistributedJob(jobId);
        ActivityClusterGraph graph =
                (ActivityClusterGraph) DeploymentUtils.deserialize(acgBytes, null, ncs.getContext());
        ncs.storeActivityClusterGraph(jobId, graph);
    } catch (HyracksException caught) {
        // Best-effort notification; if that also fails we can only record the trace.
        try {
            ncs.getClusterController().notifyDistributedJobFailure(jobId, ncs.getId());
        } catch (Exception notifyFailure) {
            notifyFailure.printStackTrace();
        }
    }
}
Also used : ActivityClusterGraph(org.apache.hyracks.api.job.ActivityClusterGraph) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException)

Example 30 with HyracksException

Use of org.apache.hyracks.api.exceptions.HyracksException in the project asterixdb by Apache.

From the class TestUtils, method create.

/**
 * Builds a standalone Hyracks task context for tests: a fresh IO manager,
 * a test NC service context, and a test joblet for job 0.
 *
 * @param frameSize
 *            frame size, in bytes, used by the test joblet
 * @return a task context for task (operator 0, activity 0, partition 0, attempt 0)
 */
public static IHyracksTaskContext create(int frameSize) {
    try {
        IOManager ioManager = createIoManager();
        INCServiceContext serviceCtx = new TestNCServiceContext(ioManager, null);
        TestJobletContext jobletCtx = new TestJobletContext(frameSize, serviceCtx, new JobId(0));
        TaskAttemptId tid = new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(0), 0), 0), 0);
        return new TestTaskContext(jobletCtx, tid);
    } catch (HyracksException e) {
        throw new RuntimeException(e);
    }
}
Also used : OperatorDescriptorId(org.apache.hyracks.api.dataflow.OperatorDescriptorId) TaskId(org.apache.hyracks.api.dataflow.TaskId) IOManager(org.apache.hyracks.control.nc.io.IOManager) TaskAttemptId(org.apache.hyracks.api.dataflow.TaskAttemptId) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) INCServiceContext(org.apache.hyracks.api.application.INCServiceContext) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) JobId(org.apache.hyracks.api.job.JobId)

Aggregations

HyracksException (org.apache.hyracks.api.exceptions.HyracksException)48 IOException (java.io.IOException)10 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)8 JobId (org.apache.hyracks.api.job.JobId)8 HashMap (java.util.HashMap)7 Test (org.junit.Test)6 ArrayList (java.util.ArrayList)5 Map (java.util.Map)5 IJobCapacityController (org.apache.hyracks.api.job.resource.IJobCapacityController)5 INodeManager (org.apache.hyracks.control.cc.cluster.INodeManager)5 JobRun (org.apache.hyracks.control.cc.job.JobRun)5 URL (java.net.URL)4 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)4 NodeControllerState (org.apache.hyracks.control.cc.NodeControllerState)4 File (java.io.File)3 HashSet (java.util.HashSet)3 NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo)3 JobSpecification (org.apache.hyracks.api.job.JobSpecification)3 PrintWriter (java.io.PrintWriter)2 StringWriter (java.io.StringWriter)2