Example 16 with LinkedList

use of java.util.LinkedList in project storm by apache.

the class DefaultResourceAwareStrategy method orderExecutors.

/**
     * Order executors based on how many in and out connections they will potentially need to make.
     * First, order components by the number of in and out connections each will have. Then iterate through the sorted list of components.
     * For each component, sort its neighbors by how many connections they will have to make with that component.
     * Add an executor from this component and then from each neighboring component in sorted order. Repeat until there is nothing left to schedule.
     *
     * @param td                  the topology the executors belong to
     * @param unassignedExecutors a collection of unassigned executors that still need to be assigned; only executors from this list should be scheduled
     * @return a list of executors in sorted order
     */
private List<ExecutorDetails> orderExecutors(TopologyDetails td, Collection<ExecutorDetails> unassignedExecutors) {
    Map<String, Component> componentMap = td.getComponents();
    List<ExecutorDetails> execsScheduled = new LinkedList<>();
    // Build a FIFO queue of the still-unassigned executors for each component.
    Map<String, Queue<ExecutorDetails>> compToExecsToSchedule = new HashMap<>();
    for (Component component : componentMap.values()) {
        compToExecsToSchedule.put(component.id, new LinkedList<ExecutorDetails>());
        for (ExecutorDetails exec : component.execs) {
            if (unassignedExecutors.contains(exec)) {
                compToExecsToSchedule.get(component.id).add(exec);
            }
        }
    }
    Set<Component> sortedComponents = sortComponents(componentMap);
    sortedComponents.addAll(componentMap.values());
    for (Component currComp : sortedComponents) {
        Map<String, Component> neighbors = new HashMap<String, Component>();
        for (String compId : (List<String>) ListUtils.union(currComp.children, currComp.parents)) {
            neighbors.put(compId, componentMap.get(compId));
        }
        Set<Component> sortedNeighbors = sortNeighbors(currComp, neighbors);
        Queue<ExecutorDetails> currCompExesToSched = compToExecsToSchedule.get(currComp.id);
        // flag stays true as long as some queue yielded an executor this pass.
        boolean flag = false;
        do {
            flag = false;
            if (!currCompExesToSched.isEmpty()) {
                execsScheduled.add(currCompExesToSched.poll());
                flag = true;
            }
            for (Component neighborComp : sortedNeighbors) {
                Queue<ExecutorDetails> neighborCompExesToSched = compToExecsToSchedule.get(neighborComp.id);
                if (!neighborCompExesToSched.isEmpty()) {
                    execsScheduled.add(neighborCompExesToSched.poll());
                    flag = true;
                }
            }
        } while (flag);
    }
    return execsScheduled;
}
Also used: ExecutorDetails (org.apache.storm.scheduler.ExecutorDetails), HashMap (java.util.HashMap), LinkedList (java.util.LinkedList), ArrayList (java.util.ArrayList), List (java.util.List), Component (org.apache.storm.scheduler.resource.Component), Queue (java.util.Queue)
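
The interleaving described in the javadoc is easiest to see on a toy input. The following is a minimal, self-contained sketch of the same drain-one-per-component round-robin loop; the component names and plain String executor ids are made up for illustration, and the connection-count sorting that orderExecutors performs first is skipped here:

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;

public class RoundRobinDrain {
    public static void main(String[] args) {
        // One FIFO queue of executors per component (LinkedList implements Queue).
        Map<String, Queue<String>> byComponent = new LinkedHashMap<>();
        byComponent.put("spout", new LinkedList<>(List.of("spout-1", "spout-2")));
        byComponent.put("bolt-a", new LinkedList<>(List.of("bolt-a-1")));
        byComponent.put("bolt-b", new LinkedList<>(List.of("bolt-b-1", "bolt-b-2", "bolt-b-3")));

        List<String> ordered = new LinkedList<>();
        boolean scheduledAny;
        do {
            scheduledAny = false;
            // Take at most one executor from each component per pass, which
            // interleaves the components exactly like the do/while above.
            for (Queue<String> queue : byComponent.values()) {
                if (!queue.isEmpty()) {
                    ordered.add(queue.poll());
                    scheduledAny = true;
                }
            }
        } while (scheduledAny);
        // Prints: [spout-1, bolt-a-1, bolt-b-1, spout-2, bolt-b-2, bolt-b-3]
        System.out.println(ordered);
    }
}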

Example 17 with LinkedList

use of java.util.LinkedList in project hadoop by apache.

the class TestContainerManagerSecurity method testContainerTokenWithEpoch.

/**
   * This tests whether a containerId is serialized/deserialized with its epoch intact.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
private void testContainerTokenWithEpoch(Configuration conf) throws IOException, InterruptedException, YarnException {
    LOG.info("Running test for serializing/deserializing containerIds");
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM = yarnCluster.getResourceManager().getRMContext().getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 0);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, (5L << 40) | 3L);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM = nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM, nm);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both key ids should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(), nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    // Creating a normal Container Token
    RMContainerTokenSecretManager containerTokenSecretManager = yarnCluster.getResourceManager().getRMContext().getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken = containerTokenSecretManager.createContainerToken(cId, 0, nodeId, user, r, Priority.newInstance(0), 0);
    ContainerTokenIdentifier containerTokenIdentifier = new ContainerTokenIdentifier();
    byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
    containerTokenIdentifier.readFields(dib);
    Assert.assertEquals(cId, containerTokenIdentifier.getContainerID());
    Assert.assertEquals(cId.toString(), containerTokenIdentifier.getContainerID().toString());
    Token nmToken = nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    testStartContainer(rpc, appAttemptId, nodeId, containerToken, nmToken, false);
    List<ContainerId> containerIds = new LinkedList<ContainerId>();
    containerIds.add(cId);
    ContainerManagementProtocol proxy = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
    GetContainerStatusesResponse res = proxy.getContainerStatuses(GetContainerStatusesRequest.newInstance(containerIds));
    Assert.assertNotNull(res.getContainerStatuses().get(0));
    Assert.assertEquals(cId, res.getContainerStatuses().get(0).getContainerId());
    Assert.assertEquals(cId.toString(), res.getContainerStatuses().get(0).getContainerId().toString());
}
Also used: Resource (org.apache.hadoop.yarn.api.records.Resource), NMTokenSecretManagerInNM (org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM), InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken), Token (org.apache.hadoop.yarn.api.records.Token), ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId), YarnRPC (org.apache.hadoop.yarn.ipc.YarnRPC), NMTokenSecretManagerInRM (org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM), LinkedList (java.util.LinkedList), ContainerTokenIdentifier (org.apache.hadoop.yarn.security.ContainerTokenIdentifier), GetContainerStatusesResponse (org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse), NodeManager (org.apache.hadoop.yarn.server.nodemanager.NodeManager), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), ContainerManagementProtocol (org.apache.hadoop.yarn.api.ContainerManagementProtocol), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), NodeId (org.apache.hadoop.yarn.api.records.NodeId), RMContainerTokenSecretManager (org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)
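
The container id in this test, (5L << 40) | 3L, packs a restart epoch into the bits above a 40-bit sequence number, and the assertions check that the token round trip preserves the whole 64-bit value. A small sketch of that bit arithmetic with plain longs; the 40-bit split is taken from the test's own expression, and the pack/unpack helpers and SEQUENCE_MASK constant are ours, not a Hadoop API:

public class EpochPacking {
    // Low 40 bits: per-attempt container sequence number.
    // Remaining high bits: RM restart epoch (layout assumed from the test).
    private static final long SEQUENCE_MASK = (1L << 40) - 1;

    static long pack(long epoch, long sequence) {
        return (epoch << 40) | (sequence & SEQUENCE_MASK);
    }

    public static void main(String[] args) {
        long id = pack(5L, 3L); // same value as (5L << 40) | 3L in the test
        System.out.println("epoch    = " + (id >>> 40));          // 5
        System.out.println("sequence = " + (id & SEQUENCE_MASK)); // 3
        // A correct serialize/deserialize of the token identifier must keep
        // both parts, which is what the getContainerID() assertions verify.
    }
}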

Example 18 with LinkedList

use of java.util.LinkedList in project hadoop by apache.

the class TestFSRMStateStore method verifyFilesUnreadablebyHDFS.

private void verifyFilesUnreadablebyHDFS(MiniDFSCluster cluster, Path root) throws Exception {
    DistributedFileSystem fs = cluster.getFileSystem();
    Queue<Path> paths = new LinkedList<>();
    paths.add(root);
    while (!paths.isEmpty()) {
        Path p = paths.poll();
        FileStatus stat = fs.getFileStatus(p);
        if (!stat.isDirectory()) {
            try {
                LOG.warn("\n\n ##Testing path [" + p + "]\n\n");
                fs.open(p);
                Assert.fail("Super user should not be able to read [" + UserGroupInformation.getCurrentUser() + "] [" + p.getName() + "]");
            } catch (AccessControlException e) {
                Assert.assertTrue(e.getMessage().contains("superuser is not allowed to perform this operation"));
            } catch (Exception e) {
                Assert.fail("Should get an AccessControlException here");
            }
        } else {
            FileStatus[] ls = fs.listStatus(p);
            for (FileStatus f : ls) {
                paths.add(f.getPath());
            }
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem), LinkedList (java.util.LinkedList), IOException (java.io.IOException)
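
The LinkedList here acts purely as a FIFO Queue driving an iterative breadth-first walk from root: directories push their children onto the queue, and only leaf files are probed. The same shape works over any hierarchy; here is a minimal JDK-only sketch using java.io.File in place of Hadoop's FileSystem and FileStatus:

import java.io.File;
import java.util.LinkedList;
import java.util.Queue;

public class BreadthFirstWalk {
    public static void main(String[] args) {
        Queue<File> paths = new LinkedList<>();
        paths.add(new File("."));
        while (!paths.isEmpty()) {
            File p = paths.poll();
            if (p.isDirectory()) {
                File[] children = p.listFiles();
                if (children != null) { // listFiles returns null on I/O error
                    for (File child : children) {
                        paths.add(child);
                    }
                }
            } else {
                // Leaf file: this is the point where the test calls fs.open(p)
                // and asserts that an AccessControlException is thrown.
                System.out.println(p.getPath());
            }
        }
    }
}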

Example 19 with LinkedList

use of java.util.LinkedList in project hadoop by apache.

the class EntityGroupFSTimelineStore method getTimelineStoresFromCacheIds.

private List<TimelineStore> getTimelineStoresFromCacheIds(Set<TimelineEntityGroupId> groupIds, String entityType, List<EntityCacheItem> cacheItems) throws IOException {
    List<TimelineStore> stores = new LinkedList<TimelineStore>();
    // Collect the cached stores that resolve to non-null storage for the group ids.
    for (TimelineEntityGroupId groupId : groupIds) {
        TimelineStore storeForId = getCachedStore(groupId, cacheItems);
        if (storeForId != null) {
            LOG.debug("Adding {} as a store for the query", storeForId.getName());
            stores.add(storeForId);
            metrics.incrGetEntityToDetailOps();
        }
    }
    if (stores.isEmpty()) {
        LOG.debug("Using summary store for {}", entityType);
        stores.add(this.summaryStore);
        metrics.incrGetEntityToSummaryOps();
    }
    return stores;
}
Also used: TimelineEntityGroupId (org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId), LinkedList (java.util.LinkedList)
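
The shape worth noting is accumulate-then-fallback: gather whatever per-group stores resolve from the cache, and substitute the single summary store only when none do. A tiny generic sketch of the same pattern, with illustrative String names standing in for TimelineStore and the cache lookup:

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class FallbackLookup {
    static List<String> storesFor(List<String> groupIds, Map<String, String> cache, String summaryStore) {
        List<String> stores = new LinkedList<>();
        for (String groupId : groupIds) {
            String store = cache.get(groupId); // null when nothing is cached for the group
            if (store != null) {
                stores.add(store);
            }
        }
        if (stores.isEmpty()) {
            stores.add(summaryStore); // same fallback as getTimelineStoresFromCacheIds
        }
        return stores;
    }

    public static void main(String[] args) {
        System.out.println(storesFor(List.of("g1", "g2"), Map.of("g1", "store-1"), "summary")); // [store-1]
        System.out.println(storesFor(List.of("g3"), Map.of(), "summary"));                      // [summary]
    }
}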

Example 20 with LinkedList

use of java.util.LinkedList in project hive by apache.

the class HiveMetaStoreChecker method checkPartitionDirs.

private void checkPartitionDirs(final ExecutorService executor, final Path basePath, final Set<Path> result, final FileSystem fs, final int maxDepth) throws HiveException {
    try {
        Queue<Future<Path>> futures = new LinkedList<Future<Path>>();
        ConcurrentLinkedQueue<PathDepthInfo> nextLevel = new ConcurrentLinkedQueue<>();
        nextLevel.add(new PathDepthInfo(basePath, 0));
        // Breadth-first: process one directory level per iteration until no new sub-directories appear.
        while (!nextLevel.isEmpty()) {
            ConcurrentLinkedQueue<PathDepthInfo> tempQueue = new ConcurrentLinkedQueue<>();
            //process each level in parallel
            while (!nextLevel.isEmpty()) {
                futures.add(executor.submit(new PathDepthInfoCallable(nextLevel.poll(), maxDepth, fs, tempQueue)));
            }
            while (!futures.isEmpty()) {
                Path p = futures.poll().get();
                if (p != null) {
                    result.add(p);
                }
            }
            //update nextLevel with the sub-directories newly discovered at this level
            nextLevel = tempQueue;
        }
    } catch (InterruptedException | ExecutionException e) {
        LOG.error(e.getMessage());
        executor.shutdownNow();
        throw new HiveException(e.getCause());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Future (java.util.concurrent.Future), ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue), ExecutionException (java.util.concurrent.ExecutionException), LinkedList (java.util.LinkedList)
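
The structure to copy here is the level-synchronous fan-out: submit one task per directory at the current depth, drain the LinkedList of Futures as a barrier, then swap in the queue of children the workers produced. A stripped-down JDK-only sketch of that shape; the integer "nodes" and the expand() helper are hypothetical stand-ins for paths and directory listing:

import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class LevelParallelWalk {
    // Hypothetical directory listing: every node below 8 has two children.
    static List<Integer> expand(int node) {
        return node < 8 ? List.of(node * 2, node * 2 + 1) : List.of();
    }

    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        try {
            ConcurrentLinkedQueue<Integer> nextLevel = new ConcurrentLinkedQueue<>();
            nextLevel.add(1);
            while (!nextLevel.isEmpty()) {
                ConcurrentLinkedQueue<Integer> tempQueue = new ConcurrentLinkedQueue<>();
                Queue<Future<Integer>> futures = new LinkedList<>();
                // Fan out: one task per node at the current depth.
                while (!nextLevel.isEmpty()) {
                    int node = nextLevel.poll();
                    futures.add(executor.submit(() -> {
                        tempQueue.addAll(expand(node)); // workers feed the next level
                        return node;
                    }));
                }
                // Barrier: wait for the whole level before descending.
                while (!futures.isEmpty()) {
                    System.out.println("visited " + futures.poll().get());
                }
                nextLevel = tempQueue;
            }
        } finally {
            executor.shutdown();
        }
    }
}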

Aggregations

LinkedList (java.util.LinkedList): 10856
Test (org.junit.Test): 1545
List (java.util.List): 1517
HashMap (java.util.HashMap): 1413
ArrayList (java.util.ArrayList): 1368
Map (java.util.Map): 915
IOException (java.io.IOException): 826
File (java.io.File): 721
HashSet (java.util.HashSet): 632
LinkedHashMap (java.util.LinkedHashMap): 390
GenericValue (org.apache.ofbiz.entity.GenericValue): 296
Iterator (java.util.Iterator): 281
Set (java.util.Set): 274
Date (java.util.Date): 249
GenericEntityException (org.apache.ofbiz.entity.GenericEntityException): 232
Collection (java.util.Collection): 208
Collectors (java.util.stream.Collectors): 162
Delegator (org.apache.ofbiz.entity.Delegator): 162
URL (java.net.URL): 159
Locale (java.util.Locale): 159