Example 71 with AtomicLong

use of java.util.concurrent.atomic.AtomicLong in project jersey by jersey.

the class BasicTypesMessageProvider method readFrom.

@Override
public Object readFrom(Class<Object> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException {
    final String entityString = readFromAsString(entityStream, mediaType);
    if (entityString.isEmpty()) {
        throw new NoContentException(LocalizationMessages.ERROR_READING_ENTITY_MISSING());
    }
    // First, try the known primitive wrapper types (Integer, Long, Boolean, etc.)
    final PrimitiveTypes primitiveType = PrimitiveTypes.forType(type);
    if (primitiveType != null) {
        return primitiveType.convert(entityString);
    }
    // Next, fall back to a public single-String-argument constructor, if the type declares one
    final Constructor constructor = AccessController.doPrivileged(ReflectionHelper.getStringConstructorPA(type));
    if (constructor != null) {
        try {
            return type.cast(constructor.newInstance(entityString));
        } catch (Exception e) {
            throw new MessageBodyProcessingException(LocalizationMessages.ERROR_ENTITY_PROVIDER_BASICTYPES_CONSTRUCTOR(type));
        }
    }
    // AtomicInteger and AtomicLong declare no String constructor, so they are handled explicitly
    if (AtomicInteger.class.isAssignableFrom(type)) {
        return new AtomicInteger((Integer) PrimitiveTypes.INTEGER.convert(entityString));
    }
    if (AtomicLong.class.isAssignableFrom(type)) {
        return new AtomicLong((Long) PrimitiveTypes.LONG.convert(entityString));
    }
    throw new MessageBodyProcessingException(LocalizationMessages.ERROR_ENTITY_PROVIDER_BASICTYPES_UNKWNOWN(type));
}
Also used : AtomicLong(java.util.concurrent.atomic.AtomicLong) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Constructor(java.lang.reflect.Constructor) NoContentException(javax.ws.rs.core.NoContentException) IOException(java.io.IOException) WebApplicationException(javax.ws.rs.WebApplicationException)
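
Neither AtomicInteger nor AtomicLong declares a single-String constructor, which is why the reflective constructor lookup above comes back empty for them and the provider special-cases both types at the end. A minimal, self-contained sketch of that fallback (the class and method names here are illustrative, not part of Jersey):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

public final class AtomicFromString {

    // Neither atomic type can be built via a (String) constructor, so a generic
    // "find a String constructor" strategy fails; they get explicit branches instead.
    static Object convert(Class<?> type, String entity) {
        if (AtomicInteger.class.isAssignableFrom(type)) {
            return new AtomicInteger(Integer.parseInt(entity));
        }
        if (AtomicLong.class.isAssignableFrom(type)) {
            return new AtomicLong(Long.parseLong(entity));
        }
        throw new IllegalArgumentException("Unsupported type: " + type);
    }

    public static void main(String[] args) {
        AtomicLong value = (AtomicLong) convert(AtomicLong.class, "42");
        // prints 42
        System.out.println(value.get());
    }
}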

Example 72 with AtomicLong

use of java.util.concurrent.atomic.AtomicLong in project druid by druid-io.

the class JettyQosTest method testQoS.

@Test(timeout = 60_000L)
public void testQoS() throws Exception {
    final int fastThreads = 20;
    final int slowThreads = 15;
    final int slowRequestsPerThread = 5;
    final int fastRequestsPerThread = 200;
    final HttpClient fastClient = new ClientHolder(fastThreads).getClient();
    final HttpClient slowClient = new ClientHolder(slowThreads).getClient();
    final ExecutorService fastPool = Execs.multiThreaded(fastThreads, "fast-%d");
    final ExecutorService slowPool = Execs.multiThreaded(slowThreads, "slow-%d");
    final CountDownLatch latch = new CountDownLatch(fastThreads * fastRequestsPerThread);
    final AtomicLong fastCount = new AtomicLong();
    final AtomicLong slowCount = new AtomicLong();
    final AtomicLong fastElapsed = new AtomicLong();
    final AtomicLong slowElapsed = new AtomicLong();
    for (int i = 0; i < slowThreads; i++) {
        slowPool.submit(new Runnable() {

            @Override
            public void run() {
                for (int i = 0; i < slowRequestsPerThread; i++) {
                    long startTime = System.currentTimeMillis();
                    try {
                        ListenableFuture<StatusResponseHolder> go = slowClient.go(new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/slow/hello")), new StatusResponseHandler(Charset.defaultCharset()));
                        go.get();
                        slowCount.incrementAndGet();
                        slowElapsed.addAndGet(System.currentTimeMillis() - startTime);
                    } catch (InterruptedException e) {
                    // BE COOL
                    } catch (Exception e) {
                        e.printStackTrace();
                        throw Throwables.propagate(e);
                    }
                }
            }
        });
    }
    // wait for jetty server pool to completely fill up
    while (server.getThreadPool().getIdleThreads() != 0) {
        Thread.sleep(25);
    }
    for (int i = 0; i < fastThreads; i++) {
        fastPool.submit(new Runnable() {

            @Override
            public void run() {
                for (int i = 0; i < fastRequestsPerThread; i++) {
                    long startTime = System.currentTimeMillis();
                    try {
                        ListenableFuture<StatusResponseHolder> go = fastClient.go(new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/default")), new StatusResponseHandler(Charset.defaultCharset()));
                        go.get();
                        fastCount.incrementAndGet();
                        fastElapsed.addAndGet(System.currentTimeMillis() - startTime);
                        latch.countDown();
                    } catch (InterruptedException e) {
                    // BE COOL
                    } catch (Exception e) {
                        e.printStackTrace();
                        throw Throwables.propagate(e);
                    }
                }
            }
        });
    }
    // Wait for all fast requests to be served
    latch.await();
    slowPool.shutdownNow();
    fastPool.shutdown();
    // check that fast requests finished quickly
    Assert.assertTrue(fastElapsed.get() / fastCount.get() < 500);
}
Also used : Request(com.metamx.http.client.Request) CountDownLatch(java.util.concurrent.CountDownLatch) URL(java.net.URL) AtomicLong(java.util.concurrent.atomic.AtomicLong) HttpClient(com.metamx.http.client.HttpClient) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) StatusResponseHandler(com.metamx.http.client.response.StatusResponseHandler) Test(org.junit.Test)
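
The assertion at the end works because every worker publishes into the same pair of counters: fastCount tracks completed requests and fastElapsed accumulates per-request wall-clock time, so the mean latency is just elapsed/count. A stripped-down sketch of that accounting, with the HTTP round-trip replaced by a hypothetical simulateRequest() placeholder (all names here are illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;

public final class LatencyCounters {

    public static void main(String[] args) throws InterruptedException {
        final int threads = 4;
        final int requestsPerThread = 100;
        final AtomicLong count = new AtomicLong();
        final AtomicLong elapsed = new AtomicLong();
        final ExecutorService pool = Executors.newFixedThreadPool(threads);
        final CountDownLatch latch = new CountDownLatch(threads * requestsPerThread);
        for (int i = 0; i < threads; i++) {
            pool.submit(() -> {
                for (int j = 0; j < requestsPerThread; j++) {
                    long start = System.currentTimeMillis();
                    simulateRequest(); // stands in for the HTTP call in the test above
                    count.incrementAndGet();
                    elapsed.addAndGet(System.currentTimeMillis() - start);
                    latch.countDown();
                }
            });
        }
        latch.await();
        pool.shutdown();
        // same shape as the test's assertion: mean latency = total elapsed / request count
        System.out.println("mean ms: " + elapsed.get() / count.get());
    }

    private static void simulateRequest() {
        try {
            Thread.sleep(1);
        } catch (InterruptedException ignored) {
            Thread.currentThread().interrupt();
        }
    }
}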

Example 73 with AtomicLong

use of java.util.concurrent.atomic.AtomicLong in project druid by druid-io.

the class JettyTest method testTimeouts.

@Test
// this test will deadlock if it hits an issue, so ignored by default
@Ignore
public void testTimeouts() throws Exception {
    // test for request timeouts properly not locking up all threads
    final Executor executor = Executors.newFixedThreadPool(100);
    final AtomicLong count = new AtomicLong(0);
    final CountDownLatch latch = new CountDownLatch(1000);
    for (int i = 0; i < 10000; i++) {
        executor.execute(new Runnable() {

            @Override
            public void run() {
                executor.execute(new Runnable() {

                    @Override
                    public void run() {
                        long startTime = System.currentTimeMillis();
                        long startTime2 = 0;
                        try {
                            ListenableFuture<StatusResponseHolder> go = client.go(new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/slow/hello")), new StatusResponseHandler(Charset.defaultCharset()));
                            startTime2 = System.currentTimeMillis();
                            go.get();
                        } catch (Exception e) {
                            e.printStackTrace();
                        } finally {
                            System.out.println("Response time client" + (System.currentTimeMillis() - startTime) + "time taken for getting future" + (System.currentTimeMillis() - startTime2) + "Counter " + count.incrementAndGet());
                            latch.countDown();
                        }
                    }
                });
            }
        });
    }
    latch.await();
}
Also used : Request(com.metamx.http.client.Request) CountDownLatch(java.util.concurrent.CountDownLatch) URL(java.net.URL) IOException(java.io.IOException) AtomicLong(java.util.concurrent.atomic.AtomicLong) Executor(java.util.concurrent.Executor) StatusResponseHolder(com.metamx.http.client.response.StatusResponseHolder) StatusResponseHandler(com.metamx.http.client.response.StatusResponseHandler) Ignore(org.junit.Ignore) Test(org.junit.Test)
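
In this test AtomicLong doubles as a sequence generator: incrementAndGet() returns the updated value atomically, so each completing request can print a unique, monotonically increasing counter without any extra locking. A compact sketch of that idiom (class name is illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;

public final class ProgressCounter {

    public static void main(String[] args) throws InterruptedException {
        final AtomicLong count = new AtomicLong(0);
        final CountDownLatch latch = new CountDownLatch(1000);
        final ExecutorService executor = Executors.newFixedThreadPool(8);
        for (int i = 0; i < 1000; i++) {
            executor.execute(() -> {
                // incrementAndGet() is an atomic read-modify-write, so every
                // task observes a distinct value even under heavy contention
                long n = count.incrementAndGet();
                if (n % 250 == 0) {
                    System.out.println("completed " + n + " tasks");
                }
                latch.countDown();
            });
        }
        latch.await();
        executor.shutdown();
    }
}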

Example 74 with AtomicLong

use of java.util.concurrent.atomic.AtomicLong in project deeplearning4j by deeplearning4j.

the class SequenceVectors method fit.

/**
     * Starts training over
     */
public void fit() {
    Properties props = Nd4j.getExecutioner().getEnvironmentInformation();
    if (props.getProperty("backend").equals("CUDA")) {
        if (Nd4j.getAffinityManager().getNumberOfDevices() > 1)
            throw new IllegalStateException("Multi-GPU word2vec/doc2vec isn't available atm");
    //if (!NativeOpsHolder.getInstance().getDeviceNativeOps().isP2PAvailable())
    //throw new IllegalStateException("Running Word2Vec on multi-gpu system requires P2P support between GPUs, which looks to be unavailable on your system.");
    }
    Nd4j.getRandom().setSeed(configuration.getSeed());
    AtomicLong timeSpent = new AtomicLong(0);
    if (!trainElementsVectors && !trainSequenceVectors)
        throw new IllegalStateException("You should define at least one training goal 'trainElementsRepresentation' or 'trainSequenceRepresentation'");
    if (iterator == null)
        throw new IllegalStateException("You can't fit() data without SequenceIterator defined");
    if (resetModel || (lookupTable != null && vocab != null && vocab.numWords() == 0)) {
        // build vocabulary from scratch
        buildVocab();
    }
    WordVectorSerializer.printOutProjectedMemoryUse(vocab.numWords(), configuration.getLayersSize(), configuration.isUseHierarchicSoftmax() && configuration.getNegative() > 0 ? 3 : 2);
    if (vocab == null || lookupTable == null || vocab.numWords() == 0)
        throw new IllegalStateException("You can't fit() model with empty Vocabulary or WeightLookupTable");
    // if the model's vocab and lookupTable were built externally, we basically should check that the lookupTable was properly initialized
    if (!resetModel || existingModel != null) {
        lookupTable.resetWeights(false);
    } else {
        // otherwise we reset weights, independent of actual current state of lookup table
        lookupTable.resetWeights(true);
        // if preciseWeightInit is used, we roll over the data once again
        if (configuration.isPreciseWeightInit()) {
            log.info("Using precise weights init...");
            iterator.reset();
            while (iterator.hasMoreSequences()) {
                Sequence<T> sequence = iterator.nextSequence();
                // initializing elements, only once
                for (T element : sequence.getElements()) {
                    T realElement = vocab.tokenFor(element.getLabel());
                    if (realElement != null && !realElement.isInit()) {
                        Random rng = Nd4j.getRandomFactory().getNewRandomInstance(configuration.getSeed() * realElement.hashCode(), configuration.getLayersSize() + 1);
                        INDArray randArray = Nd4j.rand(new int[] { 1, configuration.getLayersSize() }, rng).subi(0.5).divi(configuration.getLayersSize());
                        lookupTable.getWeights().getRow(realElement.getIndex()).assign(randArray);
                        realElement.setInit(true);
                    }
                }
                // initializing labels, only once
                for (T label : sequence.getSequenceLabels()) {
                    T realElement = vocab.tokenFor(label.getLabel());
                    if (realElement != null && !realElement.isInit()) {
                        Random rng = Nd4j.getRandomFactory().getNewRandomInstance(configuration.getSeed() * realElement.hashCode(), configuration.getLayersSize() + 1);
                        INDArray randArray = Nd4j.rand(new int[] { 1, configuration.getLayersSize() }, rng).subi(0.5).divi(configuration.getLayersSize());
                        lookupTable.getWeights().getRow(realElement.getIndex()).assign(randArray);
                        realElement.setInit(true);
                    }
                }
            }
            this.iterator.reset();
        }
    }
    initLearners();
    log.info("Starting learning process...");
    timeSpent.set(System.currentTimeMillis());
    if (this.stopWords == null)
        this.stopWords = new ArrayList<>();
    for (int currentEpoch = 1; currentEpoch <= numEpochs; currentEpoch++) {
        final AtomicLong linesCounter = new AtomicLong(0);
        final AtomicLong wordsCounter = new AtomicLong(0);
        AsyncSequencer sequencer = new AsyncSequencer(this.iterator, this.stopWords);
        sequencer.start();
        //final VectorCalculationsThread[] threads = new VectorCalculationsThread[workers];
        final AtomicLong timer = new AtomicLong(System.currentTimeMillis());
        final List<VectorCalculationsThread> threads = new ArrayList<>();
        for (int x = 0; x < workers; x++) {
            threads.add(x, new VectorCalculationsThread(x, currentEpoch, wordsCounter, vocab.totalWordOccurrences(), linesCounter, sequencer, timer));
            threads.get(x).start();
        }
        try {
            sequencer.join();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        for (int x = 0; x < workers; x++) {
            try {
                threads.get(x).join();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        // TODO: fix this to non-exclusive termination
        if (trainElementsVectors && elementsLearningAlgorithm != null && (!trainSequenceVectors || sequenceLearningAlgorithm == null) && elementsLearningAlgorithm.isEarlyTerminationHit()) {
            break;
        }
        if (trainSequenceVectors && sequenceLearningAlgorithm != null && (!trainElementsVectors || elementsLearningAlgorithm == null) && sequenceLearningAlgorithm.isEarlyTerminationHit()) {
            break;
        }
        log.info("Epoch: [" + currentEpoch + "]; Words vectorized so far: [" + wordsCounter.get() + "];  Lines vectorized so far: [" + linesCounter.get() + "]; learningRate: [" + minLearningRate + "]");
        if (eventListeners != null && !eventListeners.isEmpty()) {
            for (VectorsListener listener : eventListeners) {
                if (listener.validateEvent(ListenerEvent.EPOCH, currentEpoch))
                    listener.processEvent(ListenerEvent.EPOCH, this, currentEpoch);
            }
        }
    }
    log.info("Time spent on training: {} ms", System.currentTimeMillis() - timeSpent.get());
}
Also used : AtomicLong(java.util.concurrent.atomic.AtomicLong) Random(org.nd4j.linalg.api.rng.Random) INDArray(org.nd4j.linalg.api.ndarray.INDArray) VectorsListener(org.deeplearning4j.models.sequencevectors.interfaces.VectorsListener)
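
fit() uses AtomicLong in two roles: timeSpent is a coarse stopwatch (a start timestamp is stored with set() and subtracted at the end), while wordsCounter and linesCounter are shared progress counters handed to every VectorCalculationsThread. A reduced sketch of that ownership pattern, with a hypothetical Worker standing in for the calculation threads:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

public final class SharedCounters {

    // A worker that reports progress through a counter owned by the caller,
    // mirroring how fit() hands wordsCounter/linesCounter to each thread.
    static final class Worker extends Thread {
        private final AtomicLong itemsCounter;

        Worker(AtomicLong itemsCounter) {
            this.itemsCounter = itemsCounter;
        }

        @Override
        public void run() {
            for (int i = 0; i < 1_000; i++) {
                itemsCounter.incrementAndGet(); // immediately visible to all threads
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        // stopwatch role: store the start timestamp, subtract at the end
        AtomicLong timeSpent = new AtomicLong(System.currentTimeMillis());
        AtomicLong itemsCounter = new AtomicLong(0);
        List<Worker> workers = new ArrayList<>();
        for (int x = 0; x < 4; x++) {
            Worker worker = new Worker(itemsCounter);
            workers.add(worker);
            worker.start();
        }
        for (Worker worker : workers) {
            worker.join();
        }
        System.out.println("items: " + itemsCounter.get()
                + "; ms: " + (System.currentTimeMillis() - timeSpent.get()));
    }
}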

Example 75 with AtomicLong

use of java.util.concurrent.atomic.AtomicLong in project deeplearning4j by deeplearning4j.

the class VocabConstructor method buildJointVocabulary.

/**
     * This method scans all sources passed through the builder, and returns all words as a vocab.
     * If TargetVocabCache was set during instance creation, it'll be filled too.
     *
     * @return the vocabulary cache built from all sources
     */
public VocabCache<T> buildJointVocabulary(boolean resetCounters, boolean buildHuffmanTree) {
    long lastTime = System.currentTimeMillis();
    long lastSequences = 0;
    long lastElements = 0;
    long startTime = lastTime;
    long startWords = 0;
    AtomicLong parsedCount = new AtomicLong(0);
    if (resetCounters && buildHuffmanTree)
        throw new IllegalStateException("You can't reset counters and build Huffman tree at the same time!");
    if (cache == null)
        cache = new AbstractCache.Builder<T>().build();
    log.debug("Target vocab size before building: [" + cache.numWords() + "]");
    final AtomicLong loopCounter = new AtomicLong(0);
    AbstractCache<T> topHolder = new AbstractCache.Builder<T>().minElementFrequency(0).build();
    int cnt = 0;
    int numProc = Runtime.getRuntime().availableProcessors();
    int numThreads = Math.max(numProc / 2, 2);
    ExecutorService executorService = new ThreadPoolExecutor(numThreads, numThreads, 0L, TimeUnit.MILLISECONDS, new LinkedTransferQueue<Runnable>());
    final AtomicLong execCounter = new AtomicLong(0);
    final AtomicLong finCounter = new AtomicLong(0);
    for (VocabSource<T> source : sources) {
        SequenceIterator<T> iterator = source.getIterator();
        iterator.reset();
        log.debug("Trying source iterator: [" + cnt + "]");
        log.debug("Target vocab size before building: [" + cache.numWords() + "]");
        cnt++;
        AbstractCache<T> tempHolder = new AbstractCache.Builder<T>().build();
        List<Long> timesHasNext = new ArrayList<>();
        List<Long> timesNext = new ArrayList<>();
        int sequences = 0;
        long time3 = 0;
        while (iterator.hasMoreSequences()) {
            Sequence<T> document = iterator.nextSequence();
            seqCount.incrementAndGet();
            parsedCount.addAndGet(document.size());
            tempHolder.incrementTotalDocCount();
            execCounter.incrementAndGet();
            VocabRunnable runnable = new VocabRunnable(tempHolder, document, finCounter, loopCounter);
            executorService.execute(runnable);
            // if we're not in parallel mode - wait till this runnable finishes
            if (!allowParallelBuilder) {
                while (execCounter.get() != finCounter.get()) LockSupport.parkNanos(1000);
            }
            while (execCounter.get() - finCounter.get() > numProc) {
                try {
                    Thread.sleep(1);
                } catch (Exception e) {
                }
            }
            sequences++;
            if (seqCount.get() % 100000 == 0) {
                long currentTime = System.currentTimeMillis();
                long currentSequences = seqCount.get();
                long currentElements = parsedCount.get();
                double seconds = (currentTime - lastTime) / (double) 1000;
                //                    Collections.sort(timesHasNext);
                //                    Collections.sort(timesNext);
                double seqPerSec = (currentSequences - lastSequences) / seconds;
                double elPerSec = (currentElements - lastElements) / seconds;
                //                    log.info("Document time: {} us; hasNext time: {} us", timesNext.get(timesNext.size() / 2), timesHasNext.get(timesHasNext.size() / 2));
                log.info("Sequences checked: [{}]; Current vocabulary size: [{}]; Sequences/sec: {}; Words/sec: {};", seqCount.get(), tempHolder.numWords(), String.format("%.2f", seqPerSec), String.format("%.2f", elPerSec));
                lastTime = currentTime;
                lastElements = currentElements;
                lastSequences = currentSequences;
            //                    timesHasNext.clear();
            //                    timesNext.clear();
            }
            // firing the scavenger loop
            if (enableScavenger && loopCounter.get() >= 2000000 && tempHolder.numWords() > 10000000) {
                log.info("Starting scavenger...");
                while (execCounter.get() != finCounter.get()) {
                    try {
                        Thread.sleep(2);
                    } catch (Exception e) {
                    }
                }
                filterVocab(tempHolder, Math.max(1, source.getMinWordFrequency() / 2));
                loopCounter.set(0);
            }
        //                timesNext.add((time2 - time1) / 1000L);
        //                timesHasNext.add((time1 - time3) / 1000L);
        //                time3 = System.nanoTime();
        }
        // block until all threads are finished
        log.debug("Waiting till all processes stop...");
        while (execCounter.get() != finCounter.get()) {
            try {
                Thread.sleep(2);
            } catch (Exception e) {
            }
        }
        // apply minWordFrequency set for this source
        log.debug("Vocab size before truncation: [" + tempHolder.numWords() + "],  NumWords: [" + tempHolder.totalWordOccurrences() + "], sequences parsed: [" + seqCount.get() + "], counter: [" + parsedCount.get() + "]");
        if (source.getMinWordFrequency() > 0) {
            filterVocab(tempHolder, source.getMinWordFrequency());
        }
        log.debug("Vocab size after truncation: [" + tempHolder.numWords() + "],  NumWords: [" + tempHolder.totalWordOccurrences() + "], sequences parsed: [" + seqCount.get() + "], counter: [" + parsedCount.get() + "]");
        // at this moment we're ready to transfer
        topHolder.importVocabulary(tempHolder);
    }
    // at this moment, we have a vocabulary full of words, and we have to reset counters before transferring everything back to VocabCache
    //topHolder.resetWordCounters();
    System.gc();
    System.gc();
    try {
        Thread.sleep(1000);
    } catch (Exception e) {
    //
    }
    cache.importVocabulary(topHolder);
    // adding UNK word
    if (unk != null) {
        log.info("Adding UNK element to vocab...");
        unk.setSpecial(true);
        cache.addToken(unk);
    }
    if (resetCounters) {
        for (T element : cache.vocabWords()) {
            element.setElementFrequency(0);
        }
        cache.updateWordsOccurencies();
    }
    if (buildHuffmanTree) {
        Huffman huffman = new Huffman(cache.vocabWords());
        huffman.build();
        huffman.applyIndexes(cache);
        if (limit > 0) {
            LinkedBlockingQueue<String> labelsToRemove = new LinkedBlockingQueue<>();
            for (T element : cache.vocabWords()) {
                if (element.getIndex() > limit && !element.isSpecial() && !element.isLabel())
                    labelsToRemove.add(element.getLabel());
            }
            for (String label : labelsToRemove) {
                cache.removeElement(label);
            }
        }
    }
    executorService.shutdown();
    System.gc();
    System.gc();
    try {
        Thread.sleep(1000);
    } catch (Exception e) {
    //
    }
    long endSequences = seqCount.get();
    long endTime = System.currentTimeMillis();
    double seconds = (endTime - startTime) / (double) 1000;
    double seqPerSec = endSequences / seconds;
    log.info("Sequences checked: [{}], Current vocabulary size: [{}]; Sequences/sec: [{}];", seqCount.get(), cache.numWords(), String.format("%.2f", seqPerSec));
    return cache;
}
Also used : AbstractCache(org.deeplearning4j.models.word2vec.wordstore.inmemory.AbstractCache) AtomicLong(java.util.concurrent.atomic.AtomicLong) Huffman(org.deeplearning4j.models.word2vec.Huffman)
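
buildJointVocabulary() throttles its producer loop with a pair of counters: execCounter is incremented when a VocabRunnable is submitted, finCounter when one finishes, and the loop parks or sleeps whenever the in-flight gap grows past the processor count. A compact sketch of that counter-based backpressure (names are illustrative; the real code also has a non-parallel path that waits for each runnable individually):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.LockSupport;

public final class CounterBackpressure {

    public static void main(String[] args) {
        final int maxInFlight = 8;
        final AtomicLong submitted = new AtomicLong(0);
        final AtomicLong finished = new AtomicLong(0);
        final ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10_000; i++) {
            submitted.incrementAndGet();
            pool.execute(() -> {
                // ... unit of work goes here ...
                finished.incrementAndGet(); // bump last, once the work is done
            });
            // throttle the producer: never let more than maxInFlight tasks
            // sit between "submitted" and "finished"
            while (submitted.get() - finished.get() > maxInFlight) {
                LockSupport.parkNanos(1000);
            }
        }
        // drain: the two counters converge once every task has run
        while (submitted.get() != finished.get()) {
            LockSupport.parkNanos(1000);
        }
        pool.shutdown();
        System.out.println("done: " + finished.get());
    }
}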

Aggregations

AtomicLong (java.util.concurrent.atomic.AtomicLong): 2292
Test (org.junit.Test): 986
ArrayList (java.util.ArrayList): 300
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 273
IOException (java.io.IOException): 254
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 250
List (java.util.List): 222
HashMap (java.util.HashMap): 212
Map (java.util.Map): 209
CountDownLatch (java.util.concurrent.CountDownLatch): 185
AtomicReference (java.util.concurrent.atomic.AtomicReference): 174
HashSet (java.util.HashSet): 106
Arrays (java.util.Arrays): 101
File (java.io.File): 99
Test (org.junit.jupiter.api.Test): 98
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 95
Set (java.util.Set): 94
TimeUnit (java.util.concurrent.TimeUnit): 91
Collections (java.util.Collections): 88
Random (java.util.Random): 85