Search in sources :

Example 26 with RejectedExecutionHandler

Use of java.util.concurrent.RejectedExecutionHandler in project ddf by codice.

The following is the executeWithSubject method of the DumpCommand class.

@Override
protected Object executeWithSubject() throws Exception {
    // A path without a file extension is treated as a directory; make sure it
    // ends with the platform separator so later concatenation is well-formed.
    if (FilenameUtils.getExtension(dirPath).equals("") && !dirPath.endsWith(File.separator)) {
        dirPath += File.separator;
    }
    final File dumpDir = new File(dirPath);
    if (!dumpDir.exists()) {
        printErrorMessage("Directory [" + dirPath + "] must exist.");
        console.println("If the directory does indeed exist, try putting the path in quotes.");
        return null;
    }
    if (!dumpDir.isDirectory()) {
        printErrorMessage("Path [" + dirPath + "] must be a directory.");
        return null;
    }
    // Resolve metacard transformers up front unless dumping raw serialized objects.
    if (!SERIALIZED_OBJECT_ID.matches(transformerId)) {
        transformers = getTransformers();
        if (transformers == null) {
            console.println(transformerId + " is an invalid metacard transformer.");
            return null;
        }
    }
    // Refuse to overwrite an existing zip archive.
    if (StringUtils.isNotBlank(zipFileName) && new File(dirPath + zipFileName).exists()) {
        console.println("Cannot dump Catalog.  Zip file " + zipFileName + " already exists.");
        return null;
    }
    SecurityLogger.audit("Called catalog:dump command with path : {}", dirPath);
    CatalogFacade catalog = getCatalog();
    if (StringUtils.isNotBlank(zipFileName)) {
        zipArgs = new HashMap<>();
        zipArgs.put(FILE_PATH, dirPath + zipFileName);
    }
    QueryImpl query = new QueryImpl(getFilter());
    query.setRequestsTotalResultsCount(false);
    query.setPageSize(pageSize);
    Map<String, Serializable> props = new HashMap<>();
    // Avoid caching all results while dumping with native query mode
    props.put("mode", "native");
    final AtomicLong resultCount = new AtomicLong(0);
    long start = System.currentTimeMillis();
    SourceResponse response = catalog.query(new QueryRequestImpl(query, props));
    // Bounded queue + CallerRunsPolicy throttles submission: when all workers are
    // busy, the querying thread runs the export itself instead of growing an
    // unbounded backlog of buffered result pages.
    BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(multithreaded);
    RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
    final ExecutorService executorService = new ThreadPoolExecutor(multithreaded, multithreaded, 0L, TimeUnit.MILLISECONDS, blockingQueue, rejectedExecutionHandler);
    // Page through the catalog until a short (or empty) page signals the end.
    while (response.getResults().size() > 0) {
        response = catalog.query(new QueryRequestImpl(query, props));
        if (StringUtils.isNotBlank(zipFileName)) {
            try {
                Optional<QueryResponseTransformer> zipCompression = getZipCompression();
                if (zipCompression.isPresent()) {
                    BinaryContent binaryContent = zipCompression.get().transform(response, zipArgs);
                    if (binaryContent != null) {
                        // The transformer writes the zip itself; we only need to
                        // release the returned stream.
                        IOUtils.closeQuietly(binaryContent.getInputStream());
                    }
                    Long resultSize = (long) response.getResults().size();
                    printStatus(resultCount.addAndGet(resultSize));
                }
            } catch (InvalidSyntaxException e) {
                LOGGER.info("No Zip Transformer found.  Unable to export metacards to a zip file.");
            }
        } else if (multithreaded > 1) {
            // Snapshot the page before handing it to a worker; `response` is
            // reassigned on the next loop iteration.
            final List<Result> results = new ArrayList<>(response.getResults());
            executorService.submit(() -> {
                boolean transformationFailed = false;
                for (final Result result : results) {
                    Metacard metacard = result.getMetacard();
                    try {
                        exportMetacard(dumpDir, metacard);
                    } catch (IOException | CatalogTransformerException e) {
                        transformationFailed = true;
                        LOGGER.debug("Failed to dump metacard {}", metacard.getId(), e);
                        // Abort remaining queued work on the first failure.
                        executorService.shutdownNow();
                    }
                    printStatus(resultCount.incrementAndGet());
                }
                if (transformationFailed) {
                    LOGGER.info("One or more metacards failed to transform. Enable debug log for more details.");
                }
            });
        } else {
            // Single-threaded path: export inline on the command thread.
            for (final Result result : response.getResults()) {
                Metacard metacard = result.getMetacard();
                exportMetacard(dumpDir, metacard);
                printStatus(resultCount.incrementAndGet());
            }
        }
        if (response.getResults().size() < pageSize || pageSize == -1) {
            break;
        }
        if (pageSize > 0) {
            query.setStartIndex(query.getStartIndex() + pageSize);
        }
    }
    executorService.shutdown();
    // Wait for in-flight export tasks. awaitTermination blocks instead of the
    // previous sleep/poll loop, and an interrupt is re-asserted rather than
    // silently swallowed so callers can still observe it.
    try {
        while (!executorService.awaitTermination(100, TimeUnit.MILLISECONDS)) {
            // keep waiting for outstanding export tasks
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    long end = System.currentTimeMillis();
    String elapsedTime = timeFormatter.print(new Period(start, end).withMillis(0));
    console.printf(" %d file(s) dumped in %s\t%n", resultCount.get(), elapsedTime);
    LOGGER.debug("{} file(s) dumped in {}", resultCount.get(), elapsedTime);
    console.println();
    SecurityLogger.audit("Exported {} files to {}", resultCount.get(), dirPath);
    return null;
}
Also used : Serializable(java.io.Serializable) HashMap(java.util.HashMap) BinaryContent(ddf.catalog.data.BinaryContent) Result(ddf.catalog.data.Result) QueryImpl(ddf.catalog.operation.impl.QueryImpl) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) CatalogFacade(org.codice.ddf.commands.catalog.facade.CatalogFacade) InvalidSyntaxException(org.osgi.framework.InvalidSyntaxException) ArrayList(java.util.ArrayList) List(java.util.List) SourceResponse(ddf.catalog.operation.SourceResponse) RejectedExecutionHandler(java.util.concurrent.RejectedExecutionHandler) Period(org.joda.time.Period) AtomicLong(java.util.concurrent.atomic.AtomicLong) Metacard(ddf.catalog.data.Metacard) QueryResponseTransformer(ddf.catalog.transform.QueryResponseTransformer) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) ExecutorService(java.util.concurrent.ExecutorService) AtomicLong(java.util.concurrent.atomic.AtomicLong) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) File(java.io.File)

Example 27 with RejectedExecutionHandler

Use of java.util.concurrent.RejectedExecutionHandler in project ddf by codice.

The following is the duplicateInBatches method of the DuplicateCommands class.

/**
     * In batches, loops through a query of the queryFacade and an ingest to the ingestFacade of the
     * metacards from the response until there are no more metacards from the queryFacade or the
     * maxMetacards has been reached.
     *
     * @param queryFacade  - the CatalogFacade to duplicate from
     * @param ingestFacade - the CatalogFacade to duplicate to
     * @param filter       - the filter to query with
     */
protected void duplicateInBatches(CatalogFacade queryFacade, CatalogFacade ingestFacade, Filter filter) {
    // 1-based start index of the next page to fetch.
    AtomicInteger queryIndex = new AtomicInteger(1);
    // Size of the initial probe query: capped by maxMetacards when that cap is
    // smaller than a full batch.
    final long originalQuerySize;
    if (maxMetacards > 0 && maxMetacards < batchSize) {
        originalQuerySize = maxMetacards;
    } else {
        originalQuerySize = batchSize;
    }
    final SourceResponse originalResponse = query(queryFacade, filter, queryIndex.get(), originalQuerySize);
    if (originalResponse == null) {
        return;
    }
    final long totalHits = originalResponse.getHits();
    if (totalHits <= 0) {
        LOGGER.debug("Query returned 0 hits.");
        return;
    }
    // If the maxMetacards is set, restrict the totalWanted to the number of maxMetacards
    final long totalWanted;
    if (maxMetacards > 0 && maxMetacards <= totalHits) {
        totalWanted = maxMetacards;
    } else {
        totalWanted = totalHits;
    }
    // Ingest the first page (already fetched by the probe query above).
    ingestMetacards(ingestFacade, getMetacardsFromSourceResponse(originalResponse));
    if (multithreaded > 1) {
        // Bounded queue + CallerRunsPolicy throttles submission: when all
        // workers are busy, the submitting thread runs the batch itself.
        BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(multithreaded);
        RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
        final ExecutorService executorService = new ThreadPoolExecutor(multithreaded, multithreaded, 0L, TimeUnit.MILLISECONDS, blockingQueue, rejectedExecutionHandler);
        console.printf("Running a maximum of %d threads during replication.%n", multithreaded);
        printProgressAndFlush(start, totalWanted, ingestedCount.get());
        int index;
        while ((index = queryIndex.addAndGet(batchSize)) <= totalWanted) {
            // Capture the index in an effectively-final local for the lambda.
            final int i = index;
            executorService.submit(() -> {
                final SourceResponse response = query(queryFacade, filter, i, getQuerySizeFromIndex(totalWanted, i));
                if (response != null) {
                    ingestMetacards(ingestFacade, getMetacardsFromSourceResponse(response));
                }
                printProgressAndFlush(start, totalWanted, ingestedCount.get());
            });
        }
        executorService.shutdown();
        // Wait for in-flight batches. awaitTermination blocks instead of the
        // previous sleep/poll loop, and an interrupt is re-asserted rather
        // than silently swallowed so callers can still observe it.
        try {
            while (!executorService.awaitTermination(1, TimeUnit.SECONDS)) {
                // keep waiting for outstanding replication batches
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    } else {
        // Single-threaded path: fetch and ingest each remaining page inline.
        while (queryIndex.addAndGet(batchSize) <= totalWanted) {
            printProgressAndFlush(start, totalWanted, ingestedCount.get());
            final SourceResponse response = query(queryFacade, filter, queryIndex.get(), getQuerySizeFromIndex(totalWanted, queryIndex.get()));
            if (response != null) {
                ingestMetacards(ingestFacade, getMetacardsFromSourceResponse(response));
            }
        }
    }
    printProgressAndFlush(start, totalWanted, ingestedCount.get());
    if (failedCount.get() > 0) {
        LOGGER.info("Not all records were ingested. [{}] failed", failedCount.get());
        if (StringUtils.isNotBlank(failedDir)) {
            try {
                writeFailedMetacards(failedMetacards);
            } catch (IOException e) {
                console.println("Error occurred while writing failed metacards to failedDir.");
            }
        }
    }
}
Also used : SourceResponse(ddf.catalog.operation.SourceResponse) RejectedExecutionHandler(java.util.concurrent.RejectedExecutionHandler) IOException(java.io.IOException) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor)

Aggregations

RejectedExecutionHandler (java.util.concurrent.RejectedExecutionHandler)27 ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor)23 ArrayBlockingQueue (java.util.concurrent.ArrayBlockingQueue)6 ThreadFactory (java.util.concurrent.ThreadFactory)6 ExecutorService (java.util.concurrent.ExecutorService)5 RejectedExecutionException (java.util.concurrent.RejectedExecutionException)4 SynchronousQueue (java.util.concurrent.SynchronousQueue)4 LinkedBlockingQueue (java.util.concurrent.LinkedBlockingQueue)3 ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService)3 ScheduledThreadPoolExecutor (java.util.concurrent.ScheduledThreadPoolExecutor)3 Metacard (ddf.catalog.data.Metacard)2 SourceResponse (ddf.catalog.operation.SourceResponse)2 File (java.io.File)2 IOException (java.io.IOException)2 ArrayList (java.util.ArrayList)2 CountDownLatch (java.util.concurrent.CountDownLatch)2 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)2 XRejectedExecutionHandler (org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler)2 Test (org.junit.Test)2 NamedThreadFactory (com.alibaba.otter.shared.common.utils.thread.NamedThreadFactory)1