Search in sources :

Example 56 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project cdap by caskdata.

In class SparkTransactionHandlerTest, method testConcurrentJobRun:

/**
   * Tests concurrent jobs submission.
   */
/**
 * Tests concurrent job submission: runs many jobs in parallel, released
 * simultaneously by a barrier, and asserts that every run verifies successfully.
 */
@Test(timeout = 120000L)
public void testConcurrentJobRun() throws Exception {
    final AtomicInteger jobIdGen = new AtomicInteger();
    final AtomicInteger stageIdGen = new AtomicInteger();
    // Start 30 jobs concurrently
    int threads = 30;
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    try {
        // Barrier so that all tasks start their job run at (roughly) the same instant.
        final CyclicBarrier barrier = new CyclicBarrier(threads);
        final Random random = new Random();
        CompletionService<Boolean> completionService = new ExecutorCompletionService<>(executor);
        // For each run, return the verification result
        for (int i = 0; i < threads; i++) {
            completionService.submit(new Callable<Boolean>() {

                @Override
                public Boolean call() throws Exception {
                    barrier.await();
                    try {
                        // Run job with 2-5 stages, with job either succeeded or failed
                        testRunJob(jobIdGen.getAndIncrement(), generateStages(stageIdGen, 2 + random.nextInt(4)), random.nextBoolean());
                        return true;
                    } catch (Throwable t) {
                        LOG.error("testRunJob failed.", t);
                        return false;
                    }
                }
            });
        }
        // All testRunJob must be completed successfully.
        // take().get() is evaluated BEFORE the conjunction so that every future is
        // drained (and any ExecutionException surfaced) even after a failure has been
        // seen; the previous "result && take().get()" short-circuited and stopped
        // consuming results as soon as one task returned false.
        boolean result = true;
        for (int i = 0; i < threads; i++) {
            result = completionService.take().get() && result;
        }
        Assert.assertTrue(result);
    } finally {
        executor.shutdown();
    }
}
Also used : ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) TransactionFailureException(org.apache.tephra.TransactionFailureException) TimeoutException(java.util.concurrent.TimeoutException) UnknownHostException(java.net.UnknownHostException) CyclicBarrier(java.util.concurrent.CyclicBarrier) Random(java.util.Random) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) Test(org.junit.Test)

Example 57 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project ddf by codice.

In class CachingFederationStrategy, method sourceFederate:

/**
 * Fans a query out to every given source in parallel via a {@link java.util.concurrent.CompletionService},
 * applying the pre-federated query plugin chain to each per-source request, and returns a
 * response object that is populated asynchronously as the per-source queries complete.
 *
 * @param sources the sources to query; {@code null} entries and duplicate source ids are skipped
 * @param queryRequest the original federated query request
 * @return the paged {@code offsetResults} when offsetting across multiple sources,
 *         otherwise the shared {@code queryResponseQueue} that the monitor fills in
 */
private QueryResponse sourceFederate(List<Source> sources, final QueryRequest queryRequest) {
    if (LOGGER.isDebugEnabled()) {
        for (Source source : sources) {
            if (source != null) {
                LOGGER.debug("source to query: {}", source.getId());
            }
        }
    }
    Query originalQuery = queryRequest.getQuery();
    int offset = originalQuery.getStartIndex();
    final int pageSize = originalQuery.getPageSize();
    // limit offset to max value
    if (offset > this.maxStartIndex) {
        offset = this.maxStartIndex;
    }
    final QueryResponseImpl queryResponseQueue = new QueryResponseImpl(queryRequest, null);
    // Maps each in-flight per-source future to the (possibly plugin-modified) request it was built from.
    Map<Future<SourceResponse>, QueryRequest> futures = new HashMap<>();
    Query modifiedQuery = getModifiedQuery(originalQuery, sources.size(), offset, pageSize);
    QueryRequest modifiedQueryRequest = new QueryRequestImpl(modifiedQuery, queryRequest.isEnterprise(), queryRequest.getSourceIds(), queryRequest.getProperties());
    CompletionService<SourceResponse> queryCompletion = new ExecutorCompletionService<>(queryExecutorService);
    // Do NOT call source.isAvailable() when checking sources
    for (final Source source : sources) {
        if (source != null) {
            if (!futuresContainsSource(source, futures)) {
                LOGGER.debug("running query on source: {}", source.getId());
                // Each source gets its own request copy (single source id, copied properties)
                // so plugin mutations below cannot leak across sources.
                QueryRequest sourceQueryRequest = new QueryRequestImpl(modifiedQuery, queryRequest.isEnterprise(), Collections.singleton(source.getId()), new HashMap<>(queryRequest.getProperties()));
                try {
                    for (PreFederatedQueryPlugin service : preQuery) {
                        try {
                            sourceQueryRequest = service.process(source, sourceQueryRequest);
                        } catch (PluginExecutionException e) {
                            // Best-effort: a failing plugin is skipped, remaining plugins still run.
                            LOGGER.info("Error executing PreFederatedQueryPlugin", e);
                        }
                    }
                } catch (StopProcessingException e) {
                    // A plugin vetoed further processing; the request as modified so far is still submitted.
                    LOGGER.info("Plugin stopped processing", e);
                }
                if (source instanceof CatalogProvider && SystemInfo.getSiteName().equals(source.getId())) {
                    // TODO RAP 12 Jul 16: DDF-2294 - Extract into a new PreFederatedQueryPlugin
                    sourceQueryRequest = validationQueryFactory.getQueryRequestWithValidationFilter(sourceQueryRequest, showErrors, showWarnings);
                }
                futures.put(queryCompletion.submit(new CallableSourceResponse(source, sourceQueryRequest)), sourceQueryRequest);
            } else {
                LOGGER.info("Duplicate source found with name {}. Ignoring second one.", source.getId());
            }
        }
    }
    QueryResponseImpl offsetResults = null;
    // When offsetting across multiple sources, results must be merged before the offset can be
    // applied, which is what OffsetResultHandler does with the shared queryResponseQueue.
    if (offset > 1 && sources.size() > 1) {
        offsetResults = new QueryResponseImpl(queryRequest, null);
        queryExecutorService.submit(new OffsetResultHandler(queryResponseQueue, offsetResults, pageSize, offset));
    }
    // The monitor drains queryCompletion, runs post-query plugins, and closes queryResponseQueue.
    queryExecutorService.submit(sortedQueryMonitorFactory.createMonitor(queryCompletion, futures, queryResponseQueue, modifiedQueryRequest, postQuery));
    QueryResponse queryResponse;
    if (offset > 1 && sources.size() > 1) {
        queryResponse = offsetResults;
        LOGGER.debug("returning offsetResults");
    } else {
        queryResponse = queryResponseQueue;
        LOGGER.debug("returning returnResults: {}", queryResponse);
    }
    LOGGER.debug("returning Query Results: {}", queryResponse);
    return queryResponse;
}
Also used : Query(ddf.catalog.operation.Query) QueryRequest(ddf.catalog.operation.QueryRequest) SourceResponse(ddf.catalog.operation.SourceResponse) PreFederatedQueryPlugin(ddf.catalog.plugin.PreFederatedQueryPlugin) HashMap(java.util.HashMap) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) StopProcessingException(ddf.catalog.plugin.StopProcessingException) Source(ddf.catalog.source.Source) QueryResponseImpl(ddf.catalog.operation.impl.QueryResponseImpl) CatalogProvider(ddf.catalog.source.CatalogProvider) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) QueryResponse(ddf.catalog.operation.QueryResponse) Future(java.util.concurrent.Future) PluginExecutionException(ddf.catalog.plugin.PluginExecutionException)

Example 58 with ExecutorCompletionService

use of java.util.concurrent.ExecutorCompletionService in project ddf by codice.

In class CswQueryResponseTransformer, method multiThreadedMarshal:

/**
     * Multi-threaded marshal of metacards. Assumes that the query size is unbounded to guard against
     * resource exhaustion with fixed thread-pool and fixed work-queue.  CPU-bound for optimum utilization
     * from availableProcessors()+1 thread pool.
     *
     * @param results - the list of results to marshal
     * @param numResults - running result count; decremented for each result whose transform fails
     * @param recordSchema - the schema
     * @param arguments - additional args
     * @return - the marshaled results, concatenated in the original result order
     * @throws CatalogTransformerException if no transformer is registered for {@code recordSchema}
     */
private String multiThreadedMarshal(List<Result> results, AtomicLong numResults, String recordSchema, final Map<String, Serializable> arguments) throws CatalogTransformerException {
    CompletionService<BinaryContent> completionService = new ExecutorCompletionService<>(queryExecutor);
    final MetacardTransformer transformer = metacardTransformerManager.getTransformerBySchema(recordSchema);
    if (transformer == null) {
        throw new CatalogTransformerException("Cannot find transformer for schema: " + recordSchema);
    }
    // Map each future to the index of its result so completed transforms can be written
    // back in original order. Storing the index directly replaces the previous
    // results.indexOf(...) lookup per completion, which made completion handling O(n^2).
    Map<Future<BinaryContent>, Integer> futures = new HashMap<>(results.size());
    for (int i = 0; i < results.size(); i++) {
        final Metacard mc = results.get(i).getMetacard();
        // the "current" thread will run submitted task when queueSize exceeded; effectively
        // blocking enqueue of more tasks.
        futures.put(completionService.submit(() -> transformer.transform(mc, arguments)), i);
    }
    InputStream[] contents = new InputStream[results.size()];
    while (!futures.isEmpty()) {
        try {
            Future<BinaryContent> completedFuture = completionService.take();
            int index = futures.get(completedFuture);
            try {
                contents[index] = completedFuture.get().getInputStream();
            } catch (ExecutionException | CancellationException | InterruptedException e) {
                // A failed transform drops that result from the output and the count.
                LOGGER.debug("Error transforming Metacard", e);
                numResults.decrementAndGet();
            } finally {
                futures.remove(completedFuture);
            }
        } catch (InterruptedException e) {
            // Deliberately swallow and retry take(): every submitted future must be drained
            // before the output is assembled. NOTE(review): this makes the method effectively
            // uninterruptible while transforms are outstanding — confirm callers accept that.
            LOGGER.debug("Metacard transform interrupted", e);
        }
    }
    // Concatenate the transformed contents in original result order; nulls (failed
    // transforms) are skipped.
    CharArrayWriter accum = new CharArrayWriter(ACCUM_INITIAL_SIZE);
    for (InputStream is : contents) {
        try {
            if (is != null) {
                IOUtils.copy(is, accum);
            }
        } catch (IOException e) {
            LOGGER.debug("Error copying Metacard Binary content", e);
        }
    }
    return accum.toString();
}
Also used : MetacardTransformer(ddf.catalog.transform.MetacardTransformer) HashMap(java.util.HashMap) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) CatalogTransformerException(ddf.catalog.transform.CatalogTransformerException) IOException(java.io.IOException) BinaryContent(ddf.catalog.data.BinaryContent) CharArrayWriter(java.io.CharArrayWriter) Result(ddf.catalog.data.Result) Metacard(ddf.catalog.data.Metacard) CancellationException(java.util.concurrent.CancellationException) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException)

Aggregations

ExecutorCompletionService (java.util.concurrent.ExecutorCompletionService)58 ExecutionException (java.util.concurrent.ExecutionException)27 ExecutorService (java.util.concurrent.ExecutorService)27 ArrayList (java.util.ArrayList)26 IOException (java.io.IOException)23 Future (java.util.concurrent.Future)18 Test (org.junit.Test)12 InterruptedIOException (java.io.InterruptedIOException)9 List (java.util.List)8 Path (org.apache.hadoop.fs.Path)8 ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor)6 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)6 Callable (java.util.concurrent.Callable)5 HashMap (java.util.HashMap)4 TimeoutException (java.util.concurrent.TimeoutException)4 File (java.io.File)3 Random (java.util.Random)3 FileData (com.alibaba.otter.shared.etl.model.FileData)2 UnknownHostException (java.net.UnknownHostException)2 Path (java.nio.file.Path)2