Use of java.util.concurrent.ExecutorCompletionService in project tika (Apache).
The class StrawManTikaAppDriver, method main.
/**
 * Entry point: starts {@code totalThreads} StrawManTikaAppDriver workers over an input
 * directory, waits for all of them, and logs the total number of files processed.
 *
 * Expected arguments: inputDir outputDir numThreads, followed by the tika-app
 * command line (the {@code args.length < 6} minimum mirrors the original check).
 */
public static void main(String[] args) {
    long start = System.currentTimeMillis();
    if (args.length < 6) {
        System.err.println(StrawManTikaAppDriver.usage());
        // Fix: bail out on bad usage instead of falling through and failing
        // later with an ArrayIndexOutOfBoundsException on args[0].
        return;
    }
    Path inputDir = Paths.get(args[0]);
    Path outputDir = Paths.get(args[1]);
    int totalThreads = Math.max(1, Integer.parseInt(args[2]));
    // Everything after the third argument is the command line handed to each driver.
    List<String> commandLine = new ArrayList<>(Arrays.asList(args).subList(3, args.length));
    ExecutorService ex = Executors.newFixedThreadPool(totalThreads);
    ExecutorCompletionService<Integer> completionService = new ExecutorCompletionService<>(ex);
    for (int i = 0; i < totalThreads; i++) {
        StrawManTikaAppDriver driver = new StrawManTikaAppDriver(
                inputDir, outputDir, totalThreads,
                commandLine.toArray(new String[0]));
        completionService.submit(driver);
    }
    // Fix: shut the pool down so the JVM can exit once the workers finish;
    // already-submitted tasks still run to completion after shutdown().
    ex.shutdown();
    int totalFilesProcessed = 0;
    for (int i = 0; i < totalThreads; i++) {
        try {
            // take() blocks until a worker completes and never returns null,
            // so the original null check was dead code.
            totalFilesProcessed += completionService.take().get();
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            LOG.error(e.getMessage(), e);
        } catch (ExecutionException e) {
            LOG.error(e.getMessage(), e);
        }
    }
    double elapsedSeconds = (System.currentTimeMillis() - start) / 1000.0;
    LOG.info("Processed {} in {} seconds", totalFilesProcessed, elapsedSeconds);
}
Use of java.util.concurrent.ExecutorCompletionService in project tika (Apache).
The class TestMetadata, method testMultithreadedDates.
/**
 * Exercises Metadata date handling from several threads concurrently.
 * Any exception thrown inside a worker surfaces through {@link Future#get()}
 * as an ExecutionException and fails the test.
 */
@Test
public void testMultithreadedDates() throws Exception {
    int numThreads = 10;
    ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
    try {
        ExecutorCompletionService<Integer> executorCompletionService =
                new ExecutorCompletionService<>(executorService);
        for (int i = 0; i < numThreads; i++) {
            executorCompletionService.submit(new MetadataDateAdder());
        }
        for (int finished = 0; finished < numThreads; finished++) {
            // take() only ever returns completed futures, so the original
            // null/isDone checks were redundant; get() rethrows worker failures.
            executorCompletionService.take().get();
        }
    } finally {
        // Fix: the original leaked the thread pool.
        executorService.shutdownNow();
    }
}
Use of java.util.concurrent.ExecutorCompletionService in project cdap (caskdata).
The class WorkflowDriver, method executeFork.
/**
 * Executes all branches of a FORK node concurrently, each on its own deep copy of the
 * WorkflowToken, and merges each finished branch's token back into the caller's token.
 *
 * @param appSpec      application specification passed through to branch execution
 * @param fork         the fork node whose branches are run in parallel
 * @param instantiator factory used when executing branch nodes
 * @param classLoader  class loader for branch execution
 * @param token        shared workflow token; branch results are merged into it
 * @throws Exception if any branch fails (the ExecutionException cause is unwrapped)
 */
private void executeFork(final ApplicationSpecification appSpec, WorkflowForkNode fork, final InstantiatorFactory instantiator, final ClassLoader classLoader, final WorkflowToken token) throws Exception {
    CountDownLatch executorTerminateLatch = new CountDownLatch(1);
    ExecutorService executorService = createExecutor(fork.getBranches().size(), executorTerminateLatch, "fork-" + fork.getNodeId() + "-%d");
    CompletionService<Map.Entry<String, WorkflowToken>> completionService = new ExecutorCompletionService<>(executorService);
    try {
        for (final List<WorkflowNode> branch : fork.getBranches()) {
            completionService.submit(new Callable<Map.Entry<String, WorkflowToken>>() {
                @Override
                public Map.Entry<String, WorkflowToken> call() throws Exception {
                    // Each branch runs against a deep copy so branches cannot
                    // observe each other's in-flight token mutations.
                    WorkflowToken copiedToken = ((BasicWorkflowToken) token).deepCopy();
                    executeAll(branch.iterator(), appSpec, instantiator, classLoader, copiedToken);
                    return Maps.immutableEntry(branch.toString(), copiedToken);
                }
            });
        }
        for (int i = 0; i < fork.getBranches().size(); i++) {
            try {
                Future<Map.Entry<String, WorkflowToken>> forkBranchResult = completionService.take();
                Map.Entry<String, WorkflowToken> retValue = forkBranchResult.get();
                String branchInfo = retValue.getKey();
                WorkflowToken branchToken = retValue.getValue();
                // Merge in completion order; remaining branches merge on later iterations.
                ((BasicWorkflowToken) token).mergeToken(branchToken);
                LOG.trace("Execution of branch {} for fork {} completed.", branchInfo, fork);
            } catch (InterruptedException e) {
                // Workflow abortion: fix — restore the interrupt flag (it was
                // swallowed) before abandoning the remaining branches.
                Thread.currentThread().interrupt();
                break;
            } catch (ExecutionException e) {
                // Unwrap the cause so callers see the branch's real exception.
                Throwables.propagateIfPossible(e.getCause(), Exception.class);
                throw Throwables.propagate(e.getCause());
            }
        }
    } finally {
        // Update the WorkflowToken after the execution of the FORK node completes,
        // even on failure/abort, then tear the executor down.
        runtimeStore.updateWorkflowToken(workflowRunId, token);
        executorService.shutdownNow();
        // Wait for the executor termination (signalled via the latch by createExecutor).
        executorTerminateLatch.await();
    }
}
Use of java.util.concurrent.ExecutorCompletionService in project tika (Apache).
The class HtmlParserTest, method testDetector.
/**
 * Runs the given EncodingDetector concurrently over the .txt/.html test documents:
 * first computes each file's encoding single-threaded, then has N+1 worker threads
 * drain a shared queue and (inside EncodingDetectorRunner) compare their results
 * against the precomputed map.
 *
 * @param detector the encoding detector under test
 * @throws Exception on any detection mismatch or worker failure
 */
private void testDetector(EncodingDetector detector) throws Exception {
    Path testDocs = Paths.get(this.getClass().getResource("/test-documents").toURI());
    List<Path> tmp = new ArrayList<>();
    Map<Path, String> encodings = new ConcurrentHashMap<>();
    File[] testDocArray = testDocs.toFile().listFiles();
    assertNotNull("no test docs??", testDocArray);
    for (File file : testDocArray) {
        if (file.getName().endsWith(".txt") || file.getName().endsWith(".html")) {
            String encoding = getEncoding(detector, file.toPath());
            tmp.add(file.toPath());
            encodings.put(file.toPath(), encoding);
        }
    }
    ArrayBlockingQueue<Path> paths = new ArrayBlockingQueue<>(tmp.size());
    paths.addAll(tmp);
    // One more worker than files guarantees at least one thread sees an empty queue.
    int numThreads = paths.size() + 1;
    ExecutorService ex = Executors.newFixedThreadPool(numThreads);
    try {
        CompletionService<String> completionService = new ExecutorCompletionService<>(ex);
        for (int i = 0; i < numThreads; i++) {
            completionService.submit(new EncodingDetectorRunner(paths, encodings, detector));
        }
        int completed = 0;
        while (completed < numThreads) {
            // take() only returns finished futures (the original isDone() check
            // was redundant); get() rethrows anything thrown during call().
            Future<String> future = completionService.take();
            if (EncodingDetectorRunner.DONE.equals(future.get())) {
                completed++;
            }
        }
    } finally {
        // Fix: the original leaked the thread pool.
        ex.shutdownNow();
    }
}
Use of java.util.concurrent.ExecutorCompletionService in project ignite (Apache).
The class GridTestCacheStore, method loadCache.
/**
 * Preload data from store. In this case we just auto-generate random values.
 *
 * Splits {@code entryCnt} keys evenly across {@code numThreads} workers (the last
 * worker also takes the remainder); each worker only applies keys whose affinity
 * maps to the local node.
 *
 * @param clo Callback for every key.
 * @param args Optional arguments: args[0] = number of load threads, args[1] = entry count.
 */
@Override
public void loadCache(final IgniteBiInClosure<GridTestKey, Long> clo, Object... args) {
    // Number of threads is passed in as argument by caller.
    final int numThreads = (Integer) args[0];
    int entryCnt = (Integer) args[1];
    log.info("Number of load threads: " + numThreads);
    log.info("Number of cache entries to load: " + entryCnt);
    ExecutorService execSvc = Executors.newFixedThreadPool(numThreads);
    try {
        ExecutorCompletionService<Object> completeSvc = new ExecutorCompletionService<>(execSvc);
        final IgniteCache<GridTestKey, Long> cache = ignite.cache("partitioned");
        assert cache != null;
        final LongAdder adder = new LongAdder();
        for (int i = 0; i < numThreads; i++) {
            final int threadId = i;
            final int perThreadKeys = entryCnt / numThreads;
            final int mod = entryCnt % numThreads;
            completeSvc.submit(new Callable<Object>() {
                @Override
                public Object call() throws Exception {
                    int start = threadId * perThreadKeys;
                    int end = start + perThreadKeys;
                    // The last worker picks up the division remainder.
                    if (threadId + 1 == numThreads)
                        end += mod;
                    for (long i = start; i < end; i++) {
                        if (ignite.affinity(cache.getName()).mapKeyToNode(GridTestKey.affinityKey(i)).isLocal()) {
                            // Only add if key is local.
                            clo.apply(new GridTestKey(i), i);
                            adder.increment();
                        }
                        if (i % 10000 == 0)
                            log.info("Loaded " + adder.intValue() + " keys.");
                    }
                    return null;
                }
            });
        }
        // Wait for threads to complete; get() rethrows worker failures.
        for (int i = 0; i < numThreads; i++) {
            try {
                completeSvc.take().get();
            } catch (InterruptedException e) {
                // Fix: restore the interrupt flag before wrapping, so callers
                // up the stack can still observe the interruption.
                Thread.currentThread().interrupt();
                throw new CacheLoaderException(e);
            } catch (ExecutionException e) {
                throw new CacheLoaderException(e);
            }
        }
        // Final print out.
        log.info("Loaded " + adder.intValue() + " keys.");
    } finally {
        execSvc.shutdown();
    }
}
Aggregations