Use of java.util.concurrent.ArrayBlockingQueue in project pinot by linkedin.
The class MCombineOperator, method getNextBlock.
@Override
public Block getNextBlock() {
  final long startTime = System.currentTimeMillis();
  final long queryEndTime = System.currentTimeMillis() + _timeOutMs;
  int numGroups = Math.max(MIN_THREADS_PER_QUERY, Math.min(MAX_THREADS_PER_QUERY, (_operators.size() + MIN_SEGMENTS_PER_THREAD - 1) / MIN_SEGMENTS_PER_THREAD));
  // ensure that the number of groups is not more than the number of segments
  numGroups = Math.min(_operators.size(), numGroups);
  final List<List<Operator>> operatorGroups = new ArrayList<List<Operator>>(numGroups);
  for (int i = 0; i < numGroups; i++) {
    operatorGroups.add(new ArrayList<Operator>());
  }
  for (int i = 0; i < _operators.size(); i++) {
    operatorGroups.get(i % numGroups).add(_operators.get(i));
  }
  final BlockingQueue<Block> blockingQueue = new ArrayBlockingQueue<>(operatorGroups.size());
  // Submit operators.
  for (final List<Operator> operatorGroup : operatorGroups) {
    _executorService.submit(new TraceRunnable() {

      @Override
      public void runJob() {
        IntermediateResultsBlock mergedBlock = null;
        try {
          for (Operator operator : operatorGroup) {
            IntermediateResultsBlock blockToMerge = (IntermediateResultsBlock) operator.nextBlock();
            if (mergedBlock == null) {
              mergedBlock = blockToMerge;
            } else {
              try {
                CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
              } catch (Exception e) {
                LOGGER.error("Caught exception while merging two blocks (step 1).", e);
                mergedBlock.addToProcessingExceptions(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
              }
            }
          }
        } catch (Exception e) {
          LOGGER.error("Caught exception while executing query.", e);
          mergedBlock = new IntermediateResultsBlock(e);
        }
        blockingQueue.offer(mergedBlock);
      }
    });
  }
  LOGGER.debug("Submitting operators to be run in parallel and it took:" + (System.currentTimeMillis() - startTime));
  // Submit merger job:
  Future<IntermediateResultsBlock> mergedBlockFuture = _executorService.submit(new TraceCallable<IntermediateResultsBlock>() {

    @Override
    public IntermediateResultsBlock callJob() throws Exception {
      int mergedBlocksNumber = 0;
      IntermediateResultsBlock mergedBlock = null;
      while ((queryEndTime > System.currentTimeMillis()) && (mergedBlocksNumber < operatorGroups.size())) {
        if (mergedBlock == null) {
          mergedBlock = (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
          if (mergedBlock != null) {
            mergedBlocksNumber++;
          }
          LOGGER.debug("Got response from operator 0 after: {}", (System.currentTimeMillis() - startTime));
        } else {
          IntermediateResultsBlock blockToMerge = (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
          if (blockToMerge != null) {
            try {
              LOGGER.debug("Got response from operator {} after: {}", mergedBlocksNumber, (System.currentTimeMillis() - startTime));
              CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
              LOGGER.debug("Merged response from operator {} after: {}", mergedBlocksNumber, (System.currentTimeMillis() - startTime));
            } catch (Exception e) {
              LOGGER.error("Caught exception while merging two blocks (step 2).", e);
              mergedBlock.addToProcessingExceptions(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
            }
            mergedBlocksNumber++;
          }
        }
      }
      return mergedBlock;
    }
  });
  // Get merge results.
  IntermediateResultsBlock mergedBlock;
  try {
    mergedBlock = mergedBlockFuture.get(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    LOGGER.error("Caught InterruptedException.", e);
    mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.FUTURE_CALL_ERROR, e));
  } catch (ExecutionException e) {
    LOGGER.error("Caught ExecutionException.", e);
    mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
  } catch (TimeoutException e) {
    LOGGER.error("Caught TimeoutException", e);
    mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.EXECUTION_TIMEOUT_ERROR, e));
  }
  // Update execution statistics.
  ExecutionStatistics executionStatistics = new ExecutionStatistics();
  for (Operator operator : _operators) {
    ExecutionStatistics executionStatisticsToMerge = operator.getExecutionStatistics();
    if (executionStatisticsToMerge != null) {
      executionStatistics.merge(executionStatisticsToMerge);
    }
  }
  mergedBlock.setNumDocsScanned(executionStatistics.getNumDocsScanned());
  mergedBlock.setNumEntriesScannedInFilter(executionStatistics.getNumEntriesScannedInFilter());
  mergedBlock.setNumEntriesScannedPostFilter(executionStatistics.getNumEntriesScannedPostFilter());
  mergedBlock.setNumTotalRawDocs(executionStatistics.getNumTotalRawDocs());
  return mergedBlock;
}
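The Pinot operator above uses the ArrayBlockingQueue as a scatter-gather channel: each worker group offer()s its single merged block (the queue capacity equals the number of groups, so offer() never fails for lack of space), while the merger poll()s with a timeout recomputed from the query deadline on every iteration. The following is a minimal standalone sketch of that pattern under simplified assumptions; the partial-sum task and merge step are made up for illustration and are not Pinot code.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ScatterGatherSketch {

    public static void main(String[] args) throws InterruptedException {
        final int numWorkers = 4;
        final long deadline = System.currentTimeMillis() + 1000;
        // Sized to the number of producers, so offer() always succeeds without blocking.
        final BlockingQueue<Integer> results = new ArrayBlockingQueue<>(numWorkers);
        ExecutorService executor = Executors.newFixedThreadPool(numWorkers);
        for (int w = 0; w < numWorkers; w++) {
            final int workerId = w;
            executor.submit(() -> {
                int partialSum = 0;
                for (int i = workerId * 100; i < (workerId + 1) * 100; i++) {
                    partialSum += i;
                }
                results.offer(partialSum);
            });
        }
        // Gather: poll with a timeout derived from the time remaining until the deadline,
        // so the consumer never waits past the overall deadline.
        int merged = 0;
        int received = 0;
        while (received < numWorkers && System.currentTimeMillis() < deadline) {
            Integer partial = results.poll(deadline - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
            if (partial != null) {
                merged += partial;
                received++;
            }
        }
        executor.shutdown();
        System.out.println("Merged " + received + " partial results, total = " + merged);
    }
}

Using a non-blocking offer() on the producer side and a timed poll() on the consumer side is what keeps the whole pipeline bounded by the deadline; this is the same reason the Pinot code recomputes queryEndTime - System.currentTimeMillis() before every poll.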
Use of java.util.concurrent.ArrayBlockingQueue in project jersey by jersey.
The class EventOutputTest, method testReadCommentsOnlySseEvents.
/**
 * Reproducer for JERSEY-2912: Sending and receiving comments-only events.
 *
 * @throws Exception
 */
@Test
public void testReadCommentsOnlySseEvents() throws Exception {
    ClientConfig clientConfig = new ClientConfig();
    clientConfig.property(ClientProperties.CONNECT_TIMEOUT, 15000);
    clientConfig.property(ClientProperties.READ_TIMEOUT, 0);
    clientConfig.property(ClientProperties.ASYNC_THREADPOOL_SIZE, 8);
    clientConfig.connectorProvider(new GrizzlyConnectorProvider());
    Client client = ClientBuilder.newBuilder().withConfig(clientConfig).build();
    final CountDownLatch latch = new CountDownLatch(2);
    final Queue<String> eventComments = new ArrayBlockingQueue<>(2);
    WebTarget single = client.target(getBaseUri()).path("test/comments-only");
    EventSource es = EventSource.target(single).build();
    es.register(new EventListener() {

        @Override
        public void onEvent(InboundEvent inboundEvent) {
            eventComments.add(inboundEvent.getComment());
            latch.countDown();
        }
    });
    boolean latchTimedOut;
    boolean closeTimedOut;
    try {
        es.open();
        latchTimedOut = latch.await(5 * getAsyncTimeoutMultiplier(), TimeUnit.SECONDS);
    } finally {
        closeTimedOut = es.close(5, TimeUnit.SECONDS);
    }
    assertEquals("Unexpected event count", 2, eventComments.size());
    for (int i = 1; i <= 2; i++) {
        assertEquals("Unexpected comment data on event #" + i, "No comment #" + i, eventComments.poll());
    }
    assertTrue("Event latch has timed out", latchTimedOut);
    assertTrue("EventSource.close() has timed out", closeTimedOut);
}
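Here the ArrayBlockingQueue is simply a thread-safe, bounded buffer that hands the received comments from the asynchronous EventSource callback thread over to the test thread, while the CountDownLatch provides the "all events arrived" signal. A minimal sketch of that hand-off pattern follows, using a plain background thread as a stand-in for Jersey's EventSource; the class and variable names are invented for the example.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class CallbackHandoffSketch {

    public static void main(String[] args) throws InterruptedException {
        final int expectedEvents = 2;
        final CountDownLatch latch = new CountDownLatch(expectedEvents);
        // Bounded, thread-safe buffer shared between the delivering thread and the asserting thread.
        final Queue<String> comments = new ArrayBlockingQueue<>(expectedEvents);

        // Stand-in for the callback thread that delivers events asynchronously.
        Thread eventDeliveryThread = new Thread(() -> {
            for (int i = 1; i <= expectedEvents; i++) {
                comments.add("No comment #" + i);
                latch.countDown();
            }
        });
        eventDeliveryThread.start();

        // The "test" thread waits for delivery, then inspects what was collected.
        boolean allEventsArrived = latch.await(5, TimeUnit.SECONDS);
        System.out.println("All events arrived: " + allEventsArrived);
        for (int i = 1; i <= expectedEvents; i++) {
            System.out.println("Received: " + comments.poll());
        }
        eventDeliveryThread.join();
    }
}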
Use of java.util.concurrent.ArrayBlockingQueue in project languagetool by languagetool-org.
The class AbstractCompoundRule, method match.
@Override
public RuleMatch[] match(AnalyzedSentence sentence) {
  List<RuleMatch> ruleMatches = new ArrayList<>();
  AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
  RuleMatch prevRuleMatch = null;
  Queue<AnalyzedTokenReadings> prevTokens = new ArrayBlockingQueue<>(MAX_TERMS);
  for (int i = 0; i < tokens.length + MAX_TERMS - 1; i++) {
    AnalyzedTokenReadings token;
    // we need to extend the token list so we find matches at the end of the original list:
    if (i >= tokens.length) {
      token = new AnalyzedTokenReadings(new AnalyzedToken("", "", null), prevTokens.peek().getStartPos());
    } else {
      token = tokens[i];
    }
    if (i == 0) {
      addToQueue(token, prevTokens);
      continue;
    }
    if (token.isImmunized()) {
      continue;
    }
    AnalyzedTokenReadings firstMatchToken = prevTokens.peek();
    List<String> stringsToCheck = new ArrayList<>();
    // original upper/lowercase spelling
    List<String> origStringsToCheck = new ArrayList<>();
    Map<String, AnalyzedTokenReadings> stringToToken = getStringToTokenMap(prevTokens, stringsToCheck, origStringsToCheck);
    // make sure we match longer strings first:
    for (int k = stringsToCheck.size() - 1; k >= 0; k--) {
      String stringToCheck = stringsToCheck.get(k);
      String origStringToCheck = origStringsToCheck.get(k);
      if (getCompoundRuleData().getIncorrectCompounds().contains(stringToCheck)) {
        AnalyzedTokenReadings atr = stringToToken.get(stringToCheck);
        String msg = null;
        List<String> replacement = new ArrayList<>();
        if (!getCompoundRuleData().getNoDashSuggestion().contains(stringToCheck)) {
          replacement.add(origStringToCheck.replace(' ', '-'));
          msg = withHyphenMessage;
        }
        if (isNotAllUppercase(origStringToCheck) && !getCompoundRuleData().getOnlyDashSuggestion().contains(stringToCheck)) {
          replacement.add(mergeCompound(origStringToCheck));
          msg = withoutHyphenMessage;
        }
        String[] parts = stringToCheck.split(" ");
        if (parts.length > 0 && parts[0].length() == 1) {
          replacement.clear();
          replacement.add(origStringToCheck.replace(' ', '-'));
          msg = withHyphenMessage;
        } else if (replacement.isEmpty() || replacement.size() == 2) {
          // isEmpty shouldn't happen
          msg = withOrWithoutHyphenMessage;
        }
        RuleMatch ruleMatch = new RuleMatch(this, firstMatchToken.getStartPos(), atr.getEndPos(), msg, shortDesc);
        ruleMatch.setSuggestedReplacements(replacement);
        // avoid duplicate matches:
        if (prevRuleMatch != null && prevRuleMatch.getFromPos() == ruleMatch.getFromPos()) {
          prevRuleMatch = ruleMatch;
          break;
        }
        prevRuleMatch = ruleMatch;
        ruleMatches.add(ruleMatch);
        break;
      }
    }
    addToQueue(token, prevTokens);
  }
  return toRuleMatchArray(ruleMatches);
}
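In this rule the ArrayBlockingQueue is not used for cross-thread hand-off at all: it acts as a bounded FIFO window holding the most recent MAX_TERMS tokens. The addToQueue helper is not shown above; presumably it evicts the oldest entry once the queue is full before adding the new token. A minimal sketch of such a sliding-window helper over a plain string stream, with hypothetical names, is shown below; it relies only on the documented behavior that offer() returns false on a full ArrayBlockingQueue and poll() removes the head.

import java.util.Arrays;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class SlidingWindowSketch {

    private static final int MAX_TERMS = 3;

    // Keep only the most recent MAX_TERMS tokens: evict the oldest when the queue is full.
    static void addToWindow(String token, Queue<String> window) {
        if (!window.offer(token)) {
            window.poll();
            window.offer(token);
        }
    }

    public static void main(String[] args) {
        Queue<String> window = new ArrayBlockingQueue<>(MAX_TERMS);
        List<String> tokens = Arrays.asList("a", "b", "c", "d", "e");
        for (String token : tokens) {
            addToWindow(token, window);
            // The window always holds at most the last MAX_TERMS tokens, oldest first.
            System.out.println(window);
        }
    }
}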
Use of java.util.concurrent.ArrayBlockingQueue in project mapdb by jankotek.
The class ArrayBlockingQueueTest, method testToArray2.
/**
 * toArray(a) contains all elements in FIFO order
 */
public void testToArray2() {
    ArrayBlockingQueue q = new ArrayBlockingQueue(SIZE);
    for (int i = 0; i < SIZE; i++) {
        checkToArray2(q);
        q.add(i);
    }
    // Provoke wraparound
    for (int i = 0; i < SIZE; i++) {
        checkToArray2(q);
        assertEquals(i, q.poll());
        checkToArray2(q);
        q.add(SIZE + i);
    }
    for (int i = 0; i < SIZE; i++) {
        checkToArray2(q);
        assertEquals(SIZE + i, q.poll());
    }
}
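ArrayBlockingQueue is backed by a circular array, so after a mix of add() and poll() the logical head no longer sits at array index 0; the test above verifies that toArray(a) still reports elements in FIFO order across that wraparound (SIZE and checkToArray2 are a constant and a helper defined elsewhere in the test class). A small standalone illustration of the same property, with inline output instead of the harness assertions:

import java.util.Arrays;
import java.util.concurrent.ArrayBlockingQueue;

public class ToArrayWraparoundSketch {

    public static void main(String[] args) {
        final int capacity = 4;
        ArrayBlockingQueue<Integer> q = new ArrayBlockingQueue<>(capacity);
        for (int i = 0; i < capacity; i++) {
            q.add(i);
        }
        // Force the internal head index to advance: remove two elements, add two more.
        q.poll();
        q.poll();
        q.add(capacity);
        q.add(capacity + 1);
        // toArray still reflects FIFO order even though the backing array has wrapped around.
        Integer[] snapshot = q.toArray(new Integer[0]);
        System.out.println(Arrays.toString(snapshot)); // [2, 3, 4, 5]
    }
}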
Use of java.util.concurrent.ArrayBlockingQueue in project mapdb by jankotek.
The class ArrayBlockingQueueTest, method testWeaklyConsistentIteration.
/**
 * Modifications do not cause iterators to fail
 */
public void testWeaklyConsistentIteration() {
    final ArrayBlockingQueue q = new ArrayBlockingQueue(3);
    q.add(one);
    q.add(two);
    q.add(three);
    for (Iterator it = q.iterator(); it.hasNext(); ) {
        q.remove();
        it.next();
    }
    assertEquals(0, q.size());
}
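Unlike the fail-fast iterators of ArrayList or LinkedList, ArrayBlockingQueue's iterator is weakly consistent: it never throws ConcurrentModificationException, and elements removed after the iterator was created may or may not be reflected. A standalone sketch of the same behavior follows; the one/two/three constants above come from the surrounding test harness and are replaced here by plain strings.

import java.util.Iterator;
import java.util.concurrent.ArrayBlockingQueue;

public class WeaklyConsistentIteratorSketch {

    public static void main(String[] args) {
        ArrayBlockingQueue<String> q = new ArrayBlockingQueue<>(3);
        q.add("one");
        q.add("two");
        q.add("three");
        // Removing from the queue while iterating is allowed: the iterator is weakly
        // consistent and never throws ConcurrentModificationException.
        for (Iterator<String> it = q.iterator(); it.hasNext(); ) {
            q.remove();
            String element = it.next();
            System.out.println("Iterator saw: " + element);
        }
        System.out.println("Remaining size: " + q.size()); // 0, all elements removed via q.remove()
    }
}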