
Example 6 with ThreadInterruptedException

use of org.apache.lucene.util.ThreadInterruptedException in project lucene-solr by apache.

the class TestStressNRTReplication method startNode.

/** Launches a child "server" (separate JVM), which is either primary or replica node */
@SuppressForbidden(reason = "ProcessBuilder requires java.io.File for CWD")
NodeProcess startNode(final int id, Path indexPath, boolean isPrimary, long forcePrimaryVersion) throws IOException {
    nodeTimeStamps[id] = System.nanoTime();
    List<String> cmd = new ArrayList<>();
    NodeProcess curPrimary = primary;
    cmd.add(System.getProperty("java.home") + System.getProperty("file.separator") + "bin" + System.getProperty("file.separator") + "java");
    cmd.add("-Xmx512m");
    if (curPrimary != null) {
        cmd.add("-Dtests.nrtreplication.primaryTCPPort=" + curPrimary.tcpPort);
    } else if (isPrimary == false) {
        // We cannot start a replica when there is no primary:
        return null;
    }
    // This is very costly (takes more time to check than it did to index); we do this ourselves in the end instead of each time a replica
    // is restarted:
    // cmd.add("-Dtests.nrtreplication.checkonclose=true");
    cmd.add("-Dtests.nrtreplication.node=true");
    cmd.add("-Dtests.nrtreplication.nodeid=" + id);
    cmd.add("-Dtests.nrtreplication.startNS=" + Node.globalStartNS);
    cmd.add("-Dtests.nrtreplication.indexpath=" + indexPath);
    if (isPrimary) {
        cmd.add("-Dtests.nrtreplication.isPrimary=true");
        cmd.add("-Dtests.nrtreplication.forcePrimaryVersion=" + forcePrimaryVersion);
        if (DO_CRASH_PRIMARY) {
            cmd.add("-Dtests.nrtreplication.doRandomCrash=true");
        }
        if (DO_CLOSE_PRIMARY) {
            cmd.add("-Dtests.nrtreplication.doRandomClose=true");
        }
    } else {
        if (DO_CRASH_REPLICA) {
            cmd.add("-Dtests.nrtreplication.doRandomCrash=true");
        }
        if (DO_CLOSE_REPLICA) {
            cmd.add("-Dtests.nrtreplication.doRandomClose=true");
        }
    }
    if (DO_BIT_FLIPS_DURING_COPY) {
        cmd.add("-Dtests.nrtreplication.doFlipBitsDuringCopy=true");
    }
    long myPrimaryGen = primaryGen;
    cmd.add("-Dtests.nrtreplication.primaryGen=" + myPrimaryGen);
    // Mix in our own counter because this is called from a fresh thread, which means the seed otherwise isn't changing each time we spawn a
    // new node:
    long seed = random().nextLong() * nodeStartCounter.incrementAndGet();
    cmd.add("-Dtests.seed=" + SeedUtils.formatSeed(seed));
    cmd.add("-ea");
    cmd.add("-cp");
    cmd.add(System.getProperty("java.class.path"));
    cmd.add("org.junit.runner.JUnitCore");
    cmd.add(getClass().getName().replace(getClass().getSimpleName(), "SimpleServer"));
    Writer childLog;
    if (SEPARATE_CHILD_OUTPUT) {
        Path childOut = childTempDir.resolve(id + ".log");
        message("logging to " + childOut);
        childLog = Files.newBufferedWriter(childOut, StandardCharsets.UTF_8, StandardOpenOption.APPEND, StandardOpenOption.CREATE);
        childLog.write("\n\nSTART NEW CHILD:\n");
    } else {
        childLog = null;
    }
    //message("child process command: " + cmd);
    ProcessBuilder pb = new ProcessBuilder(cmd);
    pb.redirectErrorStream(true);
    // Important, so that any scary looking hs_err_<pid>.log files appear under our test temp dir:
    pb.directory(childTempDir.toFile());
    Process p = pb.start();
    BufferedReader r;
    try {
        r = new BufferedReader(new InputStreamReader(p.getInputStream(), IOUtils.UTF_8));
    } catch (UnsupportedEncodingException uee) {
        throw new RuntimeException(uee);
    }
    int tcpPort = -1;
    long initCommitVersion = -1;
    long initInfosVersion = -1;
    Pattern logTimeStart = Pattern.compile("^[0-9\\.]+s .*");
    boolean willCrash = false;
    while (true) {
        String l = r.readLine();
        if (l == null) {
            message("top: node=" + id + " failed to start");
            try {
                p.waitFor();
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
            message("exit value=" + p.exitValue());
            if (p.exitValue() == 0) {
                message("zero exit status; assuming failed to remove segments_N; skipping");
                return null;
            }
            // Hackity hack, in case primary crashed/closed and we haven't noticed (reaped the process) yet:
            if (isPrimary == false) {
                for (int i = 0; i < 100; i++) {
                    NodeProcess primary2 = primary;
                    if (primaryGen != myPrimaryGen || primary2 == null || primary2.nodeIsClosing.get()) {
                        // OK: primary crashed while we were trying to start, so it's expected/allowed that we could not start the replica:
                        message("primary crashed/closed while replica R" + id + " tried to start; skipping");
                        return null;
                    } else {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ie) {
                            throw new ThreadInterruptedException(ie);
                        }
                    }
                }
            }
            // Should fail the test:
            message("top: now fail test replica R" + id + " failed to start");
            failed.set(true);
            throw new RuntimeException("replica R" + id + " failed to start");
        }
        if (childLog != null) {
            childLog.write(l);
            childLog.write("\n");
            childLog.flush();
        } else if (logTimeStart.matcher(l).matches()) {
            // Already a well-formed log output:
            System.out.println(l);
        } else {
            message(l);
        }
        if (l.startsWith("PORT: ")) {
            tcpPort = Integer.parseInt(l.substring(6).trim());
        } else if (l.startsWith("COMMIT VERSION: ")) {
            initCommitVersion = Integer.parseInt(l.substring(16).trim());
        } else if (l.startsWith("INFOS VERSION: ")) {
            initInfosVersion = Integer.parseInt(l.substring(15).trim());
        } else if (l.contains("will crash after")) {
            willCrash = true;
        } else if (l.startsWith("NODE STARTED")) {
            break;
        }
    }
    final boolean finalWillCrash = willCrash;
    final AtomicBoolean nodeIsClosing = new AtomicBoolean();
    // Babysits the child process, pulling its stdout and printing to our stdout, and calling nodeClosed once it exits:
    Thread pumper = ThreadPumper.start(new Runnable() {

        @Override
        public void run() {
            message("now wait for process " + p);
            try {
                p.waitFor();
            } catch (Throwable t) {
                throw new RuntimeException(t);
            }
            message("done wait for process " + p);
            int exitValue = p.exitValue();
            message("exit value=" + exitValue + " willCrash=" + finalWillCrash);
            if (childLog != null) {
                try {
                    childLog.write("process done; exitValue=" + exitValue + "\n");
                    childLog.close();
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }
            }
            if (exitValue != 0 && finalWillCrash == false && crashingNodes.remove(id) == false) {
                // should fail test
                failed.set(true);
                if (childLog != null) {
                    throw new RuntimeException("node " + id + " process had unexpected non-zero exit status=" + exitValue + "; see " + childLog + " for details");
                } else {
                    throw new RuntimeException("node " + id + " process had unexpected non-zero exit status=" + exitValue);
                }
            }
            nodeClosed(id);
        }
    }, r, System.out, childLog, nodeIsClosing);
    pumper.setName("pump" + id);
    message("top: node=" + id + " started at tcpPort=" + tcpPort + " initCommitVersion=" + initCommitVersion + " initInfosVersion=" + initInfosVersion);
    return new NodeProcess(p, id, tcpPort, pumper, isPrimary, initCommitVersion, initInfosVersion, nodeIsClosing);
}
Also used : Path(java.nio.file.Path) Pattern(java.util.regex.Pattern) InputStreamReader(java.io.InputStreamReader) ArrayList(java.util.ArrayList) UnsupportedEncodingException(java.io.UnsupportedEncodingException) ThreadInterruptedException(org.apache.lucene.util.ThreadInterruptedException) IOException(java.io.IOException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) BufferedReader(java.io.BufferedReader) Writer(java.io.Writer) SuppressForbidden(org.apache.lucene.util.SuppressForbidden)

Example 7 with ThreadInterruptedException

use of org.apache.lucene.util.ThreadInterruptedException in project elasticsearch by elastic.

the class CancellableThreads method executeIO.

/**
     * Runs the Interruptable, capturing the executing thread. Concurrent calls to {@link #cancel(String)} will interrupt this thread,
     * causing the call to return prematurely.
     *
     * @param interruptable code to run
     */
public void executeIO(IOInterruptable interruptable) throws IOException {
    boolean wasInterrupted = add();
    boolean cancelledByExternalInterrupt = false;
    RuntimeException runtimeException = null;
    IOException ioException = null;
    try {
        interruptable.run();
    } catch (InterruptedException | ThreadInterruptedException e) {
        // ignore, this interrupt has been triggered by us in #cancel()...
        assert cancelled : "Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead";
        // we can only reach here if assertions are disabled. If we reach this code and cancelled is false, this means that we've
        // been interrupted externally (which we don't support).
        cancelledByExternalInterrupt = !cancelled;
    } catch (RuntimeException t) {
        runtimeException = t;
    } catch (IOException e) {
        ioException = e;
    } finally {
        remove();
    }
    // restore old flag and see if we need to fail
    if (wasInterrupted) {
        Thread.currentThread().interrupt();
    } else {
        // clear the interrupted flag as we are checking for failure.
        Thread.interrupted();
    }
    synchronized (this) {
        if (isCancelled()) {
            onCancel(reason, ioException != null ? ioException : runtimeException);
        } else if (ioException != null) {
            // if we're not canceling, we throw the original exception
            throw ioException;
        }
        if (runtimeException != null) {
            // if we're not canceling, we throw the original exception
            throw runtimeException;
        }
    }
    if (cancelledByExternalInterrupt) {
        // restore interrupt flag to at least adhere to expected behavior
        Thread.currentThread().interrupt();
        throw new RuntimeException("Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead");
    }
}
Also used : ThreadInterruptedException(org.apache.lucene.util.ThreadInterruptedException) IOException(java.io.IOException)
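For context, here is a minimal usage sketch (hypothetical, not taken from the project) of how executeIO interacts with cancel(String). It assumes the CancellableThreads class shown above lives at org.elasticsearch.common.util.CancellableThreads and that, after cancellation, executeIO reports the cancel reason via a runtime exception:

import org.elasticsearch.common.util.CancellableThreads;

public class ExecuteIOCancelSketch {
    public static void main(String[] args) throws Exception {
        CancellableThreads cancellableThreads = new CancellableThreads();
        Thread worker = new Thread(() -> {
            try {
                // The lambda is the IOInterruptable: it may block and may be interrupted.
                cancellableThreads.executeIO(() -> Thread.sleep(60_000));
            } catch (Exception e) {
                // After cancel(), executeIO surfaces the cancellation (carrying the cancel
                // reason) rather than the raw interrupt.
                System.out.println("worker stopped: " + e);
            }
        });
        worker.start();
        Thread.sleep(100);
        // cancel() interrupts the thread currently inside executeIO(); the resulting
        // InterruptedException/ThreadInterruptedException is swallowed there because
        // the interrupt was triggered by cancel() itself.
        cancellableThreads.cancel("test cancel");
        worker.join();
    }
}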

Example 8 with ThreadInterruptedException

use of org.apache.lucene.util.ThreadInterruptedException in project lucene-solr by apache.

the class TestSnapshotDeletionPolicy method runTest.

private void runTest(Random random, Directory dir) throws Exception {
    // Run for ~1 second
    final long stopTime = System.currentTimeMillis() + 1000;
    SnapshotDeletionPolicy dp = getDeletionPolicy();
    final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)).setIndexDeletionPolicy(dp).setMaxBufferedDocs(2));
    // Verify we catch misuse:
    expectThrows(IllegalStateException.class, () -> {
        dp.snapshot();
    });
    writer.commit();
    final Thread t = new Thread() {

        @Override
        public void run() {
            Document doc = new Document();
            FieldType customType = new FieldType(TextField.TYPE_STORED);
            customType.setStoreTermVectors(true);
            customType.setStoreTermVectorPositions(true);
            customType.setStoreTermVectorOffsets(true);
            doc.add(newField("content", "aaa", customType));
            do {
                for (int i = 0; i < 27; i++) {
                    try {
                        writer.addDocument(doc);
                    } catch (Throwable t) {
                        t.printStackTrace(System.out);
                        fail("addDocument failed");
                    }
                    if (i % 2 == 0) {
                        try {
                            writer.commit();
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                try {
                    Thread.sleep(1);
                } catch (InterruptedException ie) {
                    throw new ThreadInterruptedException(ie);
                }
            } while (System.currentTimeMillis() < stopTime);
        }
    };
    t.start();
    // backups:
    do {
        backupIndex(dir, dp);
        Thread.sleep(20);
    } while (t.isAlive());
    t.join();
    // Add one more document to force writer to commit a
    // final segment, so deletion policy has a chance to
    // delete again:
    Document doc = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.setStoreTermVectors(true);
    customType.setStoreTermVectorPositions(true);
    customType.setStoreTermVectorOffsets(true);
    doc.add(newField("content", "aaa", customType));
    writer.addDocument(doc);
    // Make sure we don't have any leftover files in the
    // directory:
    writer.close();
    TestIndexWriter.assertNoUnreferencedFiles(dir, "some files were not deleted but should have been");
}
Also used : ThreadInterruptedException(org.apache.lucene.util.ThreadInterruptedException) Document(org.apache.lucene.document.Document) IOException(java.io.IOException) FieldType(org.apache.lucene.document.FieldType) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer)
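The backupIndex helper is not shown above; as a rough sketch (hypothetical method body, using the standard SnapshotDeletionPolicy API), such a backup typically pins a commit with snapshot(), copies the commit's files, and releases the snapshot in a finally block:

import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.store.Directory;

// Hypothetical sketch of what backupIndex(dir, dp) might do:
void backupIndex(Directory dir, SnapshotDeletionPolicy dp) throws Exception {
    // Pin the most recent commit point so the deletion policy keeps its files alive:
    IndexCommit commit = dp.snapshot();
    try {
        for (String fileName : commit.getFileNames()) {
            // Copy fileName from dir to the backup destination (omitted in this sketch).
        }
    } finally {
        // Un-pin the commit so its files may be deleted by later commits:
        dp.release(commit);
    }
}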

Example 9 with ThreadInterruptedException

use of org.apache.lucene.util.ThreadInterruptedException in project lucene-solr by apache.

the class DrillSideways method search.

/** Runs a search, using a {@link CollectorManager} to gather and merge search results */
public <R> ConcurrentDrillSidewaysResult<R> search(final DrillDownQuery query, final CollectorManager<?, R> hitCollectorManager) throws IOException {
    final Map<String, Integer> drillDownDims = query.getDims();
    final List<CallableCollector> callableCollectors = new ArrayList<>(drillDownDims.size() + 1);
    // Add the main DrillDownQuery
    callableCollectors.add(new CallableCollector(-1, searcher, query, new MultiCollectorManager(new FacetsCollectorManager(), hitCollectorManager)));
    int i = 0;
    final Query[] filters = query.getDrillDownQueries();
    for (String dim : drillDownDims.keySet()) callableCollectors.add(new CallableCollector(i++, searcher, getDrillDownQuery(query, filters, dim), new FacetsCollectorManager()));
    final FacetsCollector mainFacetsCollector;
    final FacetsCollector[] facetsCollectors = new FacetsCollector[drillDownDims.size()];
    final R collectorResult;
    try {
        // Run the query pool
        final List<Future<CallableResult>> futures = executor.invokeAll(callableCollectors);
        // Extract the results
        final Object[] mainResults = (Object[]) futures.get(0).get().result;
        mainFacetsCollector = (FacetsCollector) mainResults[0];
        collectorResult = (R) mainResults[1];
        for (i = 1; i < futures.size(); i++) {
            final CallableResult result = futures.get(i).get();
            facetsCollectors[result.pos] = (FacetsCollector) result.result;
        }
        // Fill the null results with the mainFacetsCollector
        for (i = 0; i < facetsCollectors.length; i++) {
            if (facetsCollectors[i] == null) {
                facetsCollectors[i] = mainFacetsCollector;
            }
        }
    } catch (InterruptedException e) {
        throw new ThreadInterruptedException(e);
    } catch (ExecutionException e) {
        throw new RuntimeException(e);
    }
    // build the facets and return the result
    return new ConcurrentDrillSidewaysResult<>(buildFacetsResult(mainFacetsCollector, facetsCollectors, drillDownDims.keySet().toArray(new String[drillDownDims.size()])), null, collectorResult);
}
Also used : Query(org.apache.lucene.search.Query) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) ArrayList(java.util.ArrayList) ThreadInterruptedException(org.apache.lucene.util.ThreadInterruptedException) MultiCollectorManager(org.apache.lucene.search.MultiCollectorManager) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException)

Example 10 with ThreadInterruptedException

use of org.apache.lucene.util.ThreadInterruptedException in project lucene-solr by apache.

the class DocumentsWriterStallControl method waitIfStalled.

/**
   * Blocks if document writing is currently in a stalled state.
   */
void waitIfStalled() {
    if (stalled) {
        synchronized (this) {
            if (stalled) {
                // don't loop here, higher level logic will re-stall!
                try {
                    incWaiters();
                    // Defensive, in case we have a concurrency bug that fails to .notify/All our thread:
                    // just wait for up to 1 second here, and let caller re-stall if it's still needed:
                    wait(1000);
                    decrWaiters();
                } catch (InterruptedException e) {
                    throw new ThreadInterruptedException(e);
                }
            }
        }
    }
}
Also used : ThreadInterruptedException(org.apache.lucene.util.ThreadInterruptedException)
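The pattern shared by these examples is worth stating on its own: code that cannot propagate the checked InterruptedException re-throws it as Lucene's unchecked ThreadInterruptedException, which preserves the original interrupt as its cause. A minimal illustration with a hypothetical helper:

import org.apache.lucene.util.ThreadInterruptedException;

// Hypothetical helper showing the wrap-and-rethrow pattern used throughout these examples:
static void sleepQuietly(long millis) {
    try {
        Thread.sleep(millis);
    } catch (InterruptedException ie) {
        // Re-throw as an unchecked exception so callers that do not declare
        // InterruptedException still see the interrupt (as the cause):
        throw new ThreadInterruptedException(ie);
    }
}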

Aggregations

ThreadInterruptedException (org.apache.lucene.util.ThreadInterruptedException)17 IOException (java.io.IOException)4 ArrayList (java.util.ArrayList)3 CountDownLatch (java.util.concurrent.CountDownLatch)2 ExecutionException (java.util.concurrent.ExecutionException)2 Future (java.util.concurrent.Future)2 SimpleRateLimiter (org.apache.lucene.store.RateLimiter.SimpleRateLimiter)2 BufferedReader (java.io.BufferedReader)1 InputStreamReader (java.io.InputStreamReader)1 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 Writer (java.io.Writer)1 Path (java.nio.file.Path)1 Callable (java.util.concurrent.Callable)1 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)1 AtomicLong (java.util.concurrent.atomic.AtomicLong)1 Pattern (java.util.regex.Pattern)1 MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer)1 Document (org.apache.lucene.document.Document)1 FieldType (org.apache.lucene.document.FieldType)1 LeafReaderContext (org.apache.lucene.index.LeafReaderContext)1