Example 1 with Function

Use of java.util.function.Function in project elasticsearch by elastic.

Class RecoverySourceHandler, method phase1:

/**
     * Perform phase1 of the recovery operations. Once this {@link IndexCommit}
     * snapshot has been taken, no commit operations (files being fsync'd)
     * are effectively allowed on this index until all recovery phases are complete.
     * <p>
     * Phase1 examines the segment files on the target node and copies over the
     * segments that are missing. Only segments that have the same size and
     * checksum can be reused.
     */
public void phase1(final IndexCommit snapshot, final Translog.View translogView) {
    cancellableThreads.checkForCancel();
    // Total size of segment files that are recovered
    long totalSize = 0;
    // Total size of segment files that were able to be re-used
    long existingTotalSize = 0;
    final Store store = shard.store();
    store.incRef();
    try {
        StopWatch stopWatch = new StopWatch().start();
        final Store.MetadataSnapshot recoverySourceMetadata;
        try {
            recoverySourceMetadata = store.getMetadata(snapshot);
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            shard.failShard("recovery", ex);
            throw ex;
        }
        for (String name : snapshot.getFileNames()) {
            final StoreFileMetaData md = recoverySourceMetadata.get(name);
            if (md == null) {
                logger.info("Snapshot differs from actual index for file: {} meta: {}", name, recoverySourceMetadata.asMap());
                throw new CorruptIndexException("Snapshot differs from actual index - maybe index was removed metadata has " + recoverySourceMetadata.asMap().size() + " files", name);
            }
        }
        // Generate a "diff" of all the identical, different, and missing
        // segment files on the target node, using the existing files on
        // the source node
        String recoverySourceSyncId = recoverySourceMetadata.getSyncId();
        String recoveryTargetSyncId = request.metadataSnapshot().getSyncId();
        final boolean recoverWithSyncId = recoverySourceSyncId != null && recoverySourceSyncId.equals(recoveryTargetSyncId);
        if (recoverWithSyncId) {
            final long numDocsTarget = request.metadataSnapshot().getNumDocs();
            final long numDocsSource = recoverySourceMetadata.getNumDocs();
            if (numDocsTarget != numDocsSource) {
                throw new IllegalStateException("try to recover " + request.shardId() + " from primary shard with sync id but number " + "of docs differ: " + numDocsSource + " (" + request.sourceNode().getName() + ", primary) vs " + numDocsTarget + "(" + request.targetNode().getName() + ")");
            }
            // we shortcut recovery here because we have nothing to copy. but we must still start the engine on the target.
            // so we don't return here
            logger.trace("skipping [phase1]- identical sync id [{}] found on both source and target", recoverySourceSyncId);
        } else {
            final Store.RecoveryDiff diff = recoverySourceMetadata.recoveryDiff(request.metadataSnapshot());
            for (StoreFileMetaData md : diff.identical) {
                response.phase1ExistingFileNames.add(md.name());
                response.phase1ExistingFileSizes.add(md.length());
                existingTotalSize += md.length();
                if (logger.isTraceEnabled()) {
                    logger.trace("recovery [phase1]: not recovering [{}], exist in local store and has checksum [{}]," + " size [{}]", md.name(), md.checksum(), md.length());
                }
                totalSize += md.length();
            }
            List<StoreFileMetaData> phase1Files = new ArrayList<>(diff.different.size() + diff.missing.size());
            phase1Files.addAll(diff.different);
            phase1Files.addAll(diff.missing);
            for (StoreFileMetaData md : phase1Files) {
                if (request.metadataSnapshot().asMap().containsKey(md.name())) {
                    logger.trace("recovery [phase1]: recovering [{}], exists in local store, but is different: remote [{}], local [{}]", md.name(), request.metadataSnapshot().asMap().get(md.name()), md);
                } else {
                    logger.trace("recovery [phase1]: recovering [{}], does not exist in remote", md.name());
                }
                response.phase1FileNames.add(md.name());
                response.phase1FileSizes.add(md.length());
                totalSize += md.length();
            }
            response.phase1TotalSize = totalSize;
            response.phase1ExistingTotalSize = existingTotalSize;
            logger.trace("recovery [phase1]: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]", response.phase1FileNames.size(), new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize));
            cancellableThreads.execute(() -> recoveryTarget.receiveFileInfo(response.phase1FileNames, response.phase1FileSizes, response.phase1ExistingFileNames, response.phase1ExistingFileSizes, translogView.totalOperations()));
            // Create an output stream per file; each stream buffers writes into chunks and forwards them to the recovery target
            final Function<StoreFileMetaData, OutputStream> outputStreamFactories = md -> new BufferedOutputStream(new RecoveryOutputStream(md, translogView), chunkSizeInBytes);
            sendFiles(store, phase1Files.toArray(new StoreFileMetaData[phase1Files.size()]), outputStreamFactories);
            // Send the CLEAN_FILES request: the target renames the transferred temporary files to their final
            // names and deletes any files that are not part of this recovery
            try {
                cancellableThreads.executeIO(() -> recoveryTarget.cleanFiles(translogView.totalOperations(), recoverySourceMetadata));
            } catch (RemoteTransportException | IOException targetException) {
                final IOException corruptIndexException;
                // the target may report corruption even though the copy succeeded, e.g. due to old segments
                // without checksums or with length-only checks, so verify the source files to decide on which
                // side the corruption happened
                if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(targetException)) != null) {
                    try {
                        final Store.MetadataSnapshot recoverySourceMetadata1 = store.getMetadata(snapshot);
                        StoreFileMetaData[] metadata = StreamSupport.stream(recoverySourceMetadata1.spliterator(), false).toArray(size -> new StoreFileMetaData[size]);
                        ArrayUtil.timSort(metadata, (o1, o2) -> {
                            // check small files first
                            return Long.compare(o1.length(), o2.length());
                        });
                        for (StoreFileMetaData md : metadata) {
                            cancellableThreads.checkForCancel();
                            logger.debug("checking integrity for file {} after remove corruption exception", md);
                            if (store.checkIntegrityNoException(md) == false) {
                                // we are corrupted on the primary -- fail!
                                shard.failShard("recovery", corruptIndexException);
                                logger.warn("Corrupted file detected {} checksum mismatch", md);
                                throw corruptIndexException;
                            }
                        }
                    } catch (IOException ex) {
                        targetException.addSuppressed(ex);
                        throw targetException;
                    }
                    // corruption has happened on the way to replica
                    RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but " + "checksums are ok", null);
                    exception.addSuppressed(targetException);
                    logger.warn((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage("{} Remote file corruption during finalization of recovery on node {}. local checksum OK", shard.shardId(), request.targetNode()), corruptIndexException);
                    throw exception;
                } else {
                    throw targetException;
                }
            }
        }
        logger.trace("recovery [phase1]: took [{}]", stopWatch.totalTime());
        response.phase1Time = stopWatch.totalTime().millis();
    } catch (Exception e) {
        throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e);
    } finally {
        store.decRef();
    }
}
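
Here the Function serves as a per-file output stream factory: sendFiles asks the factory for a fresh OutputStream for every StoreFileMetaData it transfers, and the factory decides how that stream is built (buffering, chunk size, target transport). Below is a minimal, self-contained sketch of the same pattern; sendAll, the file names, and the in-memory sink are hypothetical stand-ins, not part of the Elasticsearch API.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.function.Function;

public class OutputStreamFactoryDemo {

    // Stand-in for sendFiles(...): for each named source, obtain a stream from the
    // factory and copy the bytes into it. The factory decides how each stream is
    // built, so the copy loop stays unaware of the transport.
    static void sendAll(Map<String, byte[]> sources,
                        Function<String, OutputStream> outputStreamFactory) throws IOException {
        for (Map.Entry<String, byte[]> e : sources.entrySet()) {
            try (OutputStream out = outputStreamFactory.apply(e.getKey());
                 InputStream in = new ByteArrayInputStream(e.getValue())) {
                in.transferTo(out);
            }
        }
    }

    public static void main(String[] args) throws IOException {
        Map<String, byte[]> files = Map.of(
                "_0.cfs", "segment data".getBytes(StandardCharsets.UTF_8),
                "_0.si", "segment info".getBytes(StandardCharsets.UTF_8));

        // Mirrors the lambda in phase1: wrap whatever the raw sink is (here an
        // in-memory buffer) behind a Function keyed by the file being sent.
        Function<String, OutputStream> factory = name -> {
            System.out.println("opening stream for " + name);
            return new ByteArrayOutputStream();
        };

        sendAll(files, factory);
    }
}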

Example 2 with Function

Use of java.util.function.Function in project elasticsearch by elastic.

Class ShardSearchRequest, method parseAliasFilter:

/**
     * Returns the filter associated with listed filtering aliases.
     * <p>
     * The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
     * Returns <tt>null</tt> if no filtering is required.</p>
     */
static QueryBuilder parseAliasFilter(CheckedFunction<byte[], QueryBuilder, IOException> filterParser, IndexMetaData metaData, String... aliasNames) {
    if (aliasNames == null || aliasNames.length == 0) {
        return null;
    }
    Index index = metaData.getIndex();
    ImmutableOpenMap<String, AliasMetaData> aliases = metaData.getAliases();
    Function<AliasMetaData, QueryBuilder> parserFunction = (alias) -> {
        if (alias.filter() == null) {
            return null;
        }
        try {
            return filterParser.apply(alias.filter().uncompressed());
        } catch (IOException ex) {
            throw new AliasFilterParsingException(index, alias.getAlias(), "Invalid alias filter", ex);
        }
    };
    if (aliasNames.length == 1) {
        AliasMetaData alias = aliases.get(aliasNames[0]);
        if (alias == null) {
            // This shouldn't happen unless alias disappeared after filteringAliases was called.
            throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter");
        }
        return parserFunction.apply(alias);
    } else {
        // we should benchmark this a bit to see whether it makes sense to use an OR filter instead
        BoolQueryBuilder combined = new BoolQueryBuilder();
        for (String aliasName : aliasNames) {
            AliasMetaData alias = aliases.get(aliasName);
            if (alias == null) {
                // This shouldn't happen unless alias disappeared after filteringAliases was called.
                throw new InvalidAliasNameException(index, aliasName, "Unknown alias name was passed to alias Filter");
            }
            QueryBuilder parsedFilter = parserFunction.apply(alias);
            if (parsedFilter != null) {
                combined.should(parsedFilter);
            } else {
                // The filter might be null only if filter was removed after filteringAliases was called
                return null;
            }
        }
        return combined;
    }
}
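
parseAliasFilter wraps a CheckedFunction, which may throw IOException, inside a plain java.util.function.Function by translating the checked exception into a domain-specific runtime exception, so the parser can be applied from ordinary lambda-friendly code. Below is a minimal sketch of that adaptation; the Filter record, FilterParsingException, and the simplified CheckedFunction interface are illustrative stand-ins for QueryBuilder, AliasFilterParsingException, and org.elasticsearch.common.CheckedFunction.

import java.io.IOException;
import java.util.List;
import java.util.function.Function;

public class CheckedToFunctionDemo {

    // Simplified stand-in for org.elasticsearch.common.CheckedFunction.
    @FunctionalInterface
    interface CheckedFunction<T, R, E extends Exception> {
        R apply(T t) throws E;
    }

    // Hypothetical parsed-filter type standing in for QueryBuilder.
    record Filter(String expression) { }

    // Unchecked wrapper playing the role of AliasFilterParsingException.
    static class FilterParsingException extends RuntimeException {
        FilterParsingException(String alias, IOException cause) {
            super("Invalid alias filter for [" + alias + "]", cause);
        }
    }

    // Wrap the checked parser in a plain Function, as parseAliasFilter does.
    static Function<String, Filter> asFunction(CheckedFunction<String, Filter, IOException> parser) {
        return alias -> {
            try {
                return parser.apply(alias);
            } catch (IOException ex) {
                throw new FilterParsingException(alias, ex);
            }
        };
    }

    public static void main(String[] args) {
        CheckedFunction<String, Filter, IOException> checkedParser = alias -> {
            if (alias.isBlank()) {
                throw new IOException("empty alias source");
            }
            return new Filter("term:" + alias);
        };

        Function<String, Filter> parser = asFunction(checkedParser);
        for (String alias : List.of("logs-read", "logs-write")) {
            System.out.println(alias + " -> " + parser.apply(alias).expression());
        }
    }
}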

Example 3 with Function

Use of java.util.function.Function in project elasticsearch by elastic.

Class QueryShardContext, method getLazySearchScript:

/**
     * Returns a lazily created {@link SearchScript}: the script is compiled immediately, but the
     * {@link SearchScript} itself is only created later, once all parameters are available.
     */
public final Function<Map<String, Object>, SearchScript> getLazySearchScript(Script script, ScriptContext context) {
    failIfFrozen();
    CompiledScript compile = scriptService.compile(script, context);
    return (p) -> scriptService.search(lookup(), compile, p);
}
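
The returned Function defers only the parameter binding: the script is compiled once, up front (so compilation errors surface immediately), and each apply call produces a script bound to its own parameters. A minimal compile-once / bind-later sketch, using a hypothetical CompiledTemplate in place of CompiledScript and SearchScript:

import java.util.Map;
import java.util.function.Function;

public class LazyBindingDemo {

    // Hypothetical "compiled" artifact: expensive to build, cheap to reuse.
    record CompiledTemplate(String source) {
        String render(Map<String, Object> params) {
            String out = source;
            for (Map.Entry<String, Object> e : params.entrySet()) {
                out = out.replace("{" + e.getKey() + "}", String.valueOf(e.getValue()));
            }
            return out;
        }
    }

    // Compile eagerly, bind lazily: mirrors getLazySearchScript, which compiles the
    // script up front and returns a Function that binds parameters per invocation.
    static Function<Map<String, Object>, String> lazyRenderer(String source) {
        CompiledTemplate compiled = new CompiledTemplate(source); // done once, fails fast
        return params -> compiled.render(params);                 // done per call
    }

    public static void main(String[] args) {
        Function<Map<String, Object>, String> renderer = lazyRenderer("doc[{field}] * {factor}");
        System.out.println(renderer.apply(Map.of("field", "'rank'", "factor", 2)));
        System.out.println(renderer.apply(Map.of("field", "'score'", "factor", 10)));
    }
}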

Example 4 with Function

Use of java.util.function.Function in project vert.x by eclipse.

Class MetricsContextTest, method testHttpClientWebsocket:

private void testHttpClientWebsocket(Function<Vertx, Context> contextFactory, BiConsumer<Thread, Context> checker) throws Exception {
    AtomicReference<Thread> expectedThread = new AtomicReference<>();
    AtomicReference<Context> expectedContext = new AtomicReference<>();
    AtomicBoolean websocketConnected = new AtomicBoolean();
    AtomicBoolean websocketDisconnected = new AtomicBoolean();
    AtomicBoolean socketConnectedCalled = new AtomicBoolean();
    AtomicBoolean socketDisconnectedCalled = new AtomicBoolean();
    AtomicBoolean bytesReadCalled = new AtomicBoolean();
    AtomicBoolean bytesWrittenCalled = new AtomicBoolean();
    AtomicBoolean closeCalled = new AtomicBoolean();
    VertxMetricsFactory factory = (vertx, options) -> new DummyVertxMetrics() {

        @Override
        public HttpClientMetrics createMetrics(HttpClient client, HttpClientOptions options) {
            return new DummyHttpClientMetrics() {

                @Override
                public Void connected(Void endpointMetric, Void socketMetric, WebSocket webSocket) {
                    websocketConnected.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                    return null;
                }

                @Override
                public void disconnected(Void webSocketMetric) {
                    websocketDisconnected.set(true);
                }

                @Override
                public Void connected(SocketAddress remoteAddress, String remoteName) {
                    socketConnectedCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                    return null;
                }

                @Override
                public void disconnected(Void socketMetric, SocketAddress remoteAddress) {
                    socketDisconnectedCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public void bytesRead(Void socketMetric, SocketAddress remoteAddress, long numberOfBytes) {
                    bytesReadCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public void bytesWritten(Void socketMetric, SocketAddress remoteAddress, long numberOfBytes) {
                    bytesWrittenCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public void close() {
                    closeCalled.set(true);
                }

                @Override
                public boolean isEnabled() {
                    return true;
                }
            };
        }
    };
    Vertx vertx = vertx(new VertxOptions().setMetricsOptions(new MetricsOptions().setEnabled(true).setFactory(factory)));
    HttpServer server = vertx.createHttpServer();
    server.websocketHandler(ws -> {
        ws.handler(buf -> {
            ws.write(Buffer.buffer("bye"));
        });
    });
    CountDownLatch latch = new CountDownLatch(1);
    server.listen(8080, "localhost", onSuccess(s -> {
        latch.countDown();
    }));
    awaitLatch(latch);
    Context ctx = contextFactory.apply(vertx);
    ctx.runOnContext(v1 -> {
        expectedThread.set(Thread.currentThread());
        expectedContext.set(Vertx.currentContext());
        HttpClient client = vertx.createHttpClient();
        checker.accept(expectedThread.get(), expectedContext.get());
        client.websocket(8080, "localhost", "/", ws -> {
            ws.handler(buf -> {
                ws.closeHandler(v2 -> {
                    executeInVanillaThread(() -> {
                        client.close();
                        vertx.close(v3 -> {
                            assertTrue(websocketConnected.get());
                            assertTrue(websocketDisconnected.get());
                            assertTrue(socketConnectedCalled.get());
                            assertTrue(socketDisconnectedCalled.get());
                            assertTrue(bytesReadCalled.get());
                            assertTrue(bytesWrittenCalled.get());
                            assertTrue(closeCalled.get());
                            testComplete();
                        });
                    });
                });
                ws.close();
            });
            ws.write(Buffer.buffer("hello"));
        });
    });
    await();
}
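
The test body is parameterized by a Function<Vertx, Context> so the same scenario can run against different kinds of contexts (for example an event-loop context versus a worker context) without duplicating the test. Below is a stripped-down, framework-free sketch of that factory pattern; Platform and ExecutionContext are hypothetical stand-ins for Vertx and Context.

import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Function;

public class ContextFactoryDemo {

    // Hypothetical stand-ins for Vertx and Context.
    record Platform(String name) { }
    record ExecutionContext(String label) { }

    // The scenario is written once and receives the context factory as a parameter,
    // just like testHttpClientWebsocket(Function<Vertx, Context>, BiConsumer<...>).
    static void runScenario(Platform platform,
                            Function<Platform, ExecutionContext> contextFactory,
                            BiConsumer<Platform, ExecutionContext> checker) {
        ExecutionContext ctx = contextFactory.apply(platform);
        checker.accept(platform, ctx);
    }

    public static void main(String[] args) {
        Platform platform = new Platform("test-platform");

        // Each factory produces a different kind of context for the same scenario.
        List<Function<Platform, ExecutionContext>> factories = List.of(
                p -> new ExecutionContext(p.name() + "/event-loop"),
                p -> new ExecutionContext(p.name() + "/worker"));

        for (Function<Platform, ExecutionContext> factory : factories) {
            runScenario(platform, factory,
                    (p, ctx) -> System.out.println("ran on " + ctx.label()));
        }
    }
}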

Example 5 with Function

Use of java.util.function.Function in project vert.x by eclipse.

Class MetricsContextTest, method testHttpServerWebsocket:

private void testHttpServerWebsocket(Function<Vertx, Context> contextFactory, BiConsumer<Thread, Context> checker) throws Exception {
    AtomicReference<Thread> expectedThread = new AtomicReference<>();
    AtomicReference<Context> expectedContext = new AtomicReference<>();
    AtomicBoolean websocketConnected = new AtomicBoolean();
    AtomicBoolean websocketDisconnected = new AtomicBoolean();
    AtomicBoolean socketConnectedCalled = new AtomicBoolean();
    AtomicBoolean socketDisconnectedCalled = new AtomicBoolean();
    AtomicBoolean bytesReadCalled = new AtomicBoolean();
    AtomicBoolean bytesWrittenCalled = new AtomicBoolean();
    AtomicBoolean closeCalled = new AtomicBoolean();
    VertxMetricsFactory factory = (vertx, options) -> new DummyVertxMetrics() {

        @Override
        public HttpServerMetrics createMetrics(HttpServer server, SocketAddress localAddress, HttpServerOptions options) {
            return new DummyHttpServerMetrics() {

                @Override
                public Void connected(Void socketMetric, ServerWebSocket serverWebSocket) {
                    websocketConnected.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                    return null;
                }

                @Override
                public void disconnected(Void serverWebSocketMetric) {
                    websocketDisconnected.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public Void connected(SocketAddress remoteAddress, String remoteName) {
                    socketConnectedCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                    return null;
                }

                @Override
                public void disconnected(Void socketMetric, SocketAddress remoteAddress) {
                    socketDisconnectedCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public void bytesRead(Void socketMetric, SocketAddress remoteAddress, long numberOfBytes) {
                    bytesReadCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public void bytesWritten(Void socketMetric, SocketAddress remoteAddress, long numberOfBytes) {
                    bytesWrittenCalled.set(true);
                    checker.accept(expectedThread.get(), expectedContext.get());
                }

                @Override
                public boolean isEnabled() {
                    return true;
                }

                @Override
                public void close() {
                    closeCalled.set(true);
                }
            };
        }
    };
    CountDownLatch latch = new CountDownLatch(1);
    Vertx vertx = vertx(new VertxOptions().setMetricsOptions(new MetricsOptions().setEnabled(true).setFactory(factory)));
    Context ctx = contextFactory.apply(vertx);
    ctx.runOnContext(v1 -> {
        HttpServer server = vertx.createHttpServer().websocketHandler(ws -> {
            ws.handler(buf -> {
                ws.write(Buffer.buffer("bye"));
            });
        });
        server.listen(8080, "localhost", onSuccess(s -> {
            expectedThread.set(Thread.currentThread());
            expectedContext.set(Vertx.currentContext());
            latch.countDown();
        }));
    });
    awaitLatch(latch);
    HttpClient client = vertx.createHttpClient();
    client.websocket(8080, "localhost", "/", ws -> {
        ws.handler(buf -> {
            ws.closeHandler(v -> {
                vertx.close(v4 -> {
                    assertTrue(websocketConnected.get());
                    assertTrue(websocketDisconnected.get());
                    assertTrue(bytesReadCalled.get());
                    assertTrue(bytesWrittenCalled.get());
                    assertTrue(socketConnectedCalled.get());
                    assertTrue(socketDisconnectedCalled.get());
                    assertTrue(closeCalled.get());
                    testComplete();
                });
            });
            ws.close();
        });
        ws.write(Buffer.buffer("hello"));
    });
    await();
}
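
The companion parameter, BiConsumer<Thread, Context>, carries the assertion policy: the test records the expected thread and context up front, and every metrics callback replays the same check via checker.accept(...). A minimal, framework-free sketch of that checker idea follows; the names are illustrative and the check is reduced to a thread comparison.

import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;

public class CallbackCheckerDemo {

    public static void main(String[] args) throws InterruptedException {
        AtomicReference<Thread> expectedThread = new AtomicReference<>();

        // The checker encodes the assertion once; every callback reuses it, just as
        // the metrics callbacks call checker.accept(expectedThread.get(), ...).
        BiConsumer<Thread, Thread> checker = (expected, actual) -> {
            if (expected != actual) {
                throw new AssertionError("callback ran on " + actual + ", expected " + expected);
            }
        };

        Runnable callback = () -> checker.accept(expectedThread.get(), Thread.currentThread());

        Thread worker = new Thread(() -> {
            expectedThread.set(Thread.currentThread()); // recorded inside the "context"
            callback.run();                             // same thread, so the check passes
        }, "worker");
        worker.start();
        worker.join();

        System.out.println("checker passed on " + worker.getName());
    }
}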
