Search in sources:

Example 1 with Stream

use of java.util.stream.Stream in project camel by apache.

The snippet below is from the class CdiCamelExtension, method beans.

/**
 * Records each CDI bean discovered during deployment and registers a
 * {@link CdiEventEndpoint} for every injection point whose raw type is
 * {@code CdiEventEndpoint}.
 *
 * @param pb      the bean currently being processed by the CDI container
 * @param manager the bean manager passed to the created event endpoints
 */
private void beans(@Observes ProcessBean<?> pb, BeanManager manager) {
    cdiBeans.add(pb.getBean());
    // Lookup for CDI event endpoint injection points
    for (InjectionPoint ip : pb.getBean().getInjectionPoints()) {
        if (!CdiEventEndpoint.class.equals(getRawType(ip.getType()))) {
            continue;
        }
        // Use the first type argument when parameterized, otherwise fall back to Object
        Type eventType = ip.getType() instanceof ParameterizedType
            ? ((ParameterizedType) ip.getType()).getActualTypeArguments()[0]
            : Object.class;
        String uri = eventEndpointUri(eventType, ip.getQualifiers());
        cdiEventEndpoints.put(uri, new CdiEventEndpoint<>(uri, eventType, ip.getQualifiers(), manager));
    }
}
Also used : CdiSpiHelper.isAnnotationType(org.apache.camel.cdi.CdiSpiHelper.isAnnotationType) Produces(javax.enterprise.inject.Produces) AfterBeanDiscovery(javax.enterprise.inject.spi.AfterBeanDiscovery) LoggerFactory(org.slf4j.LoggerFactory) ProcessBean(javax.enterprise.inject.spi.ProcessBean) Endpoint(org.apache.camel.Endpoint) PropertyInject(org.apache.camel.PropertyInject) EventObject(java.util.EventObject) ProcessProducerField(javax.enterprise.inject.spi.ProcessProducerField) ProcessObserverMethod(javax.enterprise.inject.spi.ProcessObserverMethod) Map(java.util.Map) Observes(javax.enterprise.event.Observes) ProducerTemplate(org.apache.camel.ProducerTemplate) ANY(org.apache.camel.cdi.AnyLiteral.ANY) Method(java.lang.reflect.Method) DefaultCamelContext(org.apache.camel.impl.DefaultCamelContext) Collectors.toSet(java.util.stream.Collectors.toSet) CamelContextAware(org.apache.camel.CamelContextAware) Annotated(javax.enterprise.inject.spi.Annotated) Extension(javax.enterprise.inject.spi.Extension) CdiSpiHelper.hasType(org.apache.camel.cdi.CdiSpiHelper.hasType) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ProcessAnnotatedType(javax.enterprise.inject.spi.ProcessAnnotatedType) BeanManagerHelper.getReference(org.apache.camel.cdi.BeanManagerHelper.getReference) Set(java.util.Set) ResourceHelper.getResource(org.apache.camel.cdi.ResourceHelper.getResource) CdiSpiHelper.getRawType(org.apache.camel.cdi.CdiSpiHelper.getRawType) ProcessProducerMethod(javax.enterprise.inject.spi.ProcessProducerMethod) BeanInject(org.apache.camel.BeanInject) Stream(java.util.stream.Stream) Type(java.lang.reflect.Type) CdiEventEndpoint.eventEndpointUri(org.apache.camel.cdi.CdiEventEndpoint.eventEndpointUri) Annotation(java.lang.annotation.Annotation) ConsumerTemplate(org.apache.camel.ConsumerTemplate) EXCLUDED(org.apache.camel.cdi.Excluded.EXCLUDED) Bean(javax.enterprise.inject.spi.Bean) RoutesBuilder(org.apache.camel.RoutesBuilder) 
EndpointInject(org.apache.camel.EndpointInject) Component(org.apache.camel.Component) AbstractExchangeEvent(org.apache.camel.management.event.AbstractExchangeEvent) Collectors.collectingAndThen(java.util.stream.Collectors.collectingAndThen) AnnotatedType(javax.enterprise.inject.spi.AnnotatedType) Produce(org.apache.camel.Produce) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ProcessProducer(javax.enterprise.inject.spi.ProcessProducer) Collections.newSetFromMap(java.util.Collections.newSetFromMap) APPLICATION_SCOPED(org.apache.camel.cdi.ApplicationScopedLiteral.APPLICATION_SCOPED) BeanManagerHelper.getReferencesByType(org.apache.camel.cdi.BeanManagerHelper.getReferencesByType) FluentProducerTemplate(org.apache.camel.FluentProducerTemplate) AfterDeploymentValidation(javax.enterprise.inject.spi.AfterDeploymentValidation) Stream.concat(java.util.stream.Stream.concat) Named(javax.inject.Named) DEFAULT(org.apache.camel.cdi.DefaultLiteral.DEFAULT) ServiceStatus(org.apache.camel.ServiceStatus) CamelContext(org.apache.camel.CamelContext) Default(javax.enterprise.inject.Default) Logger(org.slf4j.Logger) CdiSpiHelper.getQualifiers(org.apache.camel.cdi.CdiSpiHelper.getQualifiers) Consume(org.apache.camel.Consume) Predicate.isEqual(java.util.function.Predicate.isEqual) STARTUP(org.apache.camel.cdi.Startup.Literal.STARTUP) InjectionException(javax.enterprise.inject.InjectionException) Converter(org.apache.camel.Converter) ProcessInjectionTarget(javax.enterprise.inject.spi.ProcessInjectionTarget) TypeConverter(org.apache.camel.TypeConverter) ParameterizedType(java.lang.reflect.ParameterizedType) RouteContainer(org.apache.camel.model.RouteContainer) CdiSpiHelper.hasAnnotation(org.apache.camel.cdi.CdiSpiHelper.hasAnnotation) InjectionPoint(javax.enterprise.inject.spi.InjectionPoint) BeanManager(javax.enterprise.inject.spi.BeanManager) ParameterizedType(java.lang.reflect.ParameterizedType) CdiSpiHelper.isAnnotationType(org.apache.camel.cdi.CdiSpiHelper.isAnnotationType) 
CdiSpiHelper.hasType(org.apache.camel.cdi.CdiSpiHelper.hasType) ProcessAnnotatedType(javax.enterprise.inject.spi.ProcessAnnotatedType) CdiSpiHelper.getRawType(org.apache.camel.cdi.CdiSpiHelper.getRawType) Type(java.lang.reflect.Type) AnnotatedType(javax.enterprise.inject.spi.AnnotatedType) BeanManagerHelper.getReferencesByType(org.apache.camel.cdi.BeanManagerHelper.getReferencesByType) ParameterizedType(java.lang.reflect.ParameterizedType)

Example 2 with Stream

use of java.util.stream.Stream in project hbase by apache.

The snippet below is from the class AsyncBatchRpcRetryingCaller, method send.

/**
 * Sends the batched actions, grouped by target region server, for the given retry attempt.
 * <p>
 * When an operation timeout is configured and already exhausted, every pending action is
 * failed immediately. Otherwise a MultiRequest is built per server and dispatched
 * asynchronously; errors at any stage (stub lookup, request building, RPC failure,
 * response conversion) are routed through {@code onError} for retry bookkeeping.
 *
 * @param actionsByServer the actions to send, keyed by target region server
 * @param tries           the current attempt number
 */
private void send(Map<ServerName, ServerRequest> actionsByServer, int tries) {
    long remainingNs;
    if (operationTimeoutNs > 0) {
        remainingNs = remainingTimeNs();
        if (remainingNs <= 0) {
            // Operation timeout already exceeded: fail every action across all servers and regions.
            failAll(actionsByServer.values().stream().flatMap(m -> m.actionsByRegion.values().stream()).flatMap(r -> r.actions.stream()), tries);
            return;
        }
    } else {
        // No operation timeout configured; only the per-RPC timeout applies below.
        remainingNs = Long.MAX_VALUE;
    }
    actionsByServer.forEach((sn, serverReq) -> {
        ClientService.Interface stub;
        try {
            stub = conn.getRegionServerStub(sn);
        } catch (IOException e) {
            // Could not obtain a stub for this server; hand its actions to the error path.
            onError(serverReq.actionsByRegion, tries, e, sn);
            return;
        }
        ClientProtos.MultiRequest req;
        List<CellScannable> cells = new ArrayList<>();
        try {
            // buildReq also collects cells to ship out-of-band via the cell scanner.
            req = buildReq(serverReq.actionsByRegion, cells);
        } catch (IOException e) {
            onError(serverReq.actionsByRegion, tries, e, sn);
            return;
        }
        HBaseRpcController controller = conn.rpcControllerFactory.newController();
        // Cap the per-RPC timeout by whatever operation time remains.
        resetController(controller, Math.min(rpcTimeoutNs, remainingNs));
        if (!cells.isEmpty()) {
            controller.setCellScanner(createCellScanner(cells));
        }
        stub.multi(controller, req, resp -> {
            if (controller.failed()) {
                onError(serverReq.actionsByRegion, tries, controller.getFailed(), sn);
            } else {
                try {
                    onComplete(serverReq.actionsByRegion, tries, sn, ResponseConverter.getResults(req, resp, controller.cellScanner()));
                } catch (Exception e) {
                    // Response conversion failed; treat like any other per-server error.
                    onError(serverReq.actionsByRegion, tries, e, sn);
                    return;
                }
            }
        });
    });
}
Also used : ConnectionUtils.getPauseTime(org.apache.hadoop.hbase.client.ConnectionUtils.getPauseTime) ResponseConverter(org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter) CompletableFuture(java.util.concurrent.CompletableFuture) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) ConnectionUtils.translateException(org.apache.hadoop.hbase.client.ConnectionUtils.translateException) RegionResult(org.apache.hadoop.hbase.client.MultiResponse.RegionResult) Map(java.util.Map) ServerName(org.apache.hadoop.hbase.ServerName) Bytes(org.apache.hadoop.hbase.util.Bytes) CellScannable(org.apache.hadoop.hbase.CellScannable) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) TableName(org.apache.hadoop.hbase.TableName) IdentityHashMap(java.util.IdentityHashMap) CellUtil.createCellScanner(org.apache.hadoop.hbase.CellUtil.createCellScanner) ThrowableWithExtraContext(org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) IOException(java.io.IOException) HBaseRpcController(org.apache.hadoop.hbase.ipc.HBaseRpcController) CollectionUtils.computeIfAbsent(org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent) Collectors(java.util.stream.Collectors) RequestConverter(org.apache.hadoop.hbase.shaded.protobuf.RequestConverter) TimeUnit(java.util.concurrent.TimeUnit) ConnectionUtils.resetController(org.apache.hadoop.hbase.client.ConnectionUtils.resetController) List(java.util.List) ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap) Stream(java.util.stream.Stream) RegionSpecifierType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) SLEEP_DELTA_NS(org.apache.hadoop.hbase.client.ConnectionUtils.SLEEP_DELTA_NS) 
EnvironmentEdgeManager(org.apache.hadoop.hbase.util.EnvironmentEdgeManager) HashedWheelTimer(io.netty.util.HashedWheelTimer) Optional(java.util.Optional) Log(org.apache.commons.logging.Log) LogFactory(org.apache.commons.logging.LogFactory) InterfaceAudience(org.apache.hadoop.hbase.classification.InterfaceAudience) ClientService(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService) Collections(java.util.Collections) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) CellScannable(org.apache.hadoop.hbase.CellScannable) HBaseRpcController(org.apache.hadoop.hbase.ipc.HBaseRpcController) ClientService(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService) ArrayList(java.util.ArrayList) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) ConnectionUtils.translateException(org.apache.hadoop.hbase.client.ConnectionUtils.translateException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException)

Example 3 with Stream

use of java.util.stream.Stream in project crate by crate.

The snippet below is from the class BlobPathITest, method testDataIsStoredInGlobalBlobPath.

@Test
public void testDataIsStoredInGlobalBlobPath() throws Exception {
    // Start a node configured with a single global blob path, then store one blob.
    launchNodeAndInitClient(configureGlobalBlobPath());
    Settings indexSettings = oneShardAndZeroReplicas();
    blobAdminClient.createBlobTable("test", indexSettings).get();
    client.put("test", "abcdefg");
    String digest = "2fb5e13419fc89246865e7a324f476ec624e8740";
    // The blob must appear somewhere under the global blob path, named by its digest.
    try (Stream<Path> files = Files.walk(globalBlobPath)) {
        boolean digestFound = files
            .map(Path::getFileName)
            .map(Object::toString)
            .anyMatch(digest::equals);
        assertThat(digestFound, is(true));
    }
}
Also used : Path(java.nio.file.Path) HttpServerTransport(org.elasticsearch.http.HttpServerTransport) Files(java.nio.file.Files) InetSocketTransportAddress(org.elasticsearch.common.transport.InetSocketTransportAddress) SETTING_NUMBER_OF_SHARDS(org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS) Matchers(org.hamcrest.Matchers) Test(org.junit.Test) IOException(java.io.IOException) BlobIndicesService(io.crate.blob.v2.BlobIndicesService) InetSocketAddress(java.net.InetSocketAddress) Collectors(java.util.stream.Collectors) BlobAdminClient(io.crate.blob.v2.BlobAdminClient) SETTING_NUMBER_OF_REPLICAS(org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS) List(java.util.List) Settings(org.elasticsearch.common.settings.Settings) Stream(java.util.stream.Stream) ESIntegTestCase(org.elasticsearch.test.ESIntegTestCase) Matchers.lessThan(org.hamcrest.Matchers.lessThan) Is.is(org.hamcrest.core.Is.is) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) Path(java.nio.file.Path) Settings(org.elasticsearch.common.settings.Settings) Test(org.junit.Test)

Example 4 with Stream

use of java.util.stream.Stream in project elasticsearch by elastic.

The snippet below is from the class SharedClusterSnapshotRestoreIT, method testGetSnapshotsRequest.

/**
 * Exercises the get-snapshots API end to end: missing-snapshot errors on an empty
 * repository, the ignoreUnavailable flag, resolving {@code _all}/{@code _current} and
 * wildcard patterns while a snapshot is in progress, and de-duplication of snapshot
 * names matched by several overlapping patterns.
 */
public void testGetSnapshotsRequest() throws Exception {
    final String repositoryName = "test-repo";
    final String indexName = "test-idx";
    final Client client = client();
    final Path repo = randomRepoPath();
    logger.info("-->  creating repository at {}", repo.toAbsolutePath());
    assertAcked(client.admin().cluster().preparePutRepository(repositoryName).setType("mock").setSettings(Settings.builder().put("location", repo).put("compress", false).put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES).put("wait_after_unblock", 200)));
    logger.info("--> get snapshots on an empty repository");
    // Asking for a missing snapshot by name must fail on an empty repository.
    expectThrows(SnapshotMissingException.class, () -> client.admin().cluster().prepareGetSnapshots(repositoryName).addSnapshots("non-existent-snapshot").get());
    // with ignore unavailable set to true, should not throw an exception
    GetSnapshotsResponse getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots(repositoryName).setIgnoreUnavailable(true).addSnapshots("non-existent-snapshot").get();
    assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(0));
    logger.info("--> creating an index and indexing documents");
    // Create index on 2 nodes and make sure each node has a primary by setting no replicas
    assertAcked(prepareCreate(indexName, 1, Settings.builder().put("number_of_replicas", 0)));
    ensureGreen();
    for (int i = 0; i < 10; i++) {
        index(indexName, "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    // make sure we return only the in-progress snapshot when taking the first snapshot on a clean repository
    // take initial snapshot with a block, making sure we only get 1 in-progress snapshot returned
    // block a node so the create snapshot operation can remain in progress
    final String initialBlockedNode = blockNodeWithIndex(repositoryName, indexName);
    ListenableActionFuture<CreateSnapshotResponse> responseListener = client.admin().cluster().prepareCreateSnapshot(repositoryName, "snap-on-empty-repo").setWaitForCompletion(false).setIndices(indexName).execute();
    // wait for block to kick in
    waitForBlock(initialBlockedNode, repositoryName, TimeValue.timeValueSeconds(60));
    // Every pattern form (_all, _current, wildcards, exact name) must resolve to the single in-progress snapshot.
    getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots(randomFrom("_all", "_current", "snap-on-*", "*-on-empty-repo", "snap-on-empty-repo")).get();
    assertEquals(1, getSnapshotsResponse.getSnapshots().size());
    assertEquals("snap-on-empty-repo", getSnapshotsResponse.getSnapshots().get(0).snapshotId().getName());
    // unblock node
    unblockNode(repositoryName, initialBlockedNode);
    // timeout after 10 seconds
    responseListener.actionGet(TimeValue.timeValueMillis(10000L));
    client.admin().cluster().prepareDeleteSnapshot(repositoryName, "snap-on-empty-repo").get();
    // Take numSnapshots-1 completed snapshots, leaving room for one in-progress snapshot below.
    final int numSnapshots = randomIntBetween(1, 3) + 1;
    logger.info("--> take {} snapshot(s)", numSnapshots - 1);
    final String[] snapshotNames = new String[numSnapshots];
    for (int i = 0; i < numSnapshots - 1; i++) {
        final String snapshotName = randomAsciiOfLength(8).toLowerCase(Locale.ROOT);
        CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot(repositoryName, snapshotName).setWaitForCompletion(true).setIndices(indexName).get();
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
        snapshotNames[i] = snapshotName;
    }
    logger.info("--> take another snapshot to be in-progress");
    // add documents so there are data files to block on
    for (int i = 10; i < 20; i++) {
        index(indexName, "doc", Integer.toString(i), "foo", "bar" + i);
    }
    refresh();
    final String inProgressSnapshot = randomAsciiOfLength(8).toLowerCase(Locale.ROOT);
    snapshotNames[numSnapshots - 1] = inProgressSnapshot;
    // block a node so the create snapshot operation can remain in progress
    final String blockedNode = blockNodeWithIndex(repositoryName, indexName);
    client.admin().cluster().prepareCreateSnapshot(repositoryName, inProgressSnapshot).setWaitForCompletion(false).setIndices(indexName).get();
    // wait for block to kick in
    waitForBlock(blockedNode, repositoryName, TimeValue.timeValueSeconds(60));
    logger.info("--> get all snapshots with a current in-progress");
    // with ignore unavailable set to true, should not throw an exception
    final List<String> snapshotsToGet = new ArrayList<>();
    if (randomBoolean()) {
        // use _current plus the individual names of the finished snapshots
        snapshotsToGet.add("_current");
        for (int i = 0; i < numSnapshots - 1; i++) {
            snapshotsToGet.add(snapshotNames[i]);
        }
    } else {
        snapshotsToGet.add("_all");
    }
    getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots(repositoryName).setSnapshots(snapshotsToGet.toArray(Strings.EMPTY_ARRAY)).get();
    List<String> sortedNames = Arrays.asList(snapshotNames);
    Collections.sort(sortedNames);
    assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(numSnapshots));
    assertThat(getSnapshotsResponse.getSnapshots().stream().map(s -> s.snapshotId().getName()).sorted().collect(Collectors.toList()), equalTo(sortedNames));
    // Requesting by explicit names must yield the same sorted set.
    getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots(repositoryName).addSnapshots(snapshotNames).get();
    sortedNames = Arrays.asList(snapshotNames);
    Collections.sort(sortedNames);
    assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(numSnapshots));
    assertThat(getSnapshotsResponse.getSnapshots().stream().map(s -> s.snapshotId().getName()).sorted().collect(Collectors.toList()), equalTo(sortedNames));
    logger.info("--> make sure duplicates are not returned in the response");
    // Split a random snapshot name into two overlapping wildcard patterns.
    String regexName = snapshotNames[randomIntBetween(0, numSnapshots - 1)];
    final int splitPos = regexName.length() / 2;
    final String firstRegex = regexName.substring(0, splitPos) + "*";
    final String secondRegex = "*" + regexName.substring(splitPos);
    getSnapshotsResponse = client.admin().cluster().prepareGetSnapshots(repositoryName).addSnapshots(snapshotNames).addSnapshots(firstRegex, secondRegex).get();
    assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(numSnapshots));
    assertThat(getSnapshotsResponse.getSnapshots().stream().map(s -> s.snapshotId().getName()).sorted().collect(Collectors.toList()), equalTo(sortedNames));
    // unblock node
    unblockNode(repositoryName, blockedNode);
    waitForCompletion(repositoryName, inProgressSnapshot, TimeValue.timeValueSeconds(60));
}
Also used : Path(java.nio.file.Path) ShardId(org.elasticsearch.index.shard.ShardId) ByteSizeUnit(org.elasticsearch.common.unit.ByteSizeUnit) Arrays(java.util.Arrays) RestoreSnapshotResponse(org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse) ClusterBlocks(org.elasticsearch.cluster.block.ClusterBlocks) ClusterState(org.elasticsearch.cluster.ClusterState) ClusterStateUpdateTask(org.elasticsearch.cluster.ClusterStateUpdateTask) Matchers.nullValue(org.hamcrest.Matchers.nullValue) Path(java.nio.file.Path) Priority(org.elasticsearch.common.Priority) GetSettingsResponse(org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse) Matchers.notNullValue(org.hamcrest.Matchers.notNullValue) TestLogging(org.elasticsearch.test.junit.annotations.TestLogging) Matchers.allOf(org.hamcrest.Matchers.allOf) DeletePipelineRequest(org.elasticsearch.action.ingest.DeletePipelineRequest) ElasticsearchAssertions.assertAliasesMissing(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesMissing) Matchers.startsWith(org.hamcrest.Matchers.startsWith) ElasticsearchAssertions.assertBlocked(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked) CountDownLatch(java.util.concurrent.CountDownLatch) Stream(java.util.stream.Stream) QueryBuilders.matchQuery(org.elasticsearch.index.query.QueryBuilders.matchQuery) IndexMetaData(org.elasticsearch.cluster.metadata.IndexMetaData) SnapshotIndexStatus(org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexStatus) InvalidIndexNameException(org.elasticsearch.indices.InvalidIndexNameException) Matchers.greaterThan(org.hamcrest.Matchers.greaterThan) Matchers.is(org.hamcrest.Matchers.is) Matchers.containsString(org.hamcrest.Matchers.containsString) ShardSnapshotStatus(org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus) XContentFactory(org.elasticsearch.common.xcontent.XContentFactory) ImmutableOpenMap(org.elasticsearch.common.collect.ImmutableOpenMap) 
GetPipelineResponse(org.elasticsearch.action.ingest.GetPipelineResponse) ClusterService(org.elasticsearch.cluster.service.ClusterService) ArrayList(java.util.ArrayList) BytesArray(org.elasticsearch.common.bytes.BytesArray) SnapshotStatus(org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus) ElasticsearchAssertions.assertIndexTemplateMissing(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateMissing) Matchers.lessThan(org.hamcrest.Matchers.lessThan) IndicesService(org.elasticsearch.indices.IndicesService) FlushResponse(org.elasticsearch.action.admin.indices.flush.FlushResponse) ElasticsearchAssertions.assertThrows(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows) Files(java.nio.file.Files) SETTING_NUMBER_OF_SHARDS(org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS) Client(org.elasticsearch.client.Client) IndexService(org.elasticsearch.index.IndexService) IOUtils(org.apache.lucene.util.IOUtils) RepositoriesService(org.elasticsearch.repositories.RepositoriesService) ExecutionException(java.util.concurrent.ExecutionException) SnapshotIndexShardStage(org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStage) SnapshotsStatusResponse(org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse) ElasticsearchAssertions.assertAcked(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked) MappingMetaData(org.elasticsearch.cluster.metadata.MappingMetaData) PutRepositoryResponse(org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse) IngestTestPlugin(org.elasticsearch.ingest.IngestTestPlugin) Settings(org.elasticsearch.common.settings.Settings) Locale(java.util.Locale) SearchResponse(org.elasticsearch.action.search.SearchResponse) XContentFactory.jsonBuilder(org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder) RepositoryException(org.elasticsearch.repositories.RepositoryException) 
ElasticsearchAssertions.assertIndexTemplateExists(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateExists) Collection(java.util.Collection) StandardOpenOption(java.nio.file.StandardOpenOption) State(org.elasticsearch.cluster.SnapshotsInProgress.State) ElasticsearchAssertions.assertHitCount(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount) GetStoredScriptResponse(org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse) BytesReference(org.elasticsearch.common.bytes.BytesReference) Collectors(java.util.stream.Collectors) ActiveShardCount(org.elasticsearch.action.support.ActiveShardCount) SeekableByteChannel(java.nio.channels.SeekableByteChannel) List(java.util.List) Version(org.elasticsearch.Version) IndexRequestBuilder(org.elasticsearch.action.index.IndexRequestBuilder) IndexRoutingTable(org.elasticsearch.cluster.routing.IndexRoutingTable) MockScriptEngine(org.elasticsearch.script.MockScriptEngine) Matchers.equalTo(org.hamcrest.Matchers.equalTo) ElasticsearchAssertions.assertAliasesExist(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesExist) CreateSnapshotResponse(org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse) RepositoryData(org.elasticsearch.repositories.RepositoryData) XContentType(org.elasticsearch.common.xcontent.XContentType) ListenableActionFuture(org.elasticsearch.action.ListenableActionFuture) IndexId(org.elasticsearch.repositories.IndexId) Entry(org.elasticsearch.cluster.SnapshotsInProgress.Entry) Strings(org.elasticsearch.common.Strings) SETTING_NUMBER_OF_REPLICAS(org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS) SnapshotIndexShardStatus(org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus) TimeValue(org.elasticsearch.common.unit.TimeValue) GetSnapshotsResponse(org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse) Plugin(org.elasticsearch.plugins.Plugin) 
MockRepository(org.elasticsearch.snapshots.mockstore.MockRepository) INDEX_REFRESH_INTERVAL_SETTING(org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING) ClusterStateResponse(org.elasticsearch.action.admin.cluster.state.ClusterStateResponse) GetIndexTemplatesResponse(org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse) TimeUnit(java.util.concurrent.TimeUnit) ExceptionsHelper(org.elasticsearch.ExceptionsHelper) SnapshotsInProgress(org.elasticsearch.cluster.SnapshotsInProgress) ElasticsearchAssertions.assertAllSuccessful(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful) StoredScriptsIT(org.elasticsearch.script.StoredScriptsIT) Collections(java.util.Collections) MetaDataIndexStateService(org.elasticsearch.cluster.metadata.MetaDataIndexStateService) ElasticsearchAssertions.assertNoFailures(org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures) GetSnapshotsResponse(org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse) CreateSnapshotResponse(org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse) ArrayList(java.util.ArrayList) Matchers.containsString(org.hamcrest.Matchers.containsString) Client(org.elasticsearch.client.Client)

Example 5 with Stream

use of java.util.stream.Stream in project che by eclipse.

The snippet below is from the class OpenShiftConnector, method getCheServiceBySelector.

/**
 * Looks up a Service in the configured OpenShift project whose spec selector
 * contains {@code selectorKey} mapped to {@code selectorValue}.
 *
 * @param selectorKey   selector entry key to match
 * @param selectorValue selector entry value to match
 * @return a matching service, or {@code null} when none exists (a warning is logged)
 */
private Service getCheServiceBySelector(String selectorKey, String selectorValue) {
    ServiceList svcs = openShiftClient.services().inNamespace(this.openShiftCheProjectName).list();
    Service match = null;
    for (Service candidate : svcs.getItems()) {
        Map<String, String> selector = candidate.getSpec().getSelector();
        if (selector.containsKey(selectorKey) && selector.get(selectorKey).equals(selectorValue)) {
            match = candidate;
            break;
        }
    }
    if (match == null) {
        LOG.warn("No Service with selector {}={} could be found", selectorKey, selectorValue);
    }
    return match;
}
Also used : VolumeMount(io.fabric8.kubernetes.api.model.VolumeMount) Arrays(java.util.Arrays) RemoveContainerParams(org.eclipse.che.plugin.docker.client.params.RemoveContainerParams) HostConfig(org.eclipse.che.plugin.docker.client.json.HostConfig) Deployment(io.fabric8.kubernetes.api.model.extensions.Deployment) LoggerFactory(org.slf4j.LoggerFactory) MessageProcessor(org.eclipse.che.plugin.docker.client.MessageProcessor) IpamConfig(org.eclipse.che.plugin.docker.client.json.network.IpamConfig) KubernetesContainer(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesContainer) ServicePort(io.fabric8.kubernetes.api.model.ServicePort) ContainerListEntry(org.eclipse.che.plugin.docker.client.json.ContainerListEntry) DefaultOpenShiftClient(io.fabric8.openshift.client.DefaultOpenShiftClient) VolumeMountBuilder(io.fabric8.kubernetes.api.model.VolumeMountBuilder) DockerRegistryAuthResolver(org.eclipse.che.plugin.docker.client.DockerRegistryAuthResolver) Filters(org.eclipse.che.plugin.docker.client.json.Filters) Map(java.util.Map) NetworkSettings(org.eclipse.che.plugin.docker.client.json.NetworkSettings) ContainerBuilder(io.fabric8.kubernetes.api.model.ContainerBuilder) DockerConnectionFactory(org.eclipse.che.plugin.docker.client.connection.DockerConnectionFactory) ImageStreamTag(io.fabric8.openshift.api.model.ImageStreamTag) ImageInfo(org.eclipse.che.plugin.docker.client.json.ImageInfo) InspectImageParams(org.eclipse.che.plugin.docker.client.params.InspectImageParams) Set(java.util.Set) OpenShiftException(org.eclipse.che.plugin.openshift.client.exception.OpenShiftException) VolumeBuilder(io.fabric8.kubernetes.api.model.VolumeBuilder) Collectors(java.util.stream.Collectors) GetNetworksParams(org.eclipse.che.plugin.docker.client.params.network.GetNetworksParams) KubernetesService(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesService) ConnectContainerToNetworkParams(org.eclipse.che.plugin.docker.client.params.network.ConnectContainerToNetworkParams) 
List(java.util.List) Network(org.eclipse.che.plugin.docker.client.json.network.Network) Stream(java.util.stream.Stream) CommitParams(org.eclipse.che.plugin.docker.client.params.CommitParams) ContainerConfig(org.eclipse.che.plugin.docker.client.json.ContainerConfig) GetEventsParams(org.eclipse.che.plugin.docker.client.params.GetEventsParams) DeploymentBuilder(io.fabric8.kubernetes.api.model.extensions.DeploymentBuilder) ImageConfig(org.eclipse.che.plugin.docker.client.json.ImageConfig) TagParams(org.eclipse.che.plugin.docker.client.params.TagParams) CreateContainerParams(org.eclipse.che.plugin.docker.client.params.CreateContainerParams) ServiceList(io.fabric8.kubernetes.api.model.ServiceList) DockerApiVersionPathPrefixProvider(org.eclipse.che.plugin.docker.client.DockerApiVersionPathPrefixProvider) ProbeBuilder(io.fabric8.kubernetes.api.model.ProbeBuilder) Ipam(org.eclipse.che.plugin.docker.client.json.network.Ipam) PullParams(org.eclipse.che.plugin.docker.client.params.PullParams) DockerConnectorConfiguration(org.eclipse.che.plugin.docker.client.DockerConnectorConfiguration) ImageNotFoundException(org.eclipse.che.plugin.docker.client.exception.ImageNotFoundException) Container(io.fabric8.kubernetes.api.model.Container) PutResourceParams(org.eclipse.che.plugin.docker.client.params.PutResourceParams) StopContainerParams(org.eclipse.che.plugin.docker.client.params.StopContainerParams) Strings.isNullOrEmpty(com.google.common.base.Strings.isNullOrEmpty) HashMap(java.util.HashMap) ContainerCreated(org.eclipse.che.plugin.docker.client.json.ContainerCreated) Singleton(javax.inject.Singleton) StartContainerParams(org.eclipse.che.plugin.docker.client.params.StartContainerParams) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Inject(javax.inject.Inject) RemoveImageParams(org.eclipse.che.plugin.docker.client.params.RemoveImageParams) InspectNetworkParams(org.eclipse.che.plugin.docker.client.params.network.InspectNetworkParams) 
ProgressMonitor(org.eclipse.che.plugin.docker.client.ProgressMonitor) KubernetesEnvVar(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesEnvVar) CreateNetworkParams(org.eclipse.che.plugin.docker.client.params.network.CreateNetworkParams) PodSpecBuilder(io.fabric8.kubernetes.api.model.PodSpecBuilder) PortBinding(org.eclipse.che.plugin.docker.client.json.PortBinding) PodSpec(io.fabric8.kubernetes.api.model.PodSpec) Named(javax.inject.Named) ContainerInfo(org.eclipse.che.plugin.docker.client.json.ContainerInfo) Service(io.fabric8.kubernetes.api.model.Service) DisconnectContainerFromNetworkParams(org.eclipse.che.plugin.docker.client.params.network.DisconnectContainerFromNetworkParams) KillContainerParams(org.eclipse.che.plugin.docker.client.params.KillContainerParams) Volume(io.fabric8.kubernetes.api.model.Volume) ImageStream(io.fabric8.openshift.api.model.ImageStream) Probe(io.fabric8.kubernetes.api.model.Probe) GetResourceParams(org.eclipse.che.plugin.docker.client.params.GetResourceParams) Logger(org.slf4j.Logger) Pod(io.fabric8.kubernetes.api.model.Pod) IOException(java.io.IOException) Event(org.eclipse.che.plugin.docker.client.json.Event) KubernetesLabelConverter(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesLabelConverter) RemoveNetworkParams(org.eclipse.che.plugin.docker.client.params.RemoveNetworkParams) OpenShiftClient(io.fabric8.openshift.client.OpenShiftClient) NetworkCreated(org.eclipse.che.plugin.docker.client.json.NetworkCreated) ContainerInNetwork(org.eclipse.che.plugin.docker.client.json.network.ContainerInNetwork) PodList(io.fabric8.kubernetes.api.model.PodList) DockerConnector(org.eclipse.che.plugin.docker.client.DockerConnector) KubernetesStringUtils(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesStringUtils) Collections(java.util.Collections) InputStream(java.io.InputStream) ServiceList(io.fabric8.kubernetes.api.model.ServiceList) 
KubernetesService(org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesService) Service(io.fabric8.kubernetes.api.model.Service)

Aggregations

Stream (java.util.stream.Stream)161 Collectors (java.util.stream.Collectors)98 List (java.util.List)89 ArrayList (java.util.ArrayList)66 Map (java.util.Map)66 Set (java.util.Set)59 IOException (java.io.IOException)58 Optional (java.util.Optional)45 Collections (java.util.Collections)43 HashMap (java.util.HashMap)43 Arrays (java.util.Arrays)33 HashSet (java.util.HashSet)33 File (java.io.File)32 Path (java.nio.file.Path)32 Function (java.util.function.Function)28 Logger (org.slf4j.Logger)26 LoggerFactory (org.slf4j.LoggerFactory)26 java.util (java.util)25 Predicate (java.util.function.Predicate)23 Objects (java.util.Objects)22