Use of org.elasticsearch.test.cluster.NoopClusterService in project crate by crate.
From the class TransportKillJobsNodeActionTest, the method testKillIsCalledOnJobContextService:
@Test
public void testKillIsCalledOnJobContextService() throws Exception {
    TransportService transportService = mock(TransportService.class);
    JobContextService jobContextService = mock(JobContextService.class, Answers.RETURNS_MOCKS.get());
    TransportKillJobsNodeAction transportKillJobsNodeAction = new TransportKillJobsNodeAction(
        Settings.EMPTY,
        jobContextService,
        new NoopClusterService(),
        transportService);

    final CountDownLatch latch = new CountDownLatch(1);
    List<UUID> toKill = ImmutableList.of(UUID.randomUUID(), UUID.randomUUID());

    transportKillJobsNodeAction.nodeOperation(new KillJobsRequest(toKill), new ActionListener<KillResponse>() {
        @Override
        public void onResponse(KillResponse killAllResponse) {
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable throwable) {
            latch.countDown();
        }
    });
    latch.await(1, TimeUnit.SECONDS);

    verify(jobContextService, times(1)).killJobs(toKill);
}
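The test above follows a common async-verification pattern: the test thread blocks on a CountDownLatch that either callback releases, and only then verifies the interaction on the mocked service. The sketch below shows that pattern in isolation, using only JUnit 4, Mockito, and the JDK; JobService, Listener, and KillAction are hypothetical stand-ins, not Crate or Elasticsearch APIs.

import static org.mockito.Mockito.*;

import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.junit.Test;

public class AsyncKillPatternTest {

    // Hypothetical callback interface, standing in for ActionListener<KillResponse>.
    interface Listener {
        void onResponse(int numKilled);
        void onFailure(Throwable t);
    }

    // Hypothetical service, standing in for JobContextService.
    interface JobService {
        int killJobs(List<UUID> jobIds);
    }

    // Hypothetical async action, standing in for TransportKillJobsNodeAction.nodeOperation().
    static class KillAction {
        private final JobService jobService;

        KillAction(JobService jobService) {
            this.jobService = jobService;
        }

        void execute(List<UUID> jobIds, Listener listener) {
            // Run the delegation on another thread so the caller must synchronize explicitly.
            new Thread(() -> {
                try {
                    listener.onResponse(jobService.killJobs(jobIds));
                } catch (Throwable t) {
                    listener.onFailure(t);
                }
            }).start();
        }
    }

    @Test
    public void killIsDelegatedToService() throws Exception {
        JobService jobService = mock(JobService.class);
        KillAction action = new KillAction(jobService);

        // Either callback releases the latch, so the test never blocks past the timeout below.
        CountDownLatch latch = new CountDownLatch(1);
        List<UUID> toKill = Arrays.asList(UUID.randomUUID(), UUID.randomUUID());

        action.execute(toKill, new Listener() {
            @Override
            public void onResponse(int numKilled) {
                latch.countDown();
            }

            @Override
            public void onFailure(Throwable t) {
                latch.countDown();
            }
        });

        latch.await(1, TimeUnit.SECONDS);
        verify(jobService, times(1)).killJobs(toKill);
    }
}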
Use of org.elasticsearch.test.cluster.NoopClusterService in project crate by crate.
From the class MapSideDataCollectOperationTest, the method testFileUriCollect:
@Test
public void testFileUriCollect() throws Exception {
    ClusterService clusterService = new NoopClusterService();
    Functions functions = getFunctions();
    CollectSourceResolver collectSourceResolver = mock(CollectSourceResolver.class);
    when(collectSourceResolver.getService(any(RoutedCollectPhase.class)))
        .thenReturn(new FileCollectSource(functions, clusterService, Collections.<String, FileInputFactory>emptyMap()));
    MapSideDataCollectOperation collectOperation = new MapSideDataCollectOperation(collectSourceResolver, threadPool);

    File tmpFile = temporaryFolder.newFile("fileUriCollectOperation.json");
    try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tmpFile), StandardCharsets.UTF_8)) {
        writer.write("{\"name\": \"Arthur\", \"id\": 4, \"details\": {\"age\": 38}}\n");
        writer.write("{\"id\": 5, \"name\": \"Trillian\", \"details\": {\"age\": 33}}\n");
    }

    FileUriCollectPhase collectNode = new FileUriCollectPhase(
        UUID.randomUUID(),
        0,
        "test",
        Collections.singletonList("noop_id"),
        Literal.of(Paths.get(tmpFile.toURI()).toUri().toString()),
        Arrays.<Symbol>asList(
            createReference("name", DataTypes.STRING),
            createReference(new ColumnIdent("details", "age"), DataTypes.INTEGER)),
        Collections.emptyList(),
        null,
        false);
    String threadPoolName = JobCollectContext.threadPoolName(collectNode, "noop_id");

    TestingBatchConsumer consumer = new TestingBatchConsumer();
    JobCollectContext jobCollectContext = mock(JobCollectContext.class);
    CrateCollector collectors = collectOperation.createCollector(collectNode, consumer, jobCollectContext);
    collectOperation.launchCollector(collectors, threadPoolName);

    assertThat(new CollectionBucket(consumer.getResult()), contains(isRow("Arthur", 38), isRow("Trillian", 33)));
}
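The fixture half of the test above is independent of Crate's collector machinery: it writes one JSON object per line into a TemporaryFolder file and hands the resulting file URI to the collect phase. A minimal sketch of just that setup, using only JUnit 4 and the JDK (no Crate classes); the assertions are illustrative only.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

public class JsonFixtureTest {

    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    @Test
    public void fixtureIsWrittenAsOneJsonObjectPerLine() throws Exception {
        File tmpFile = temporaryFolder.newFile("fileUriCollectOperation.json");

        // One JSON object per line, the same layout the file-based collect source consumes.
        try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tmpFile), StandardCharsets.UTF_8)) {
            writer.write("{\"name\": \"Arthur\", \"id\": 4, \"details\": {\"age\": 38}}\n");
            writer.write("{\"id\": 5, \"name\": \"Trillian\", \"details\": {\"age\": 33}}\n");
        }

        // The file URI literal passed to FileUriCollectPhase above is derived the same way.
        String fileUri = Paths.get(tmpFile.toURI()).toUri().toString();

        List<String> lines = Files.readAllLines(tmpFile.toPath(), StandardCharsets.UTF_8);
        assertEquals(2, lines.size());
        assertTrue(fileUri.startsWith("file:"));
    }
}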