Use of io.crate.testing.TestingRowConsumer in project crate by crate.
From the class DistributingConsumerTest, method test_exception_on_loadNextBatch_is_forwarded:
@Test
public void test_exception_on_loadNextBatch_is_forwarded() throws Exception {
    Streamer<?>[] streamers = { DataTypes.INTEGER.streamer() };
    TestingRowConsumer collectingConsumer = new TestingRowConsumer();
    DistResultRXTask distResultRXTask = createPageDownstreamContext(streamers, collectingConsumer);
    TransportDistributedResultAction distributedResultAction = createFakeTransport(streamers, distResultRXTask);
    DistributingConsumer distributingConsumer = createDistributingConsumer(streamers, distributedResultAction);
    BatchSimulatingIterator<Row> batchSimulatingIterator =
        new BatchSimulatingIterator<>(TestingBatchIterators.range(0, 5), 2, 3, executorService) {

            @Override
            public CompletionStage<?> loadNextBatch() {
                throw new CircuitBreakingException("data too large");
            }
        };
    distributingConsumer.accept(batchSimulatingIterator, null);
    expectedException.expect(CircuitBreakingException.class);
    collectingConsumer.getResult();
}
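For context, a minimal sketch of the TestingRowConsumer pattern this test builds on, assuming the crate test utilities (TestingRowConsumer, TestingBatchIterators) are on the classpath: the consumer drains a BatchIterator<Row>, and getResult() either returns the materialized rows or re-throws a failure captured during consumption.

// Minimal sketch: consume a small in-memory iterator and materialize the rows.
TestingRowConsumer consumer = new TestingRowConsumer();
consumer.accept(TestingBatchIterators.range(0, 3), null);
List<Object[]> rows = consumer.getResult(); // [[0], [1], [2]]
// Had the iterator failed (as in the test above), getResult()
// would re-throw the captured exception instead.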
Use of io.crate.testing.TestingRowConsumer in project crate by crate.
From the class IndexWriterProjectorUnitTest, method testNullPKValue:
@Test
public void testNullPKValue() throws Throwable {
    InputCollectExpression sourceInput = new InputCollectExpression(0);
    List<CollectExpression<Row, ?>> collectExpressions =
        Collections.<CollectExpression<Row, ?>>singletonList(sourceInput);
    TransportCreatePartitionsAction transportCreatePartitionsAction = mock(TransportCreatePartitionsAction.class);
    IndexWriterProjector indexWriter = new IndexWriterProjector(
        clusterService,
        new NodeLimits(new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)),
        new NoopCircuitBreaker("dummy"),
        RamAccounting.NO_ACCOUNTING,
        scheduler,
        executor,
        CoordinatorTxnCtx.systemTransactionContext(),
        createNodeContext(),
        Settings.EMPTY,
        5,
        1,
        transportCreatePartitionsAction,
        (request, listener) -> {
        },
        IndexNameResolver.forTable(BULK_IMPORT_IDENT),
        RAW_SOURCE_REFERENCE,
        Collections.singletonList(ID_IDENT),
        Collections.<Symbol>singletonList(new InputColumn(1)),
        null,
        null,
        sourceInput,
        collectExpressions,
        20,
        null,
        null,
        false,
        false,
        UUID.randomUUID(),
        UpsertResultContext.forRowCount(),
        false);
    RowN rowN = new RowN(new Object[] { new BytesRef("{\"y\": \"x\"}"), null });
    BatchIterator<Row> batchIterator = InMemoryBatchIterator.of(Collections.singletonList(rowN), SENTINEL, true);
    batchIterator = indexWriter.apply(batchIterator);
    TestingRowConsumer testingBatchConsumer = new TestingRowConsumer();
    testingBatchConsumer.accept(batchIterator, null);
    List<Object[]> result = testingBatchConsumer.getResult();
    // Zero affected rows, as a NULL PK value results in an exception.
    // The exception must never bubble up, as other rows might already have been written.
    assertThat(result.get(0)[0], is(0L));
}
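Two details worth calling out: the (request, listener) -> {} lambda stubs out the actual shard-write transport, and Collections.<Symbol>singletonList(new InputColumn(1)) declares column 1 of the input row as the primary key, which is exactly where rowN holds null. A minimal sketch of that PK wiring, reusing names from the test above (an illustration, not additional test code):

// Primary key symbol: read the PK value from index 1 of each input row.
List<Symbol> pkSymbols = Collections.<Symbol>singletonList(new InputColumn(1));
// Input row: source JSON at index 0, PK value (here: null) at index 1.
Object[] cells = { new BytesRef("{\"y\": \"x\"}"), null };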
Use of io.crate.testing.TestingRowConsumer in project crate by crate.
From the class HashInnerJoinBatchIteratorBehaviouralTest, method testDistributed_SwitchToRightEvenIfLeftBatchDoesNotDeliverAllRowsExpectedByOneBatch:
@Test
public void testDistributed_SwitchToRightEvenIfLeftBatchDoesNotDeliverAllRowsExpectedByOneBatch() throws Exception {
    BatchSimulatingIterator<Row> leftIterator =
        new BatchSimulatingIterator<>(TestingBatchIterators.ofValues(Arrays.asList(1, 2, 4)), 1, 2, null);
    BatchSimulatingIterator<Row> rightIterator =
        new BatchSimulatingIterator<>(TestingBatchIterators.ofValues(Arrays.asList(2, 0, 4, 5)), 2, 1, null);
    BatchIterator<Row> batchIterator = new HashInnerJoinBatchIterator(
        leftIterator,
        rightIterator,
        mock(RowAccounting.class),
        new CombinedRow(1, 1),
        row -> Objects.equals(row.get(0), row.get(1)),
        row -> Objects.hash(row.get(0)),
        row -> Objects.hash(row.get(0)),
        () -> 2);
    TestingRowConsumer consumer = new TestingRowConsumer();
    consumer.accept(batchIterator, null);
    List<Object[]> result = consumer.getResult();
    assertThat(result, contains(new Object[] { 2, 2 }, new Object[] { 4, 4 }));
    // The block size is 2 but the left batch size is 1, so normally the iterator would keep calling
    // loadNextBatch on the left until the block size is reached. We don't want that: hash join iterators
    // running in parallel must always call loadNextBatch on the same side synchronously, because the
    // upstreams only send new data after all downstreams have responded. To validate this, the right
    // side must be repeated 3 times (two moveToStart calls after the initial pass).
    assertThat(rightIterator.getMovetoStartCalls(), is(2));
}
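For readability, here are the functional arguments from the constructor call above pulled out into named locals (a sketch; the exact functional-interface types are assumptions based on the lambda shapes):

// Join condition sees the combined row: CombinedRow(1, 1) concatenates one
// column from each side, so index 0 is the left value and index 1 the right.
Predicate<Row> joinCondition = row -> Objects.equals(row.get(0), row.get(1));
// Per-side hash functions, each reading its own column 0.
ToIntFunction<Row> hashLeft = row -> Objects.hash(row.get(0));
ToIntFunction<Row> hashRight = row -> Objects.hash(row.get(0));
// Block size: how many left rows to buffer before probing the right side.
IntSupplier blockSize = () -> 2;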
Use of io.crate.testing.TestingRowConsumer in project crate by crate.
From the class RemoteCollectorTest, method prepare:
@Before
public void prepare() {
    MockitoAnnotations.initMocks(this);
    UUID jobId = UUID.randomUUID();
    RoutedCollectPhase collectPhase = new RoutedCollectPhase(
        jobId,
        0,
        "remoteCollect",
        new Routing(Map.of("remoteNode", Map.of("dummyTable", IntArrayList.from(1)))),
        RowGranularity.DOC,
        Collections.singletonList(createReference("name", DataTypes.STRING)),
        Collections.emptyList(),
        WhereClause.MATCH_ALL.queryOrFallback(),
        DistributionInfo.DEFAULT_BROADCAST);
    transportJobAction = mock(TransportJobAction.class);
    TasksService tasksService = new TasksService(clusterService, new JobsLogs(() -> true));
    numBroadcastCalls = new AtomicInteger(0);
    // Count kill broadcasts instead of actually sending them.
    transportKillJobsNodeAction = new TransportKillJobsNodeAction(tasksService, clusterService, mock(TransportService.class)) {

        @Override
        public void broadcast(KillJobsRequest request, ActionListener<Long> listener) {
            numBroadcastCalls.incrementAndGet();
        }
    };
    consumer = new TestingRowConsumer();
    remoteCollector = new RemoteCollector(
        jobId,
        new SessionSettings("dummyUser", SearchPath.createSearchPathFrom("dummySchema")),
        "localNode",
        "remoteNode",
        transportJobAction,
        transportKillJobsNodeAction,
        Runnable::run,
        tasksService,
        RamAccounting.NO_ACCOUNTING,
        consumer,
        collectPhase);
}
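A test built on this fixture would kick off the collection and then assert on the consumer or the kill counter; a hedged sketch (the doCollect() entry point is an assumption, only the fields above come from the setup):

remoteCollector.doCollect(); // assumed entry point for starting the remote collection
// ... complete the mocked job action's listener with a response or failure ...
// On failure the collector should broadcast a kill for the job:
// assertThat(numBroadcastCalls.get(), is(1));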
Use of io.crate.testing.TestingRowConsumer in project crate by crate.
From the class MapSideDataCollectOperationTest, method testFileUriCollect:
@Test
public void testFileUriCollect() throws Exception {
    FileCollectSource fileCollectSource = new FileCollectSource(createNodeContext(), clusterService, Collections.emptyMap());
    File tmpFile = temporaryFolder.newFile("fileUriCollectOperation.json");
    try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tmpFile), StandardCharsets.UTF_8)) {
        writer.write("{\"name\": \"Arthur\", \"id\": 4, \"details\": {\"age\": 38}}\n");
        writer.write("{\"id\": 5, \"name\": \"Trillian\", \"details\": {\"age\": 33}}\n");
    }
    FileUriCollectPhase collectNode = new FileUriCollectPhase(
        UUID.randomUUID(),
        0,
        "test",
        Collections.singletonList("noop_id"),
        Literal.of(Paths.get(tmpFile.toURI()).toUri().toString()),
        Arrays.asList(
            createReference("name", DataTypes.STRING),
            createReference(new ColumnIdent("details", "age"), DataTypes.INTEGER)),
        Collections.emptyList(),
        null,
        false,
        CopyFromParserProperties.DEFAULT,
        FileUriCollectPhase.InputFormat.JSON,
        Settings.EMPTY);
    TestingRowConsumer consumer = new TestingRowConsumer();
    CollectTask collectTask = mock(CollectTask.class);
    BatchIterator<Row> iterator = fileCollectSource
        .getIterator(CoordinatorTxnCtx.systemTransactionContext(), collectNode, collectTask, false)
        .get(5, TimeUnit.SECONDS);
    consumer.accept(iterator, null);
    assertThat(
        new CollectionBucket(consumer.getResult()),
        contains(isRow("Arthur", 38), isRow("Trillian", 33)));
}
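One detail worth noting: round-tripping the temp file through Paths.get(tmpFile.toURI()).toUri().toString() yields a normalized file:// URI string for the Literal, and each JSON line is then projected onto the requested references in file order. A short sketch (expected values taken from the assertion above):

String fileUri = Paths.get(tmpFile.toURI()).toUri().toString(); // e.g. "file:///.../fileUriCollectOperation.json"
// Projected to (name, details['age']), the two JSON lines become:
//   ["Arthur", 38]
//   ["Trillian", 33]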