Use of io.crate.Streamer in project crate by crate.
From the class JobExecutionContextTest, the method testFailureClosesAllSubContexts:
@Test
public void testFailureClosesAllSubContexts() throws Exception {
    String localNodeId = "localNodeId";
    RoutedCollectPhase collectPhase = Mockito.mock(RoutedCollectPhase.class);
    Routing routing = Mockito.mock(Routing.class);
    when(routing.containsShards(localNodeId)).thenReturn(false);
    when(collectPhase.routing()).thenReturn(routing);
    when(collectPhase.maxRowGranularity()).thenReturn(RowGranularity.DOC);

    JobExecutionContext.Builder builder = new JobExecutionContext.Builder(
        UUID.randomUUID(), coordinatorNode, Collections.emptyList(), mock(JobsLogs.class));
    JobCollectContext jobCollectContext = new JobCollectContext(
        collectPhase, mock(MapSideDataCollectOperation.class), localNodeId,
        mock(RamAccountingContext.class), new TestingBatchConsumer(), mock(SharedShardContexts.class));
    TestingBatchConsumer batchConsumer = new TestingBatchConsumer();
    PageDownstreamContext pageDownstreamContext = spy(new PageDownstreamContext(
        Loggers.getLogger(PageDownstreamContext.class), "n1", 2, "dummy", batchConsumer,
        PassThroughPagingIterator.oneShot(), new Streamer[] { IntegerType.INSTANCE.streamer() },
        mock(RamAccountingContext.class), 1));

    builder.addSubContext(jobCollectContext);
    builder.addSubContext(pageDownstreamContext);
    JobExecutionContext jobExecutionContext = builder.build();

    Exception failure = new Exception("failure!");
    jobCollectContext.close(failure);

    // other contexts must be killed with the same failure
    verify(pageDownstreamContext, times(1)).innerKill(failure);

    final Field subContexts = JobExecutionContext.class.getDeclaredField("subContexts");
    subContexts.setAccessible(true);
    int size = ((ConcurrentMap<Integer, ExecutionSubContext>) subContexts.get(jobExecutionContext)).size();
    assertThat(size, is(0));
}
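The Streamer[] handed to PageDownstreamContext above is what the context uses to deserialize incoming column values. As a minimal sketch of what one of those streamers does on its own (the test name and scaffolding are illustrative; the calls are the ones used in the snippets below):

@Test
public void testIntegerStreamerRoundTrip() throws Exception {
    // Sketch: round-trip a single value through the IntegerType streamer.
    Streamer streamer = IntegerType.INSTANCE.streamer();
    BytesStreamOutput out = new BytesStreamOutput();
    streamer.writeValueTo(out, 1);

    StreamInput in = StreamInput.wrap(out.bytes());
    assertThat((Integer) streamer.readValueFrom(in), is(1));
}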
Use of io.crate.Streamer in project crate by crate.
From the class TDigestStateTest, the method testStreaming:
@Test
public void testStreaming() throws Exception {
    TDigestState digestState1 = new TDigestState(250, new double[] { 0.5, 0.8 });
    BytesStreamOutput out = new BytesStreamOutput();
    TDigestStateType digestStateType = TDigestStateType.INSTANCE;
    Streamer streamer = digestStateType.create().streamer();
    streamer.writeValueTo(out, digestState1);

    StreamInput in = StreamInput.wrap(out.bytes());
    TDigestState digestState2 = (TDigestState) streamer.readValueFrom(in);

    assertEquals(digestState1.compression(), digestState2.compression(), 0.001d);
    assertEquals(digestState1.fractions()[0], digestState2.fractions()[0], 0.001d);
    assertEquals(digestState1.fractions()[1], digestState2.fractions()[1], 0.001d);
}
Use of io.crate.Streamer in project crate by crate.
From the class DistributedResultRequestTest, the method testStreaming:
@Test
public void testStreaming() throws Exception {
    Streamer<?>[] streamers = new Streamer[] { DataTypes.STRING.streamer() };
    Object[][] rows = new Object[][] { { new BytesRef("ab") }, { null }, { new BytesRef("cd") } };
    UUID uuid = UUID.randomUUID();
    DistributedResultRequest r1 = new DistributedResultRequest(
        uuid, 1, (byte) 3, 1, streamers, new ArrayBucket(rows), false);

    BytesStreamOutput out = new BytesStreamOutput();
    r1.writeTo(out);

    StreamInput in = StreamInput.wrap(out.bytes());
    DistributedResultRequest r2 = new DistributedResultRequest();
    r2.readFrom(in);
    r2.streamers(streamers);

    assertTrue(r2.rowsCanBeRead());
    assertEquals(r1.rows().size(), r2.rows().size());
    assertThat(r1.isLast(), is(r2.isLast()));
    assertThat(r1.executionPhaseInputId(), is(r2.executionPhaseInputId()));
    assertThat(r2.rows(), contains(isRow("ab"), isNullRow(), isRow("cd")));
}
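The request carries no type metadata on the wire: the receiving side has to supply the same Streamer[] (r2.streamers(streamers)) before the rows can be read. A hedged sketch of the underlying per-column pattern, streaming one (string, integer) row by hand (illustrative test name and scaffolding, not the request's actual serialization code):

@Test
public void testColumnWiseStreaming() throws Exception {
    // One streamer per column, in column order.
    Streamer[] columnStreamers = new Streamer[] {
        DataTypes.STRING.streamer(),
        IntegerType.INSTANCE.streamer()
    };
    Object[] row = new Object[] { new BytesRef("ab"), 1 };

    BytesStreamOutput out = new BytesStreamOutput();
    for (int i = 0; i < row.length; i++) {
        columnStreamers[i].writeValueTo(out, row[i]);
    }

    // Reading back requires the same streamers in the same order.
    StreamInput in = StreamInput.wrap(out.bytes());
    Object[] readRow = new Object[columnStreamers.length];
    for (int i = 0; i < columnStreamers.length; i++) {
        readRow[i] = columnStreamers[i].readValueFrom(in);
    }
    assertThat(readRow[0], is((Object) new BytesRef("ab")));
    assertThat(readRow[1], is((Object) 1));
}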
Use of io.crate.Streamer in project crate by crate.
From the class ShardUpsertRequest, the method writeTo:
@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    // update column names
    if (updateColumns != null) {
        out.writeVInt(updateColumns.length);
        for (String column : updateColumns) {
            out.writeString(column);
        }
    } else {
        out.writeVInt(0);
    }
    // insert columns are streamed as References; their streamers are reused below for the item values
    Streamer[] insertValuesStreamer = null;
    if (insertColumns != null) {
        out.writeVInt(insertColumns.length);
        for (Reference reference : insertColumns) {
            Reference.toStream(reference, out);
        }
        insertValuesStreamer = Symbols.streamerArray(List.of(insertColumns));
    } else {
        out.writeVInt(0);
    }
    out.writeBoolean(continueOnError);
    out.writeVInt(duplicateKeyAction.ordinal());
    out.writeBoolean(validateConstraints);
    sessionSettings.writeTo(out);
    out.writeVInt(items.size());
    for (Item item : items) {
        item.writeTo(out, insertValuesStreamer);
    }
    if (out.getVersion().onOrAfter(Version.V_4_2_0)) {
        if (returnValues != null) {
            out.writeVInt(returnValues.length);
            for (Symbol returnValue : returnValues) {
                Symbols.toStream(returnValue, out);
            }
        } else {
            out.writeVInt(0);
        }
    }
}
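insertValuesStreamer is computed once from insertColumns (via Symbols.streamerArray) and then passed to every item.writeTo call, so each insert value is written with the streamer of its column's type. A hypothetical sketch of that value-writing step (writeInsertValues, streamers and values are illustrative names, not the actual Item implementation):

// Hypothetical helper: write one item's insert values with the per-column streamers.
private static void writeInsertValues(StreamOutput out, Streamer[] streamers, Object[] values) throws IOException {
    out.writeVInt(values.length);
    for (int i = 0; i < values.length; i++) {
        streamers[i].writeValueTo(out, values[i]);
    }
}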
Use of io.crate.Streamer in project crate by crate.
From the class FetchProjection, the method generateStreamersGroupedByReaderAndNode:
@SuppressWarnings({ "rawtypes" })
public Map<String, ? extends IntObjectMap<Streamer[]>> generateStreamersGroupedByReaderAndNode() {
    HashMap<String, IntObjectHashMap<Streamer[]>> streamersByReaderByNode = new HashMap<>();
    for (Map.Entry<String, IntSet> entry : nodeReaders.entrySet()) {
        IntObjectHashMap<Streamer[]> streamersByReaderId = new IntObjectHashMap<>();
        String nodeId = entry.getKey();
        streamersByReaderByNode.put(nodeId, streamersByReaderId);
        for (IntCursor readerIdCursor : entry.getValue()) {
            int readerId = readerIdCursor.value;
            String index = readerIndices.floorEntry(readerId).getValue();
            RelationName relationName = indicesToIdents.get(index);
            FetchSource fetchSource = fetchSources.get(relationName);
            if (fetchSource == null) {
                continue;
            }
            streamersByReaderId.put(readerId, Symbols.streamerArray(fetchSource.references()));
        }
    }
    return streamersByReaderByNode;
}
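The result maps node id to a reader-id -> Streamer[] table, so the fetch operation running on a node can pick the right streamers for each reader. A sketch of consuming that structure (assuming the HPPC cursor iteration that IntObjectMap supports; variable names are illustrative):

Map<String, ? extends IntObjectMap<Streamer[]>> grouped = generateStreamersGroupedByReaderAndNode();
for (Map.Entry<String, ? extends IntObjectMap<Streamer[]>> nodeEntry : grouped.entrySet()) {
    String nodeId = nodeEntry.getKey();
    for (IntObjectCursor<Streamer[]> readerEntry : nodeEntry.getValue()) {
        // The columns fetched for readerEntry.key on nodeId are (de)serialized
        // with the streamers in readerEntry.value.
        Streamer[] streamers = readerEntry.value;
    }
}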