Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchOperation, method fetch:
public CompletableFuture<? extends IntObjectMap<StreamBucket>> fetch(UUID jobId,
                                                                     int phaseId,
                                                                     @Nullable IntObjectMap<IntArrayList> docIdsToFetch,
                                                                     boolean closeTaskOnFinish) {
    if (docIdsToFetch == null) {
        // Nothing to fetch: optionally close the task, record the no-op operation and return an empty result.
        if (closeTaskOnFinish) {
            tryCloseTask(jobId, phaseId);
        }
        jobsLogs.operationStarted(phaseId, jobId, "fetch", () -> -1);
        jobsLogs.operationFinished(phaseId, jobId, null);
        return CompletableFuture.completedFuture(new IntObjectHashMap<>(0));
    }
    RootTask context = tasksService.getTask(jobId);
    FetchTask fetchTask = context.getTask(phaseId);
    jobsLogs.operationStarted(phaseId, jobId, "fetch", () -> -1);
    // On completion: close or kill the task if requested, then record success or failure in jobsLogs.
    BiConsumer<? super IntObjectMap<StreamBucket>, ? super Throwable> whenComplete = (res, err) -> {
        if (closeTaskOnFinish) {
            if (err == null) {
                fetchTask.close();
            } else {
                fetchTask.kill(err);
            }
        }
        if (err == null) {
            jobsLogs.operationFinished(phaseId, jobId, null);
        } else {
            jobsLogs.operationFinished(phaseId, jobId, SQLExceptions.messageOf(err));
        }
    };
    try {
        return doFetch(fetchTask, docIdsToFetch).whenComplete(whenComplete);
    } catch (Throwable t) {
        whenComplete.accept(null, t);
        return CompletableFuture.failedFuture(t);
    }
}
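For orientation, a minimal caller sketch follows. The operation variable, job id, and doc ids are hypothetical, and the reading that the map keys are reader ids and the list values are the doc ids wanted from that reader is inferred from the FetchTask usage, not confirmed here.
// A minimal caller sketch (hypothetical wiring): operation is some NodeFetchOperation instance,
// and docIdsToFetch maps a reader id to the doc ids to fetch from that reader.
IntObjectHashMap<IntArrayList> docIdsToFetch = new IntObjectHashMap<>();
IntArrayList docIds = new IntArrayList();
docIds.add(0);
docIds.add(3);
docIdsToFetch.put(1, docIds);

operation.fetch(UUID.randomUUID(), 1, docIdsToFetch, true)
    .whenComplete((buckets, err) -> {
        if (err == null) {
            // buckets is an IntObjectMap<StreamBucket> keyed by the same reader ids
        } else {
            // the operation has already closed/killed the task and logged the failure
        }
    });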
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchRequestTest, method testStreaming:
@Test
public void testStreaming() throws Exception {
    IntObjectHashMap<IntArrayList> toFetch = new IntObjectHashMap<>();
    IntArrayList docIds = new IntArrayList(3);
    toFetch.put(1, docIds);

    NodeFetchRequest orig = new NodeFetchRequest(UUID.randomUUID(), 1, true, toFetch);

    BytesStreamOutput out = new BytesStreamOutput();
    orig.writeTo(out);

    StreamInput in = out.bytes().streamInput();
    NodeFetchRequest streamed = new NodeFetchRequest(in);

    assertThat(orig.jobId(), is(streamed.jobId()));
    assertThat(orig.fetchPhaseId(), is(streamed.fetchPhaseId()));
    assertThat(orig.isCloseContext(), is(streamed.isCloseContext()));
    assertThat(orig.toFetch().toString(), is(streamed.toFetch().toString()));
}
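The request class owns the actual wire format, but as a rough illustration of how such an HPPC map can be pushed through Elasticsearch's stream abstractions by hand, here is a sketch under the assumption of a simple varint layout; it is not NodeFetchRequest's real encoding.
// Sketch only: writes reader id -> doc ids as varints.
// Uses com.carrotsearch.hppc.cursors.IntObjectCursor and
// org.elasticsearch.common.io.stream.StreamOutput / StreamInput.
static void writeToFetch(StreamOutput out, IntObjectHashMap<IntArrayList> toFetch) throws IOException {
    out.writeVInt(toFetch.size());
    for (IntObjectCursor<IntArrayList> entry : toFetch) {
        out.writeVInt(entry.key);
        out.writeVInt(entry.value.size());
        for (int i = 0; i < entry.value.size(); i++) {
            out.writeVInt(entry.value.get(i));
        }
    }
}

static IntObjectHashMap<IntArrayList> readToFetch(StreamInput in) throws IOException {
    int numReaders = in.readVInt();
    IntObjectHashMap<IntArrayList> toFetch = new IntObjectHashMap<>(numReaders);
    for (int r = 0; r < numReaders; r++) {
        int readerId = in.readVInt();
        int numDocs = in.readVInt();
        IntArrayList docIds = new IntArrayList(numDocs);
        for (int d = 0; d < numDocs; d++) {
            docIds.add(in.readVInt());
        }
        toFetch.put(readerId, docIds);
    }
    return toFetch;
}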
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class TransportFetchOperationTest, method test_ram_accounting_on_non_empty_fetch_ids_and_close:
@Test
public void test_ram_accounting_on_non_empty_fetch_ids_and_close() {
    var toFetch = new IntObjectHashMap<IntContainer>();
    toFetch.put(1, new IntArrayList());
    RamAccounting ramAccounting = TransportFetchOperation.ramAccountingForIncomingResponse(
        RamAccounting.NO_ACCOUNTING,
        toFetch,
        true
    );
    assertThat(ramAccounting, instanceOf(BlockBasedRamAccounting.class));
}
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class FetchRowsTest, method test_fetch_rows_can_map_inputs_and_buckets_to_outputs:
@Test
public void test_fetch_rows_can_map_inputs_and_buckets_to_outputs() throws Exception {
    var e = SQLExecutor.builder(clusterService)
        .addTable("create table t1 (x text)")
        .addTable("create table t2 (y text, z int)")
        .build();

    var t1 = e.resolveTableInfo("t1");
    var x = (Reference) e.asSymbol("x");
    var fetchSource1 = new FetchSource();
    fetchSource1.addFetchIdColumn(new InputColumn(0, DataTypes.LONG));
    fetchSource1.addRefToFetch(x);

    var t2 = e.resolveTableInfo("t2");
    var y = (Reference) e.asSymbol("y");
    var fetchSource2 = new FetchSource();
    fetchSource2.addFetchIdColumn(new InputColumn(1, DataTypes.LONG));
    fetchSource2.addRefToFetch(y);

    var fetchSources = Map.of(
        t1.ident(), fetchSource1,
        t2.ident(), fetchSource2
    );
    var fetchRows = FetchRows.create(
        CoordinatorTxnCtx.systemTransactionContext(),
        createNodeContext(),
        fetchSources,
        List.of(
            new FetchReference(new InputColumn(0, DataTypes.LONG), x),
            new FetchReference(new InputColumn(1, DataTypes.LONG), y),
            new InputColumn(2, DataTypes.INTEGER)
        )
    );
    long fetchIdRel1 = FetchId.encode(1, 1);
    long fetchIdRel2 = FetchId.encode(2, 1);
    var readerBuckets = new ReaderBuckets(
        fetchRows,
        reader -> reader == 1 ? fetchSource1 : fetchSource2,
        cells -> 0,
        RamAccounting.NO_ACCOUNTING
    );
    IntHashSet readerIds = new IntHashSet(2);
    readerIds.add(1);
    readerIds.add(2);
    readerBuckets.add(new RowN(fetchIdRel1, fetchIdRel2, 42));
    readerBuckets.generateToFetch(readerIds);

    IntObjectHashMap<Bucket> results = new IntObjectHashMap<>();
    results.put(1, new ArrayBucket($$($("Arthur"))));
    results.put(2, new ArrayBucket($$($("Trillian"))));

    var it = readerBuckets.getOutputRows(List.of(results));
    assertThat(it.hasNext(), is(true));
    var outputRow = it.next();
    assertThat(outputRow.get(0), is("Arthur"));
    assertThat(outputRow.get(1), is("Trillian"));
    assertThat(outputRow.get(2), is(42));
}
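FetchId.encode packs a reader id and a doc id into a single long, which is why the lambda passed to ReaderBuckets can pick the fetch source from the reader id alone. Assuming the usual layout (reader id in the upper 32 bits, doc id in the lower 32), which is an inference here rather than something shown in this snippet, the packing looks roughly like this:
// Assumed layout, for illustration only: high 32 bits = reader id, low 32 bits = doc id.
static long encode(int readerId, int docId) {
    return ((long) readerId << 32) | (docId & 0xFFFFFFFFL);
}

static int decodeReaderId(long fetchId) {
    return (int) (fetchId >>> 32);
}

static int decodeDocId(long fetchId) {
    return (int) fetchId;
}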
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchResponseTest, method setUpStreamBucketsAndStreamer:
@Before
public void setUpStreamBucketsAndStreamer() throws Exception {
    streamers = new IntObjectHashMap<>(1);
    streamers.put(1, new Streamer[] { DataTypes.BOOLEAN.streamer() });

    IntObjectHashMap<IntContainer> toFetch = new IntObjectHashMap<>();
    IntHashSet docIds = new IntHashSet(3);
    toFetch.put(1, docIds);

    StreamBucket.Builder builder = new StreamBucket.Builder(streamers.get(1), RamAccounting.NO_ACCOUNTING);
    builder.add(new RowN(new Object[] { true }));

    fetched = new IntObjectHashMap<>(1);
    fetched.put(1, builder.build());
}
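HPPC maps are walked with cursors rather than Map.Entry objects. Assuming the fetched field is declared as IntObjectHashMap<StreamBucket> and that StreamBucket is iterable over Row (as the other snippets suggest), a test body could inspect the fixture roughly like this:
// Cursor-based iteration over the HPPC map; entry.key is the reader id,
// entry.value the StreamBucket built above (one row containing the value true).
for (IntObjectCursor<StreamBucket> entry : fetched) {
    for (Row row : entry.value) {
        assertThat(row.get(0), is(true));
    }
}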