use of io.crate.execution.engine.collect.CollectExpression in project crate by crate.
the class InsertFromValues method createRowsByShardGrouper.
private GroupRowsByShard<ShardUpsertRequest, ShardUpsertRequest.Item> createRowsByShardGrouper(
        Symbol[] assignmentSources,
        ArrayList<Input<?>> insertInputs,
        Supplier<String> indexNameResolver,
        InputFactory.Context<CollectExpression<Row, ?>> collectContext,
        PlannerContext plannerContext,
        ClusterService clusterService) {
InputRow insertValues = new InputRow(insertInputs);
Function<String, ShardUpsertRequest.Item> itemFactory = id ->
    new ShardUpsertRequest.Item(id, assignmentSources, insertValues.materialize(), null, null, null);
var rowShardResolver = new RowShardResolver(
    plannerContext.transactionContext(),
    plannerContext.nodeContext(),
    writerProjection.primaryKeys(),
    writerProjection.ids(),
    writerProjection.clusteredByIdent(),
    writerProjection.clusteredBy());
return new GroupRowsByShard<>(
    clusterService,
    rowShardResolver,
    new TypeGuessEstimateRowSize(),
    indexNameResolver,
    collectContext.expressions(),
    itemFactory,
    true);
}
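The item factory built here captures insertValues and calls materialize() for every generated item, so each ShardUpsertRequest.Item receives a snapshot of the current input values rather than a reference to the live InputRow. A minimal sketch of that InputRow/materialize() interplay, using Literal constants purely for illustration (assuming Literal.of overloads for these value types):

    // Each Input<?> supplies the current value of one insert column;
    // materialize() copies those values into a fresh Object[].
    ArrayList<Input<?>> insertInputs = new ArrayList<>();
    insertInputs.add(Literal.of(1));          // illustrative constant input
    insertInputs.add(Literal.of("Arthur"));   // illustrative constant input
    InputRow insertValues = new InputRow(insertInputs);
    Object[] snapshot = insertValues.materialize();   // -> [1, "Arthur"]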
use of io.crate.execution.engine.collect.CollectExpression in project crate by crate.
the class IndexWriterProjectorTest method testIndexWriter.
@Test
public void testIndexWriter() throws Throwable {
execute("create table bulk_import (id int primary key, name string) with (number_of_replicas=0)");
ensureGreen();
InputCollectExpression sourceInput = new InputCollectExpression(1);
List<CollectExpression<Row, ?>> collectExpressions = Collections.<CollectExpression<Row, ?>>singletonList(sourceInput);
RelationName bulkImportIdent = new RelationName(sqlExecutor.getCurrentSchema(), "bulk_import");
ClusterState state = clusterService().state();
Settings tableSettings = TableSettingsResolver.get(state.getMetadata(), bulkImportIdent, false);
ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class);
IndexWriterProjector writerProjector = new IndexWriterProjector(
    clusterService(),
    new NodeLimits(new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)),
    new NoopCircuitBreaker("dummy"),
    RamAccounting.NO_ACCOUNTING,
    threadPool.scheduler(),
    threadPool.executor(ThreadPool.Names.SEARCH),
    CoordinatorTxnCtx.systemTransactionContext(),
    new NodeContext(internalCluster().getInstance(Functions.class)),
    Settings.EMPTY,
    IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.get(tableSettings),
    NumberOfReplicas.fromSettings(tableSettings, state.getNodes().getSize()),
    internalCluster().getInstance(TransportCreatePartitionsAction.class),
    internalCluster().getInstance(TransportShardUpsertAction.class)::execute,
    IndexNameResolver.forTable(bulkImportIdent),
    new Reference(new ReferenceIdent(bulkImportIdent, DocSysColumns.RAW), RowGranularity.DOC, DataTypes.STRING, 0, null),
    Collections.singletonList(ID_IDENT),
    Collections.<Symbol>singletonList(new InputColumn(0)),
    null,
    null,
    sourceInput,
    collectExpressions,
    20,
    null,
    null,
    false,
    false,
    UUID.randomUUID(),
    UpsertResultContext.forRowCount(),
    false);
BatchIterator rowsIterator = InMemoryBatchIterator.of(
    IntStream.range(0, 100)
        .mapToObj(i -> new RowN(new Object[] { i, "{\"id\": " + i + ", \"name\": \"Arthur\"}" }))
        .collect(Collectors.toList()),
    SENTINEL,
    true);
TestingRowConsumer consumer = new TestingRowConsumer();
consumer.accept(writerProjector.apply(rowsIterator), null);
Bucket objects = consumer.getBucket();
assertThat(objects, contains(isRow(100L)));
execute("refresh table bulk_import");
execute("select count(*) from bulk_import");
assertThat(response.rowCount(), is(1L));
assertThat(response.rows()[0][0], is(100L));
}
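The projector reads the raw JSON source through the CollectExpression contract: sourceInput is an InputCollectExpression over column 1, so once a row has been pushed via setNextRow, value() yields that row's source string. A minimal sketch of that contract with a made-up row:

    Row row = new RowN(new Object[] { 7, "{\"id\": 7, \"name\": \"Arthur\"}" });
    sourceInput.setNextRow(row);            // InputCollectExpression(1) tracks column index 1
    Object rawSource = sourceInput.value(); // -> "{\"id\": 7, \"name\": \"Arthur\"}"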
use of io.crate.execution.engine.collect.CollectExpression in project crate by crate.
the class IndexWriterProjectorUnitTest method testNullPKValue.
@Test
public void testNullPKValue() throws Throwable {
InputCollectExpression sourceInput = new InputCollectExpression(0);
List<CollectExpression<Row, ?>> collectExpressions = Collections.<CollectExpression<Row, ?>>singletonList(sourceInput);
TransportCreatePartitionsAction transportCreatePartitionsAction = mock(TransportCreatePartitionsAction.class);
IndexWriterProjector indexWriter = new IndexWriterProjector(
    clusterService,
    new NodeLimits(new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)),
    new NoopCircuitBreaker("dummy"),
    RamAccounting.NO_ACCOUNTING,
    scheduler,
    executor,
    CoordinatorTxnCtx.systemTransactionContext(),
    createNodeContext(),
    Settings.EMPTY,
    5,
    1,
    transportCreatePartitionsAction,
    (request, listener) -> {
    },
    IndexNameResolver.forTable(BULK_IMPORT_IDENT),
    RAW_SOURCE_REFERENCE,
    Collections.singletonList(ID_IDENT),
    Collections.<Symbol>singletonList(new InputColumn(1)),
    null,
    null,
    sourceInput,
    collectExpressions,
    20,
    null,
    null,
    false,
    false,
    UUID.randomUUID(),
    UpsertResultContext.forRowCount(),
    false);
RowN rowN = new RowN(new Object[] { new BytesRef("{\"y\": \"x\"}"), null });
BatchIterator<Row> batchIterator = InMemoryBatchIterator.of(Collections.singletonList(rowN), SENTINEL, true);
batchIterator = indexWriter.apply(batchIterator);
TestingRowConsumer testingBatchConsumer = new TestingRowConsumer();
testingBatchConsumer.accept(batchIterator, null);
List<Object[]> result = testingBatchConsumer.getResult();
// Zero affected rows, because a NULL PK value results in an exception.
// It must never bubble up as other rows might already have been written.
assertThat(result.get(0)[0], is(0L));
}
use of io.crate.execution.engine.collect.CollectExpression in project crate by crate.
the class DocRefResolver method getImplementation.
@Override
public CollectExpression<Doc, ?> getImplementation(Reference ref) {
ColumnIdent columnIdent = ref.column();
String fqn = columnIdent.fqn();
switch(fqn) {
case DocSysColumns.Names.VERSION:
return forFunction(Doc::getVersion);
case DocSysColumns.Names.SEQ_NO:
return forFunction(Doc::getSeqNo);
case DocSysColumns.Names.PRIMARY_TERM:
return forFunction(Doc::getPrimaryTerm);
case DocSysColumns.Names.ID:
return NestableCollectExpression.forFunction(Doc::getId);
case DocSysColumns.Names.DOCID:
return forFunction(Doc::docId);
case DocSysColumns.Names.RAW:
return forFunction(Doc::getRaw);
case DocSysColumns.Names.DOC:
return forFunction(Doc::getSource);
default:
for (int i = 0; i < partitionedByColumns.size(); i++) {
var pColumn = partitionedByColumns.get(i);
if (pColumn.equals(columnIdent)) {
final int idx = i;
return forFunction(getResp -> ref.valueType().implicitCast(PartitionName.fromIndexOrTemplate(getResp.getIndex()).values().get(idx)));
} else if (pColumn.isChildOf(columnIdent)) {
final int idx = i;
return forFunction(response -> {
if (response == null) {
return null;
}
var partitionName = PartitionName.fromIndexOrTemplate(response.getIndex());
var partitionValue = partitionName.values().get(idx);
var source = response.getSource();
Maps.mergeInto(source, pColumn.name(), pColumn.path(), partitionValue);
return ref.valueType().implicitCast(ValueExtractors.fromMap(source, columnIdent));
});
}
}
return forFunction(response -> {
if (response == null) {
return null;
}
return ref.valueType().implicitCast(ValueExtractors.fromMap(response.getSource(), ref.column()));
});
}
}
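Each branch hands back an expression created via forFunction (shown fully qualified as NestableCollectExpression.forFunction in the _id branch), which adapts a plain Function<Doc, T> to the CollectExpression contract. A minimal sketch of how a caller would consume such an expression, where doc stands for any previously fetched Doc:

    CollectExpression<Doc, ?> versionExpr = NestableCollectExpression.forFunction(Doc::getVersion);
    versionExpr.setNextRow(doc);            // bind the expression to the current document
    Object version = versionExpr.value();   // -> the document's _version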
use of io.crate.execution.engine.collect.CollectExpression in project crate by crate.
the class DocRefResolverTest method testSystemColumnsCollectExpressions.
@Test
public void testSystemColumnsCollectExpressions() throws Exception {
List<Reference> references = List.of(
    refInfo("t1._id", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.ID), RowGranularity.DOC),
    refInfo("t1._version", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.VERSION), RowGranularity.DOC),
    refInfo("t1._doc", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.DOC), RowGranularity.DOC),
    refInfo("t1._raw", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.RAW), RowGranularity.DOC),
    refInfo("t1._docid", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.DOCID), RowGranularity.DOC),
    refInfo("t1._seq_no", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.SEQ_NO), RowGranularity.DOC),
    refInfo("t1._primary_term", DocSysColumns.COLUMN_IDENTS.get(DocSysColumns.PRIMARY_TERM), RowGranularity.DOC));
List<CollectExpression<Doc, ?>> collectExpressions = new ArrayList<>(4);
for (Reference reference : references) {
CollectExpression<Doc, ?> collectExpression = REF_RESOLVER.getImplementation(reference);
collectExpression.setNextRow(GET_RESULT);
collectExpressions.add(collectExpression);
}
assertThat(collectExpressions.get(0).value(), is("abc"));
assertThat(collectExpressions.get(1).value(), is(1L));
assertThat(collectExpressions.get(2).value(), is(XContentHelper.convertToMap(SOURCE, false, XContentType.JSON).v2()));
assertThat(collectExpressions.get(3).value(), is(SOURCE.utf8ToString()));
assertThat(collectExpressions.get(4).value(), is(2));
assertThat(collectExpressions.get(5).value(), is(1L));
assertThat(collectExpressions.get(6).value(), is(1L));
}
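Columns that are neither system columns nor partitioned-by columns fall through to the default branch of getImplementation above and are read from the document source via ValueExtractors.fromMap. A sketch with a hypothetical reference nameRef for a plain name column:

    CollectExpression<Doc, ?> nameExpr = REF_RESOLVER.getImplementation(nameRef);
    nameExpr.setNextRow(GET_RESULT);
    Object name = nameExpr.value();   // -> value of "name" from the document's _source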