Use of io.crate.operation.collect.CollectExpression in project crate by crate.
From class RowTransformingBatchIteratorTest, method createInputs:
@Before
public void createInputs() throws Exception {
    InputFactory inputFactory = new InputFactory(getFunctions());
    InputFactory.Context<CollectExpression<Row, ?>> ctx = inputFactory.ctxForInputColumns();
    inputs = Collections.singletonList(ctx.add(AddFunction.of(new InputColumn(0), Literal.of(2L))));
    expressions = ctx.expressions();
}
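The contract this setup relies on is that each CollectExpression is an Input that must be primed with the current row before any derived input is read. A minimal, self-contained sketch of that pattern, using simplified stand-in interfaces rather than crate's actual types (all names below are illustrative):

import java.util.Collections;
import java.util.List;

public class CollectExpressionSketch {

    // Simplified stand-ins for crate's Row, Input and CollectExpression interfaces.
    interface Row { Object get(int index); }
    interface Input<T> { T value(); }
    interface CollectExpression<R, T> extends Input<T> { void setNextRow(R row); }

    // InputCollectExpression-style implementation: caches one cell of the current row.
    static final class ColumnExpression implements CollectExpression<Row, Object> {
        private final int index;
        private Object value;
        ColumnExpression(int index) { this.index = index; }
        public void setNextRow(Row row) { value = row.get(index); }
        public Object value() { return value; }
    }

    public static void main(String[] args) {
        ColumnExpression col0 = new ColumnExpression(0);
        // A derived input, analogous to ctx.add(AddFunction.of(new InputColumn(0), Literal.of(2L))):
        Input<Long> plusTwo = () -> ((Number) col0.value()).longValue() + 2L;

        List<CollectExpression<Row, ?>> expressions = Collections.singletonList(col0);
        Row row = index -> 40L; // a single-column row holding 40

        for (CollectExpression<Row, ?> expr : expressions) {
            expr.setNextRow(row); // prime all expressions with the current row first
        }
        System.out.println(plusTwo.value()); // 42: value() now reflects the primed row
    }
}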
Use of io.crate.operation.collect.CollectExpression in project crate by crate.
From class GroupingBytesRefCollectorBenchmark, method createGroupByMinBytesRefCollector:
private GroupingCollector createGroupByMinBytesRefCollector(Functions functions) {
    InputCollectExpression keyInput = new InputCollectExpression(0);
    List<Input<?>> keyInputs = Arrays.<Input<?>>asList(keyInput);
    CollectExpression[] collectExpressions = new CollectExpression[] { keyInput };
    FunctionIdent minBytesRefFuncIdent =
        new FunctionIdent(MinimumAggregation.NAME, Arrays.asList(DataTypes.STRING));
    // min over a string column yields a string, so the return type is STRING
    FunctionInfo minBytesRefFuncInfo =
        new FunctionInfo(minBytesRefFuncIdent, DataTypes.STRING, FunctionInfo.Type.AGGREGATE);
    AggregationFunction minAgg = (AggregationFunction) functions.get(minBytesRefFuncIdent);
    Aggregation aggregation = Aggregation.finalAggregation(
        minBytesRefFuncInfo, Arrays.asList(new InputColumn(0)), Aggregation.Step.ITER);
    Aggregator[] aggregators = new Aggregator[] {
        new Aggregator(RAM_ACCOUNTING_CONTEXT, aggregation, minAgg, new Input[] { keyInput })
    };
    return GroupingCollector.singleKey(
        collectExpressions, aggregators, RAM_ACCOUNTING_CONTEXT, keyInputs.get(0), DataTypes.STRING);
}
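The benchmark registers keyInput both as a collect expression and as the aggregator's argument input. Per row, a single-key grouping collector primes the expressions, reads the key, and folds the row into that group's state; roughly the following, as a plain-Java simplification with no crate types:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SingleKeyGroupingSketch {
    public static void main(String[] args) {
        // Two-column rows: group key at index 0, aggregated string at index 1.
        List<Object[]> rows = Arrays.asList(
            new Object[] { "x", "pear" },
            new Object[] { "x", "apple" },
            new Object[] { "y", "kiwi" });

        Map<Object, String> minPerKey = new HashMap<>();
        for (Object[] row : rows) {
            // In crate terms: setNextRow(row) primes the collect expressions,
            // keyInput.value() yields the group key, and the aggregator folds its argument.
            Object key = row[0];
            String value = (String) row[1];
            minPerKey.merge(key, value, (a, b) -> a.compareTo(b) <= 0 ? a : b);
        }
        System.out.println(minPerKey); // e.g. {x=apple, y=kiwi} (HashMap order may vary)
    }
}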
Use of io.crate.operation.collect.CollectExpression in project crate by crate.
From class SingleRowSource, method getCollector:
@Override
public CrateCollector getCollector(CollectPhase phase, BatchConsumer consumer, JobCollectContext jobCollectContext) {
    RoutedCollectPhase collectPhase = (RoutedCollectPhase) phase;
    collectPhase = collectPhase.normalize(clusterNormalizer, null);
    if (collectPhase.whereClause().noMatch()) {
        return RowsCollector.empty(consumer);
    }
    assert !collectPhase.whereClause().hasQuery() : "WhereClause should have been normalized to either MATCH_ALL or NO_MATCH";
    InputFactory inputFactory = new InputFactory(functions);
    InputFactory.Context<CollectExpression<Row, ?>> ctx = inputFactory.ctxForInputColumns(collectPhase.toCollect());
    return RowsCollector.single(new InputRow(ctx.topLevelInputs()), consumer);
}
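InputRow adapts the context's top-level inputs into a row: reading a cell delegates to the corresponding Input's value(). A minimal sketch of that adapter, again with simplified stand-in types rather than crate's actual classes:

import java.util.Arrays;
import java.util.List;

public class InputRowSketch {
    interface Input<T> { T value(); }

    // A row view over top-level inputs: get(i) delegates to inputs.get(i).value().
    static final class InputRow {
        private final List<Input<?>> inputs;
        InputRow(List<Input<?>> inputs) { this.inputs = inputs; }
        int numColumns() { return inputs.size(); }
        Object get(int index) { return inputs.get(index).value(); }
    }

    public static void main(String[] args) {
        Input<String> name = () -> "crate";
        Input<Long> answer = () -> 42L;
        InputRow row = new InputRow(Arrays.<Input<?>>asList(name, answer));
        System.out.println(row.numColumns() + " columns: " + row.get(0) + ", " + row.get(1));
    }
}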
Use of io.crate.operation.collect.CollectExpression in project crate by crate.
From class IndexWriterCountBatchIteratorTest, method testIndexWriterIterator:
@Test
public void testIndexWriterIterator() throws Exception {
    execute("create table bulk_import (id int primary key) with (number_of_replicas=0)");
    ensureGreen();
    Supplier<BatchIterator> sourceSupplier = () -> RowsBatchIterator.newInstance(
        RowGenerator.fromSingleColValues(
            () -> IntStream.range(0, 10).mapToObj(i -> new BytesRef("{\"id\": " + i + "}")).iterator()),
        1);
    Supplier<String> indexNameResolver = IndexNameResolver.forTable(new TableIdent(null, "bulk_import"));
    Input<?> sourceInput = new InputCollectExpression(0);
    List<CollectExpression<Row, ?>> collectExpressions = Collections.singletonList((InputCollectExpression) sourceInput);
    RowShardResolver rowShardResolver = getRowShardResolver();
    BulkShardProcessor bulkShardProcessor = getBulkShardProcessor();
    Supplier<ShardUpsertRequest.Item> updateItemSupplier = () -> new ShardUpsertRequest.Item(
        rowShardResolver.id(), null, new Object[] { sourceInput.value() }, null);
    List<Object[]> expectedResult = Collections.singletonList(new Object[] { 10L });
    BatchIteratorTester tester = new BatchIteratorTester(
        () -> IndexWriterCountBatchIterator.newIndexInstance(
            sourceSupplier.get(), indexNameResolver, collectExpressions,
            rowShardResolver, bulkShardProcessor, updateItemSupplier));
    tester.verifyResultAndEdgeCaseBehaviour(expectedResult);
}
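Note that updateItemSupplier closes over sourceInput and reads it lazily: the iterator primes the collect expressions with the current row before invoking the supplier, so sourceInput.value() yields that row's source document. A small, self-contained sketch of this ordering, with stand-in types (names illustrative):

import java.util.function.Supplier;

public class PrimedSupplierSketch {
    interface Row { Object get(int index); }

    // InputCollectExpression-style stand-in: exposes one cell of the current row.
    static final class ColumnExpression {
        private final int index;
        private Object value;
        ColumnExpression(int index) { this.index = index; }
        void setNextRow(Row row) { value = row.get(index); }
        Object value() { return value; }
    }

    public static void main(String[] args) {
        ColumnExpression sourceInput = new ColumnExpression(0);
        // Like updateItemSupplier above: the supplier reads the expression lazily,
        // so it must run only after the expression was primed for the current row.
        Supplier<Object[]> itemSupplier = () -> new Object[] { sourceInput.value() };

        Row row = index -> "{\"id\": 7}";
        sourceInput.setNextRow(row);               // the iterator primes expressions per row
        System.out.println(itemSupplier.get()[0]); // {"id": 7}
    }
}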
Use of io.crate.operation.collect.CollectExpression in project crate by crate.
From class IndexWriterProjectorTest, method testIndexWriter:
@Test
public void testIndexWriter() throws Throwable {
    execute("create table bulk_import (id int primary key, name string) with (number_of_replicas=0)");
    ensureGreen();
    InputCollectExpression sourceInput = new InputCollectExpression(1);
    List<CollectExpression<Row, ?>> collectExpressions = Collections.<CollectExpression<Row, ?>>singletonList(sourceInput);
    IndexWriterProjector writerProjector = new IndexWriterProjector(
        internalCluster().getInstance(ClusterService.class),
        internalCluster().getInstance(Functions.class),
        new IndexNameExpressionResolver(Settings.EMPTY), Settings.EMPTY,
        internalCluster().getInstance(TransportBulkCreateIndicesAction.class),
        internalCluster().getInstance(TransportShardUpsertAction.class)::execute,
        IndexNameResolver.forTable(new TableIdent(null, "bulk_import")),
        internalCluster().getInstance(BulkRetryCoordinatorPool.class),
        new Reference(new ReferenceIdent(bulkImportIdent, DocSysColumns.RAW), RowGranularity.DOC, DataTypes.STRING),
        Arrays.asList(ID_IDENT), Arrays.<Symbol>asList(new InputColumn(0)),
        null, null, sourceInput, collectExpressions,
        20, null, null, false, false, UUID.randomUUID());
    BatchIterator rowsIterator = RowsBatchIterator.newInstance(
        IntStream.range(0, 100)
            .mapToObj(i -> new RowN(new Object[] { i, new BytesRef("{\"id\": " + i + ", \"name\": \"Arthur\"}") }))
            .collect(Collectors.toList()),
        2);
    TestingBatchConsumer consumer = new TestingBatchConsumer();
    consumer.accept(writerProjector.apply(rowsIterator), null);
    Bucket objects = consumer.getBucket();
    assertThat(objects, contains(isRow(100L)));
    execute("refresh table bulk_import");
    execute("select count(*) from bulk_import");
    assertThat(response.rowCount(), is(1L));
    assertThat(response.rows()[0][0], is(100L));
}
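The projector splits each incoming row: InputColumn(0) supplies the id for ID_IDENT, while the InputCollectExpression at index 1 supplies the raw JSON document mapped to DocSysColumns.RAW. A trivial plain-Java sketch of rows in that shape:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class SourceRowsSketch {
    public static void main(String[] args) {
        // Column 0 feeds the id (InputColumn(0) -> ID_IDENT); column 1 holds the
        // raw JSON source that the InputCollectExpression at index 1 reads.
        List<Object[]> rows = IntStream.range(0, 100)
            .mapToObj(i -> new Object[] { i, "{\"id\": " + i + ", \"name\": \"Arthur\"}" })
            .collect(Collectors.toList());
        System.out.println(rows.size());    // 100 rows, matching the isRow(100L) assertion
        System.out.println(rows.get(0)[1]); // {"id": 0, "name": "Arthur"}
    }
}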