
Example 6 with CollectExpression

Use of io.crate.operation.collect.CollectExpression in the crate project (crate/crate).

From the class RowTransformingBatchIteratorTest, method createInputs:

@Before
public void createInputs() throws Exception {
    InputFactory inputFactory = new InputFactory(getFunctions());
    InputFactory.Context<CollectExpression<Row, ?>> ctx = inputFactory.ctxForInputColumns();
    // add(INPUT(0), 2): read the first column of each incoming row and add the literal 2
    inputs = Collections.singletonList(ctx.add(AddFunction.of(new InputColumn(0), Literal.of(2L))));
    // the collect expressions that must be bound to each row before the inputs are read
    expressions = ctx.expressions();
}
Also used: InputFactory (io.crate.operation.InputFactory), InputColumn (io.crate.analyze.symbol.InputColumn), CollectExpression (io.crate.operation.collect.CollectExpression), Before (org.junit.Before)
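
For orientation, here is a minimal sketch of how such inputs and expressions are typically consumed per row. The setNextRow/value calls reflect the CollectExpression and Input contracts as used throughout these examples, but the surrounding loop and the Row1 helper are assumptions for illustration, not part of the original test.

// Hypothetical evaluation of one row against the inputs built in createInputs():
Row row = new Row1(40L);                                  // assumed single-column row helper
for (CollectExpression<Row, ?> expression : expressions) {
    expression.setNextRow(row);                           // bind each expression to the current row
}
Object result = inputs.get(0).value();                    // evaluates add(INPUT(0), 2) -> 42L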

Example 7 with CollectExpression

Use of io.crate.operation.collect.CollectExpression in the crate project (crate/crate).

From the class GroupingBytesRefCollectorBenchmark, method createGroupByMinBytesRefCollector:

private GroupingCollector createGroupByMinBytesRefCollector(Functions functions) {
    // the group key is read from the first input column
    InputCollectExpression keyInput = new InputCollectExpression(0);
    List<Input<?>> keyInputs = Arrays.<Input<?>>asList(keyInput);
    CollectExpression[] collectExpressions = new CollectExpression[] { keyInput };
    // look up the min aggregation for a single string argument
    FunctionIdent minBytesRefFuncIdent = new FunctionIdent(MinimumAggregation.NAME, Arrays.asList(DataTypes.STRING));
    FunctionInfo minBytesRefFuncInfo = new FunctionInfo(minBytesRefFuncIdent, DataTypes.INTEGER, FunctionInfo.Type.AGGREGATE);
    AggregationFunction minAgg = (AggregationFunction) functions.get(minBytesRefFuncIdent);
    // aggregate min(INPUT(0)) from raw rows (ITER step) to final values
    Aggregation aggregation = Aggregation.finalAggregation(minBytesRefFuncInfo, Arrays.asList(new InputColumn(0)), Aggregation.Step.ITER);
    Aggregator[] aggregators = new Aggregator[] { new Aggregator(RAM_ACCOUNTING_CONTEXT, aggregation, minAgg, new Input[] { keyInput }) };
    return GroupingCollector.singleKey(collectExpressions, aggregators, RAM_ACCOUNTING_CONTEXT, keyInputs.get(0), DataTypes.STRING);
}
Also used: AggregationFunction (io.crate.operation.aggregation.AggregationFunction), Aggregation (io.crate.analyze.symbol.Aggregation), MinimumAggregation (io.crate.operation.aggregation.impl.MinimumAggregation), FunctionIdent (io.crate.metadata.FunctionIdent), InputCollectExpression (io.crate.operation.collect.InputCollectExpression), InputColumn (io.crate.analyze.symbol.InputColumn), FunctionInfo (io.crate.metadata.FunctionInfo), Aggregator (io.crate.operation.aggregation.Aggregator), CollectExpression (io.crate.operation.collect.CollectExpression)
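
To see how such a collector might be driven in a benchmark, here is a hypothetical loop. It assumes GroupingCollector follows the java.util.stream.Collector<Row, ?, Iterable<Row>> contract; neither that interface nor the loop appears in the snippet above, so treat this purely as an illustrative sketch.

// Hypothetical driver: feed rows through the collector and materialize one output row per distinct key.
static Iterable<Row> drive(Collector<Row, Object, Iterable<Row>> collector, Iterable<Row> rows) {
    Object state = collector.supplier().get();             // assumed Collector contract
    for (Row row : rows) {
        collector.accumulator().accept(state, row);        // groups by the key input, feeds the min aggregation
    }
    return collector.finisher().apply(state);
}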

Example 8 with CollectExpression

Use of io.crate.operation.collect.CollectExpression in the crate project (crate/crate).

From the class SingleRowSource, method getCollector:

@Override
public CrateCollector getCollector(CollectPhase phase, BatchConsumer consumer, JobCollectContext jobCollectContext) {
    RoutedCollectPhase collectPhase = (RoutedCollectPhase) phase;
    // normalize the where clause; it must end up as either MATCH_ALL or NO_MATCH
    collectPhase = collectPhase.normalize(clusterNormalizer, null);
    if (collectPhase.whereClause().noMatch()) {
        return RowsCollector.empty(consumer);
    }
    assert !collectPhase.whereClause().hasQuery() : "WhereClause should have been normalized to either MATCH_ALL or NO_MATCH";
    InputFactory inputFactory = new InputFactory(functions);
    InputFactory.Context<CollectExpression<Row, ?>> ctx = inputFactory.ctxForInputColumns(collectPhase.toCollect());
    // emit exactly one row, built from the evaluated top-level inputs
    return RowsCollector.single(new InputRow(ctx.topLevelInputs()), consumer);
}
Also used: InputFactory (io.crate.operation.InputFactory), InputRow (io.crate.operation.InputRow), CollectExpression (io.crate.operation.collect.CollectExpression), RoutedCollectPhase (io.crate.planner.node.dql.RoutedCollectPhase)
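
For context, a minimal sketch of what the single emitted row amounts to when toCollect() holds only constant symbols (e.g. for a statement like select 1 + 1 executed on the handler node). The Row.get accessor and the claim that InputRow reads its inputs on access are assumptions based on the API names, not shown in the snippet above.

// Hypothetical: with no input columns there is nothing to bind, so the row can be read directly.
Row row = new InputRow(ctx.topLevelInputs());
Object firstColumn = row.get(0);   // assumed accessor: evaluates the first top-level input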

Example 9 with CollectExpression

Use of io.crate.operation.collect.CollectExpression in the crate project (crate/crate).

From the class IndexWriterCountBatchIteratorTest, method testIndexWriterIterator:

@Test
public void testIndexWriterIterator() throws Exception {
    execute("create table bulk_import (id int primary key) with (number_of_replicas=0)");
    ensureGreen();
    // source rows: 10 JSON documents {"id": 0} .. {"id": 9}, exposed as a BatchIterator
    Supplier<BatchIterator> sourceSupplier = () -> RowsBatchIterator.newInstance(
        RowGenerator.fromSingleColValues(
            () -> IntStream.range(0, 10).mapToObj(i -> new BytesRef("{\"id\": " + i + "}")).iterator()), 1);
    Supplier<String> indexNameResolver = IndexNameResolver.forTable(new TableIdent(null, "bulk_import"));
    // the raw source document is read from the first (and only) input column
    Input<?> sourceInput = new InputCollectExpression(0);
    List<CollectExpression<Row, ?>> collectExpressions = Collections.singletonList((InputCollectExpression) sourceInput);
    RowShardResolver rowShardResolver = getRowShardResolver();
    BulkShardProcessor bulkShardProcessor = getBulkShardProcessor();
    Supplier<ShardUpsertRequest.Item> updateItemSupplier = () -> new ShardUpsertRequest.Item(
        rowShardResolver.id(), null, new Object[] { sourceInput.value() }, null);
    // the iterator is expected to emit a single count row: 10 written documents
    List<Object[]> expectedResult = Collections.singletonList(new Object[] { 10L });
    BatchIteratorTester tester = new BatchIteratorTester(
        () -> IndexWriterCountBatchIterator.newIndexInstance(
            sourceSupplier.get(), indexNameResolver, collectExpressions,
            rowShardResolver, bulkShardProcessor, updateItemSupplier));
    tester.verifyResultAndEdgeCaseBehaviour(expectedResult);
}
Also used: CrateSettings (io.crate.metadata.settings.CrateSettings), TransportBulkCreateIndicesAction (org.elasticsearch.action.admin.indices.create.TransportBulkCreateIndicesAction), IntStream (java.util.stream.IntStream), Input (io.crate.data.Input), Arrays (java.util.Arrays), BulkShardProcessor (org.elasticsearch.action.bulk.BulkShardProcessor), InputColumn (io.crate.analyze.symbol.InputColumn), BatchIterator (io.crate.data.BatchIterator), BatchIteratorTester (io.crate.testing.BatchIteratorTester), Supplier (java.util.function.Supplier), SQLTransportIntegrationTest (io.crate.integrationtests.SQLTransportIntegrationTest), CollectExpression (io.crate.operation.collect.CollectExpression), Settings (org.elasticsearch.common.settings.Settings), Symbol (io.crate.analyze.symbol.Symbol), ClusterService (org.elasticsearch.cluster.ClusterService), io.crate.metadata (io.crate.metadata), DocSysColumns (io.crate.metadata.doc.DocSysColumns), RowsBatchIterator (io.crate.data.RowsBatchIterator), RowShardResolver (io.crate.operation.collect.RowShardResolver), BytesRef (org.apache.lucene.util.BytesRef), Test (org.junit.Test), UUID (java.util.UUID), RowGenerator (io.crate.testing.RowGenerator), ShardUpsertRequest (io.crate.executor.transport.ShardUpsertRequest), TransportShardUpsertAction (io.crate.executor.transport.TransportShardUpsertAction), List (java.util.List), Row (io.crate.data.Row), DataTypes (io.crate.types.DataTypes), BulkRetryCoordinatorPool (org.elasticsearch.action.bulk.BulkRetryCoordinatorPool), IndexNameExpressionResolver (org.elasticsearch.cluster.metadata.IndexNameExpressionResolver), InputCollectExpression (io.crate.operation.collect.InputCollectExpression), Collections (java.util.Collections)
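
The interesting wiring here is how each upsert item is derived from the current source row. A hypothetical per-row flow is sketched below; the setNextRow call on RowShardResolver and the ordering are assumptions about how IndexWriterCountBatchIterator uses these collaborators, not code from the test.

// Hypothetical per-row flow: bind the expressions, resolve shard routing, then build the upsert item.
Row row = new RowN(new Object[] { new BytesRef("{\"id\": 0}") });  // a representative source row
for (CollectExpression<Row, ?> expression : collectExpressions) {
    expression.setNextRow(row);                            // binds sourceInput to the current row
}
rowShardResolver.setNextRow(row);                          // assumed contract: derive id/routing from the row
ShardUpsertRequest.Item item = updateItemSupplier.get();   // captures rowShardResolver.id() and sourceInput.value()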

Example 10 with CollectExpression

Use of io.crate.operation.collect.CollectExpression in the crate project (crate/crate).

From the class IndexWriterProjectorTest, method testIndexWriter:

@Test
public void testIndexWriter() throws Throwable {
    execute("create table bulk_import (id int primary key, name string) with (number_of_replicas=0)");
    ensureGreen();
    // the raw source document is read from the second input column
    InputCollectExpression sourceInput = new InputCollectExpression(1);
    List<CollectExpression<Row, ?>> collectExpressions = Collections.<CollectExpression<Row, ?>>singletonList(sourceInput);
    IndexWriterProjector writerProjector = new IndexWriterProjector(
        internalCluster().getInstance(ClusterService.class), internalCluster().getInstance(Functions.class),
        new IndexNameExpressionResolver(Settings.EMPTY), Settings.EMPTY,
        internalCluster().getInstance(TransportBulkCreateIndicesAction.class),
        internalCluster().getInstance(TransportShardUpsertAction.class)::execute,
        IndexNameResolver.forTable(new TableIdent(null, "bulk_import")),
        internalCluster().getInstance(BulkRetryCoordinatorPool.class),
        new Reference(new ReferenceIdent(bulkImportIdent, DocSysColumns.RAW), RowGranularity.DOC, DataTypes.STRING),
        Arrays.asList(ID_IDENT), Arrays.<Symbol>asList(new InputColumn(0)), null, null,
        sourceInput, collectExpressions, 20, null, null, false, false, UUID.randomUUID());
    // 100 rows of (id, raw JSON source), consumed in batches of 2
    BatchIterator rowsIterator = RowsBatchIterator.newInstance(
        IntStream.range(0, 100)
            .mapToObj(i -> new RowN(new Object[] { i, new BytesRef("{\"id\": " + i + ", \"name\": \"Arthur\"}") }))
            .collect(Collectors.toList()), 2);
    TestingBatchConsumer consumer = new TestingBatchConsumer();
    consumer.accept(writerProjector.apply(rowsIterator), null);
    Bucket objects = consumer.getBucket();
    // the projector emits a single row containing the number of written documents
    assertThat(objects, contains(isRow(100L)));
    execute("refresh table bulk_import");
    execute("select count(*) from bulk_import");
    assertThat(response.rowCount(), is(1L));
    assertThat(response.rows()[0][0], is(100L));
}
Also used: CollectExpression (io.crate.operation.collect.CollectExpression), InputCollectExpression (io.crate.operation.collect.InputCollectExpression), BulkRetryCoordinatorPool (org.elasticsearch.action.bulk.BulkRetryCoordinatorPool), ClusterService (org.elasticsearch.cluster.ClusterService), InputColumn (io.crate.analyze.symbol.InputColumn), TestingBatchConsumer (io.crate.testing.TestingBatchConsumer), IndexNameExpressionResolver (org.elasticsearch.cluster.metadata.IndexNameExpressionResolver), TransportBulkCreateIndicesAction (org.elasticsearch.action.admin.indices.create.TransportBulkCreateIndicesAction), BytesRef (org.apache.lucene.util.BytesRef), SQLTransportIntegrationTest (io.crate.integrationtests.SQLTransportIntegrationTest), Test (org.junit.Test)

Aggregations

CollectExpression (io.crate.operation.collect.CollectExpression): 13 usages
InputColumn (io.crate.analyze.symbol.InputColumn): 6 usages
Input (io.crate.data.Input): 6 usages
Test (org.junit.Test): 6 usages
InputFactory (io.crate.operation.InputFactory): 5 usages
InputCollectExpression (io.crate.operation.collect.InputCollectExpression): 5 usages
Symbol (io.crate.analyze.symbol.Symbol): 4 usages
CrateUnitTest (io.crate.test.integration.CrateUnitTest): 4 usages
Row (io.crate.data.Row): 3 usages
RowN (io.crate.data.RowN): 3 usages
AddFunction (io.crate.operation.scalar.arithmetic.AddFunction): 3 usages
BytesRef (org.apache.lucene.util.BytesRef): 3 usages
TransportBulkCreateIndicesAction (org.elasticsearch.action.admin.indices.create.TransportBulkCreateIndicesAction): 3 usages
BulkRetryCoordinatorPool (org.elasticsearch.action.bulk.BulkRetryCoordinatorPool): 3 usages
IndexNameExpressionResolver (org.elasticsearch.cluster.metadata.IndexNameExpressionResolver): 3 usages
Aggregation (io.crate.analyze.symbol.Aggregation): 2 usages
BatchIterator (io.crate.data.BatchIterator): 2 usages
RowsBatchIterator (io.crate.data.RowsBatchIterator): 2 usages
SQLTransportIntegrationTest (io.crate.integrationtests.SQLTransportIntegrationTest): 2 usages
FunctionIdent (io.crate.metadata.FunctionIdent): 2 usages