Use of io.crate.operation.reference.doc.lucene.CollectorContext in project crate by crate.
From the class DocLevelExpressionsTest, method prepare:
@Before
public void prepare() throws Exception {
    // disable the field-data cache so every test reads fresh values
    Settings settings = Settings.builder()
        .put("index.fielddata.cache", "none")
        .build();
    IndexService indexService = createIndex("test", settings);
    ifd = indexService.fieldData();
    writer = new IndexWriter(
        new RAMDirectory(),
        new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()));
    insertValues(writer);
    // open an NRT reader on the writer (applyAllDeletes = true) and grab the first leaf
    DirectoryReader directoryReader = DirectoryReader.open(writer, true);
    readerContext = directoryReader.leaves().get(0);
    ctx = new CollectorContext(ifd, null);
}
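The prepared ifd, readerContext and ctx are what the individual test methods feed to doc-level expressions. A minimal sketch of that consumption, assuming the startCollect/setNextReader/setNextDocId lifecycle of Crate's LuceneCollectorExpression from this era and a hypothetical createExpression() helper:

// hypothetical test body: bind the context, position on a leaf and a doc, read the value
LuceneCollectorExpression<?> expression = createExpression();   // assumed helper, not part of the snippet above
expression.startCollect(ctx);            // binds the CollectorContext built in prepare()
expression.setNextReader(readerContext); // the single leaf obtained from the NRT reader
expression.setNextDocId(0);              // position on the first inserted document
assertThat(expression.value(), notNullValue());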
Use of io.crate.operation.reference.doc.lucene.CollectorContext in project crate by crate.
From the class OrderedLuceneBatchIteratorBenchmark, method createLuceneBatchIterator:
@Setup
public void createLuceneBatchIterator() throws Exception {
    IndexWriter iw = new IndexWriter(
        new RAMDirectory(),
        new IndexWriterConfig(new StandardAnalyzer()));
    dummyShardId = new ShardId("dummy", 1);
    columnName = "x";
    // index 10 million documents carrying a numeric doc-values column "x"
    for (int i = 0; i < 10_000_000; i++) {
        Document doc = new Document();
        doc.add(new NumericDocValuesField(columnName, i));
        iw.addDocument(doc);
    }
    iw.commit();
    // merge down to one segment so the benchmark measures iteration, not per-segment overhead
    iw.forceMerge(1, true);
    indexSearcher = new IndexSearcher(DirectoryReader.open(iw, true));
    collectorContext = new CollectorContext(mock(IndexFieldDataService.class), new CollectorFieldsVisitor(0));
    // resolve any column name to an integer field type
    fieldTypeLookup = column -> {
        IntegerFieldMapper.IntegerFieldType integerFieldType = new IntegerFieldMapper.IntegerFieldType();
        integerFieldType.setNames(new MappedFieldType.Names(column));
        return integerFieldType;
    };
    reference = new Reference(
        new ReferenceIdent(new TableIdent(null, "dummyTable"), columnName),
        RowGranularity.DOC,
        DataTypes.INTEGER);
    orderBy = new OrderBy(Collections.singletonList(reference), reverseFlags, nullsFirst);
}
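What the benchmark exercises downstream is sorted paging over that doc-values column. For reference, a plain-Lucene sketch of one such ordered batch, independent of Crate's iterator machinery (standard Lucene 5.x calls, matching the RAMDirectory and NumericDocValuesField usage above):

// top 5 docs ordered by the long doc-values column "x"
Sort sort = new Sort(new SortField(columnName, SortField.Type.LONG, false));
TopFieldDocs firstBatch = indexSearcher.search(new MatchAllDocsQuery(), 5, sort);
// the next batch continues after the last hit; repeated searchAfter calls are
// essentially what an ordered collector's "searchMore" round-trips amount to
ScoreDoc last = firstBatch.scoreDocs[firstBatch.scoreDocs.length - 1];
TopDocs nextBatch = indexSearcher.searchAfter(last, new MatchAllDocsQuery(), 5, sort);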
Use of io.crate.operation.reference.doc.lucene.CollectorContext in project crate by crate.
From the class OrderedLuceneBatchIteratorFactoryTest, method createOrderedCollector:
private LuceneOrderedDocCollector createOrderedCollector(IndexSearcher searcher, int shardId) {
    CollectorContext collectorContext = new CollectorContext(mock(IndexFieldDataService.class), new CollectorFieldsVisitor(0));
    // the same list serves as both top-level inputs and collector expressions below
    List<LuceneCollectorExpression<?>> expressions =
        Collections.singletonList(new OrderByCollectorExpression(reference, orderBy));
    return new LuceneOrderedDocCollector(
        new ShardId("dummy", shardId), searcher, new MatchAllDocsQuery(), null, false,
        5, // batchSize < 10 to have at least one searchMore call
        fieldTypeLookup, collectorContext, orderBy,
        new Sort(new SortedNumericSortField(columnName, SortField.Type.LONG, reverseFlags[0])),
        expressions, expressions);
}
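In the factory test this helper would typically be called once per simulated shard, giving the code under test several ordered per-shard streams to merge; hypothetical call sites, assuming two separately prepared searchers:

// hypothetical: one collector per simulated shard, each over its own searcher
LuceneOrderedDocCollector collector1 = createOrderedCollector(searcher1, 1);
LuceneOrderedDocCollector collector2 = createOrderedCollector(searcher2, 2);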
Use of io.crate.operation.reference.doc.lucene.CollectorContext in project crate by crate.
From the class LuceneBatchIteratorTest, method testLuceneBatchIterator:
@Test
public void testLuceneBatchIterator() throws Exception {
    BatchIteratorTester tester = new BatchIteratorTester(() -> new LuceneBatchIterator(
        indexSearcher,
        new MatchAllDocsQuery(),
        null,
        false,
        new CollectorContext(mock(IndexFieldDataService.class), new CollectorFieldsVisitor(0)),
        new RamAccountingContext("dummy", new NoopCircuitBreaker("dummy")),
        columnRefs,   // top-level inputs
        columnRefs)); // collector expressions (same objects in this test)
    tester.verifyResultAndEdgeCaseBehaviour(expectedResult);
}
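The columnRefs list appears twice because the iterator, like the ordered collector, seems to take both top-level inputs and per-doc expressions (compare ctx.topLevelInputs() and ctx.expressions() in LuceneShardCollectorProvider below); in this test they are the same objects. A sketch of how such a list might be built for a single integer column, assuming io.crate.operation.reference.doc.lucene.IntegerColumnReference with a column-name constructor:

// assumed: a collector expression reading integer doc values for column "x"
LuceneCollectorExpression<?> x = new IntegerColumnReference("x");
List<LuceneCollectorExpression<?>> columnRefs = Collections.singletonList(x);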
Use of io.crate.operation.reference.doc.lucene.CollectorContext in project crate by crate.
From the class LuceneShardCollectorProvider, method getOrderedCollector:
@Override
public OrderedDocCollector getOrderedCollector(RoutedCollectPhase phase,
                                               SharedShardContext sharedShardContext,
                                               JobCollectContext jobCollectContext,
                                               boolean requiresRepeat) {
    RoutedCollectPhase collectPhase = phase.normalize(shardNormalizer, null);
    CollectorContext collectorContext;
    InputFactory.Context<? extends LuceneCollectorExpression<?>> ctx;
    Engine.Searcher searcher = null;
    LuceneQueryBuilder.Context queryContext;
    try {
        searcher = sharedShardContext.acquireSearcher();
        IndexService indexService = sharedShardContext.indexService();
        queryContext = luceneQueryBuilder.convert(
            collectPhase.whereClause(),
            indexService.mapperService(),
            indexService.fieldData(),
            indexService.cache());
        jobCollectContext.addSearcher(sharedShardContext.readerId(), searcher);
        ctx = docInputFactory.extractImplementations(collectPhase);
        collectorContext = getCollectorContext(sharedShardContext.readerId(), ctx);
    } catch (Throwable t) {
        // close the acquired searcher on failure so it doesn't leak
        if (searcher != null) {
            searcher.close();
        }
        throw t;
    }
    int batchSize = collectPhase.shardQueueSize(localNodeId);
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("[{}][{}] creating LuceneOrderedDocCollector. Expected number of rows to be collected: {}",
            sharedShardContext.indexShard().routingEntry().currentNodeId(),
            sharedShardContext.indexShard().shardId(),
            batchSize);
    }
    return new LuceneOrderedDocCollector(
        indexShard.shardId(),
        searcher.searcher(),
        queryContext.query(),
        queryContext.minScore(),
        Symbols.containsColumn(collectPhase.toCollect(), DocSysColumns.SCORE),
        batchSize,
        fieldTypeLookup,
        collectorContext,
        collectPhase.orderBy(),
        LuceneSortGenerator.generateLuceneSort(collectorContext, collectPhase.orderBy(), docInputFactory, fieldTypeLookup),
        ctx.topLevelInputs(),
        ctx.expressions());
}