
Example 1 with LegacyFieldType

Use of org.apache.solr.legacy.LegacyFieldType in project lucene-solr by apache.

From the class TestNumericTerms64, the method beforeClass.

@BeforeClass
public static void beforeClass() throws Exception {
    noDocs = atLeast(4096);
    distance = (1L << 60) / noDocs;
    directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
            .setMergePolicy(newLogMergePolicy()));
    final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
    storedLong.setStored(true);
    storedLong.freeze();
    final LegacyFieldType storedLong8 = new LegacyFieldType(storedLong);
    storedLong8.setNumericPrecisionStep(8);
    final LegacyFieldType storedLong4 = new LegacyFieldType(storedLong);
    storedLong4.setNumericPrecisionStep(4);
    final LegacyFieldType storedLong6 = new LegacyFieldType(storedLong);
    storedLong6.setNumericPrecisionStep(6);
    final LegacyFieldType storedLong2 = new LegacyFieldType(storedLong);
    storedLong2.setNumericPrecisionStep(2);
    LegacyLongField field8 = new LegacyLongField("field8", 0L, storedLong8),
        field6 = new LegacyLongField("field6", 0L, storedLong6),
        field4 = new LegacyLongField("field4", 0L, storedLong4),
        field2 = new LegacyLongField("field2", 0L, storedLong2);
    Document doc = new Document();
    // add fields that have a distance, to test general functionality
    doc.add(field8);
    doc.add(field6);
    doc.add(field4);
    doc.add(field2);
    // Add a series of noDocs docs with increasing long values, by updating the fields
    for (int l = 0; l < noDocs; l++) {
        long val = distance * l + startOffset;
        field8.setLongValue(val);
        field6.setLongValue(val);
        field4.setLongValue(val);
        field2.setLongValue(val);
        val = l - (noDocs / 2); // note: this reassignment of val is never read before the next iteration
        writer.addDocument(doc);
    }
    Map<String, Type> map = new HashMap<>();
    map.put("field2", Type.LEGACY_LONG);
    map.put("field4", Type.LEGACY_LONG);
    map.put("field6", Type.LEGACY_LONG);
    map.put("field8", Type.LEGACY_LONG);
    reader = UninvertingReader.wrap(writer.getReader(), map);
    searcher = newSearcher(reader);
    writer.close();
}
Also used : Type (org.apache.solr.uninverting.UninvertingReader.Type), LegacyFieldType (org.apache.solr.legacy.LegacyFieldType), MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer), HashMap (java.util.HashMap), LegacyLongField (org.apache.solr.legacy.LegacyLongField), Document (org.apache.lucene.document.Document), RandomIndexWriter (org.apache.lucene.index.RandomIndexWriter), BeforeClass (org.junit.BeforeClass)
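For illustration, a minimal sketch (not part of TestNumericTerms64) of querying the searcher built above through the legacy trie terms; LegacyNumericRangeQuery, Query, and TopDocs are assumed imports, and the precisionStep argument must match the step the field was indexed with (8 for "field8").

public void testOpenRangeSketch() throws Exception {
    // Hypothetical check: an open-ended legacy range query should match every document,
    // since beforeClass adds a "field8" value to each of the noDocs documents.
    Query q = LegacyNumericRangeQuery.newLongRange("field8", 8, null, null, true, true);
    TopDocs top = searcher.search(q, noDocs);
    assertEquals(noDocs, top.totalHits);
}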

Example 2 with LegacyFieldType

Use of org.apache.solr.legacy.LegacyFieldType in project lucene-solr by apache.

From the class TestUninvertingReader, the method testSortedSetIntegerManyValues.

/** Tests {@link Type#SORTED_SET_INTEGER} using Integer based fields, with and w/o precision steps */
public void testSortedSetIntegerManyValues() throws IOException {
    final Directory dir = newDirectory();
    final IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
    final LegacyFieldType NO_TRIE_TYPE = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
    NO_TRIE_TYPE.setNumericPrecisionStep(Integer.MAX_VALUE);
    final Map<String, Type> UNINVERT_MAP = new LinkedHashMap<String, Type>();
    UNINVERT_MAP.put("notrie_single", Type.SORTED_SET_INTEGER);
    UNINVERT_MAP.put("notrie_multi", Type.SORTED_SET_INTEGER);
    UNINVERT_MAP.put("trie_single", Type.SORTED_SET_INTEGER);
    UNINVERT_MAP.put("trie_multi", Type.SORTED_SET_INTEGER);
    final Set<String> MULTI_VALUES = new LinkedHashSet<String>();
    MULTI_VALUES.add("trie_multi");
    MULTI_VALUES.add("notrie_multi");
    final int NUM_DOCS = TestUtil.nextInt(random(), 200, 1500);
    final int MIN = TestUtil.nextInt(random(), 10, 100);
    final int MAX = MIN + TestUtil.nextInt(random(), 10, 100);
    final long EXPECTED_VALSET_SIZE = 1 + MAX - MIN;
    {
        // (at least) one doc should have every value, so that at least one segment has every value
        final Document doc = new Document();
        for (int i = MIN; i <= MAX; i++) {
            doc.add(new LegacyIntField("trie_multi", i, Field.Store.NO));
            doc.add(new LegacyIntField("notrie_multi", i, NO_TRIE_TYPE));
        }
        iw.addDocument(doc);
    }
    // now add some more random docs (note: starting at i=1 because of previously added doc)
    for (int i = 1; i < NUM_DOCS; i++) {
        final Document doc = new Document();
        if (0 != TestUtil.nextInt(random(), 0, 9)) {
            int val = TestUtil.nextInt(random(), MIN, MAX);
            doc.add(new LegacyIntField("trie_single", val, Field.Store.NO));
            doc.add(new LegacyIntField("notrie_single", val, NO_TRIE_TYPE));
        }
        if (0 != TestUtil.nextInt(random(), 0, 9)) {
            int numMulti = atLeast(1);
            while (0 < numMulti--) {
                int val = TestUtil.nextInt(random(), MIN, MAX);
                doc.add(new LegacyIntField("trie_multi", val, Field.Store.NO));
                doc.add(new LegacyIntField("notrie_multi", val, NO_TRIE_TYPE));
            }
        }
        iw.addDocument(doc);
    }
    iw.close();
    final DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), UNINVERT_MAP);
    TestUtil.checkReader(ir);
    final int NUM_LEAVES = ir.leaves().size();
    // check the leaves: no more than the total set size
    for (LeafReaderContext rc : ir.leaves()) {
        final LeafReader ar = rc.reader();
        for (String f : UNINVERT_MAP.keySet()) {
            final SortedSetDocValues v = DocValues.getSortedSet(ar, f);
            final long valSetSize = v.getValueCount();
            assertTrue(f + ": Expected no more then " + EXPECTED_VALSET_SIZE + " values per segment, got " + valSetSize + " from: " + ar.toString(), valSetSize <= EXPECTED_VALSET_SIZE);
            if (1 == NUM_LEAVES && MULTI_VALUES.contains(f)) {
                // tighter check on multi fields in single segment index since we know one doc has all of them
                assertEquals(f + ": Single segment LeafReader's value set should have had exactly expected size", EXPECTED_VALSET_SIZE, valSetSize);
            }
        }
    }
    // check the composite of all leaves: exact expectation of set size
    final LeafReader composite = SlowCompositeReaderWrapper.wrap(ir);
    TestUtil.checkReader(composite);
    for (String f : MULTI_VALUES) {
        final SortedSetDocValues v = composite.getSortedSetDocValues(f);
        final long valSetSize = v.getValueCount();
        assertEquals(f + ": Composite reader value set should have had exactly expected size", EXPECTED_VALSET_SIZE, valSetSize);
    }
    ir.close();
    dir.close();
}
Also used : LinkedHashSet (java.util.LinkedHashSet), LeafReader (org.apache.lucene.index.LeafReader), LegacyFieldType (org.apache.solr.legacy.LegacyFieldType), DirectoryReader (org.apache.lucene.index.DirectoryReader), Document (org.apache.lucene.document.Document), IntPoint (org.apache.lucene.document.IntPoint), LinkedHashMap (java.util.LinkedHashMap), Type (org.apache.solr.uninverting.UninvertingReader.Type), DocValuesType (org.apache.lucene.index.DocValuesType), SortedSetDocValues (org.apache.lucene.index.SortedSetDocValues), IndexWriter (org.apache.lucene.index.IndexWriter), LeafReaderContext (org.apache.lucene.index.LeafReaderContext), Directory (org.apache.lucene.store.Directory), LegacyIntField (org.apache.solr.legacy.LegacyIntField)
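As a hedged follow-up sketch (not in the test itself), the uninverted values can be read back from a leaf before ir is closed; the decode assumes the ordinals map to LegacyIntField's full-precision prefix-coded terms, and DocIdSetIterator and LegacyNumericUtils are assumed imports.

LeafReader leaf = ir.leaves().get(0).reader();
SortedSetDocValues dv = DocValues.getSortedSet(leaf, "notrie_multi");
while (dv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
    // walk this document's ordinals and decode each term back to the original int
    for (long ord = dv.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = dv.nextOrd()) {
        int value = LegacyNumericUtils.prefixCodedToInt(dv.lookupOrd(ord));
        System.out.println("doc=" + dv.docID() + " value=" + value);
    }
}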

Example 3 with LegacyFieldType

Use of org.apache.solr.legacy.LegacyFieldType in project lucene-solr by apache.

From the class BBoxField, the method newSpatialStrategy.

@Override
protected BBoxStrategy newSpatialStrategy(String fieldName) {
    //if it's a dynamic field, we register the sub-fields now.
    FieldType numberType = schema.getFieldTypeByName(numberTypeName);
    FieldType booleanType = schema.getFieldTypeByName(booleanTypeName);
    if (schema.isDynamicField(fieldName)) {
        registerSubFields(schema, fieldName, numberType, booleanType);
    }
    //Solr's FieldType ought to expose Lucene FieldType. Instead as a hack we create a Field with a dummy value.
    //dummy temp
    final SchemaField solrNumField = new SchemaField("_", numberType);
    org.apache.lucene.document.FieldType luceneType = (org.apache.lucene.document.FieldType) solrNumField.createField(0.0).fieldType();
    luceneType.setStored(storeSubFields);
    //and annoyingly this Field isn't going to have a docValues format because Solr uses a separate Field for that
    if (solrNumField.hasDocValues()) {
        if (luceneType instanceof LegacyFieldType) {
            luceneType = new LegacyFieldType((LegacyFieldType) luceneType);
        } else {
            luceneType = new org.apache.lucene.document.FieldType(luceneType);
        }
        luceneType.setDocValuesType(DocValuesType.NUMERIC);
    }
    return new BBoxStrategy(ctx, fieldName, luceneType);
}
Also used : BBoxStrategy (org.apache.solr.legacy.BBoxStrategy), LegacyFieldType (org.apache.solr.legacy.LegacyFieldType)
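As a hedged illustration (the field name and coordinates are placeholders, not taken from BBoxField), the returned strategy can turn a spatial4j Rectangle into the Lucene fields backing the bounding-box sub-fields; Rectangle and Field are assumed imports, and the first call only compiles inside BBoxField because newSpatialStrategy is protected.

BBoxStrategy strategy = newSpatialStrategy("store_bbox");
// build a box with the strategy's own SpatialContext and index the fields it generates
Rectangle bbox = strategy.getSpatialContext().makeRectangle(-10.0, 10.0, -5.0, 5.0);
Document doc = new Document();
for (Field f : strategy.createIndexableFields(bbox)) {
    doc.add(f);
}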

Example 4 with LegacyFieldType

Use of org.apache.solr.legacy.LegacyFieldType in project lucene-solr by apache.

From the class SpatialPointVectorFieldType, the method newSpatialStrategy.

@Override
protected PointVectorStrategy newSpatialStrategy(String fieldName) {
    // TODO update to how BBoxField does things
    if (this.getNumberType() != null) {
        // create strategy based on legacy numerics
        // todo remove in 7.0
        LegacyFieldType fieldType = new LegacyFieldType(PointVectorStrategy.LEGACY_FIELDTYPE);
        fieldType.setNumericPrecisionStep(precisionStep);
        return new PointVectorStrategy(ctx, fieldName, fieldType);
    } else {
        return PointVectorStrategy.newInstance(ctx, fieldName);
    }
}
Also used : PointVectorStrategy (org.apache.solr.legacy.PointVectorStrategy), LegacyFieldType (org.apache.solr.legacy.LegacyFieldType)
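For the query side, a hedged sketch (field name and box are placeholders): the strategy builds a query over its x/y sub-fields from a SpatialArgs intersects operation; SpatialArgs, SpatialOperation, and Query are assumed imports, and the first call only compiles inside the field type because newSpatialStrategy is protected.

PointVectorStrategy strategy = newSpatialStrategy("store_pt");
// an Intersects operation against a rectangle becomes range queries on the x and y sub-fields
SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects,
    strategy.getSpatialContext().makeRectangle(-74.1, -73.7, 40.5, 41.0));
Query boxQuery = strategy.makeQuery(args);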

Example 5 with LegacyFieldType

Use of org.apache.solr.legacy.LegacyFieldType in project lucene-solr by apache.

From the class TestNumericTerms32, the method beforeClass.

@BeforeClass
public static void beforeClass() throws Exception {
    noDocs = atLeast(4096);
    distance = (1 << 30) / noDocs;
    directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
            .setMergePolicy(newLogMergePolicy()));
    final LegacyFieldType storedInt = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
    storedInt.setStored(true);
    storedInt.freeze();
    final LegacyFieldType storedInt8 = new LegacyFieldType(storedInt);
    storedInt8.setNumericPrecisionStep(8);
    final LegacyFieldType storedInt4 = new LegacyFieldType(storedInt);
    storedInt4.setNumericPrecisionStep(4);
    final LegacyFieldType storedInt2 = new LegacyFieldType(storedInt);
    storedInt2.setNumericPrecisionStep(2);
    LegacyIntField field8 = new LegacyIntField("field8", 0, storedInt8),
        field4 = new LegacyIntField("field4", 0, storedInt4),
        field2 = new LegacyIntField("field2", 0, storedInt2);
    Document doc = new Document();
    // add fields that have a distance, to test general functionality
    doc.add(field8);
    doc.add(field4);
    doc.add(field2);
    // Add a series of noDocs docs with increasing int values
    for (int l = 0; l < noDocs; l++) {
        int val = distance * l + startOffset;
        field8.setIntValue(val);
        field4.setIntValue(val);
        field2.setIntValue(val);
        val = l - (noDocs / 2); // note: this reassignment of val is never read before the next iteration
        writer.addDocument(doc);
    }
    Map<String, Type> map = new HashMap<>();
    map.put("field2", Type.LEGACY_INTEGER);
    map.put("field4", Type.LEGACY_INTEGER);
    map.put("field8", Type.LEGACY_INTEGER);
    reader = UninvertingReader.wrap(writer.getReader(), map);
    searcher = newSearcher(reader);
    writer.close();
}
Also used : Type (org.apache.solr.uninverting.UninvertingReader.Type), LegacyFieldType (org.apache.solr.legacy.LegacyFieldType), MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer), HashMap (java.util.HashMap), Document (org.apache.lucene.document.Document), RandomIndexWriter (org.apache.lucene.index.RandomIndexWriter), LegacyIntField (org.apache.solr.legacy.LegacyIntField), BeforeClass (org.junit.BeforeClass)
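A minimal sketch (not part of TestNumericTerms32) of why the UninvertingReader mapping matters: because "field4" is mapped to Type.LEGACY_INTEGER, the wrapped reader exposes numeric doc values, so an ordinary numeric sort works against the legacy trie field; Sort, SortField, MatchAllDocsQuery, and TopDocs are assumed imports.

public void testSortSketch() throws Exception {
    Sort sort = new Sort(new SortField("field4", SortField.Type.INT));
    // any query works here; MatchAllDocsQuery simply exercises the sort across all documents
    TopDocs top = searcher.search(new MatchAllDocsQuery(), 10, sort);
    assertEquals(10, top.scoreDocs.length);
}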

Aggregations

LegacyFieldType (org.apache.solr.legacy.LegacyFieldType): 7 uses
LegacyIntField (org.apache.solr.legacy.LegacyIntField): 4 uses
Document (org.apache.lucene.document.Document): 3 uses
Type (org.apache.solr.uninverting.UninvertingReader.Type): 3 uses
HashMap (java.util.HashMap): 2 uses
MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer): 2 uses
RandomIndexWriter (org.apache.lucene.index.RandomIndexWriter): 2 uses
SolrException (org.apache.solr.common.SolrException): 2 uses
LegacyLongField (org.apache.solr.legacy.LegacyLongField): 2 uses
BeforeClass (org.junit.BeforeClass): 2 uses
Date (java.util.Date): 1 use
LinkedHashMap (java.util.LinkedHashMap): 1 use
LinkedHashSet (java.util.LinkedHashSet): 1 use
IntPoint (org.apache.lucene.document.IntPoint): 1 use
DirectoryReader (org.apache.lucene.index.DirectoryReader): 1 use
DocValuesType (org.apache.lucene.index.DocValuesType): 1 use
IndexWriter (org.apache.lucene.index.IndexWriter): 1 use
LeafReader (org.apache.lucene.index.LeafReader): 1 use
LeafReaderContext (org.apache.lucene.index.LeafReaderContext): 1 use
SortedSetDocValues (org.apache.lucene.index.SortedSetDocValues): 1 use