Example 36 with SolrInputDocument

Use of org.apache.solr.common.SolrInputDocument in project lucene-solr by apache.

From the class MetricUtils, method toSolrInputDocuments.

/**
   * Provides a representation of the given metric registry as {@link SolrInputDocument}-s.
   * Only metrics that match at least one of the given MetricFilter instances are converted.
   *
   * @param registry      the {@link MetricRegistry} to be converted
   * @param shouldMatchFilters a list of {@link MetricFilter} instances.
   *                           A metric must match <em>any one</em> of the filters from this list to be
   *                           included in the output
   * @param mustMatchFilter a {@link MetricFilter}.
   *                        A metric <em>must</em> match this filter to be included in the output.
   * @param propertyFilter limit what properties of a metric are returned
   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
   * @param compact use compact representation for counters and gauges.
   * @param metadata optional metadata. If not null and not empty then this map will be added under a
   *                 {@code _metadata_} key.
   * @param consumer consumer that accepts produced {@link SolrInputDocument}-s
   */
public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters, MetricFilter mustMatchFilter, PropertyFilter propertyFilter, boolean skipHistograms, boolean skipAggregateValues, boolean compact, Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
    boolean addMetadata = metadata != null && !metadata.isEmpty();
    toMaps(registry, shouldMatchFilters, mustMatchFilter, propertyFilter, skipHistograms, skipAggregateValues, compact, false, (k, v) -> {
        SolrInputDocument doc = new SolrInputDocument();
        doc.setField(METRIC_NAME, k);
        toSolrInputDocument(null, doc, v);
        if (addMetadata) {
            toSolrInputDocument(null, doc, metadata);
        }
        consumer.accept(doc);
    });
}
Also used : SolrInputDocument(org.apache.solr.common.SolrInputDocument)
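
For orientation, here is a minimal sketch of how this method might be invoked to collect the produced documents into a list. The registry contents, the everything-passes filters, and the lambda property filter are illustrative assumptions, not part of the Solr source above:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.util.stats.MetricUtils;

public class MetricDocsSketch {
    public static void main(String[] args) {
        // Hypothetical registry with a single counter to convert.
        MetricRegistry registry = new MetricRegistry();
        registry.counter("requests").inc();

        List<SolrInputDocument> docs = new ArrayList<>();
        MetricUtils.toSolrInputDocuments(
            registry,
            Collections.singletonList(MetricFilter.ALL), // shouldMatchFilters: match everything
            MetricFilter.ALL,                            // mustMatchFilter: match everything
            name -> true,                                // propertyFilter: keep all properties (assumed lambda-compatible)
            false,                                       // skipHistograms
            false,                                       // skipAggregateValues
            true,                                        // compact counters and gauges
            null,                                        // no extra _metadata_ fields
            docs::add);                                  // consumer collects each document
        System.out.println(docs);
    }
}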

Example 37 with SolrInputDocument

Use of org.apache.solr.common.SolrInputDocument in project lucene-solr by apache.

From the class TestHighlightDedupGrouping, method addDoc.

private void addDoc(int docid, int group, int shard) throws IOException, SolrServerException {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField(id, docid);
    // string copy of the id for highlighting
    doc.addField(id_s1, docid);
    doc.addField(group_ti1, group);
    doc.addField(shard_i1, shard);
    clients.get(shard).add(doc);
}
Also used : SolrInputDocument(org.apache.solr.common.SolrInputDocument)
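
For context, a helper like this is typically driven from the body of a distributed test; a hypothetical sketch follows (the document count and grouping scheme are made up, and commit() is assumed to be the distributed-test commit of the surrounding harness):

// Inside a test method: spread documents across shards so the same group
// shows up on several shards, which is what highlight dedup grouping exercises.
int numShards = clients.size();
for (int docid = 0; docid < 10; docid++) {
    addDoc(docid, docid % 2, docid % numShards); // two groups, round-robin shards
}
commit(); // make the documents visible on all shards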

Example 38 with SolrInputDocument

Use of org.apache.solr.common.SolrInputDocument in project lucene-solr by apache.

From the class CursorPagingTest, method buildRandomDocument.

/**
   * Creates a document with randomized field values, some of which may be missing,
   * and some of which will be skewed so that small subsets of the ranges will be
   * more common (resulting in an increased likelihood of duplicate values).
   * 
   * @see #buildRandomQuery
   */
public static SolrInputDocument buildRandomDocument(int id) {
    SolrInputDocument doc = sdoc("id", id);
    // (hopefully with lots of duplication)
    if (useField()) {
        doc.addField("int", skewed(random().nextInt(), TestUtil.nextInt(random(), 20, 50)));
    }
    if (useField()) {
        doc.addField("long", skewed(random().nextLong(), TestUtil.nextInt(random(), 5000, 5100)));
    }
    if (useField()) {
        doc.addField("float", skewed(random().nextFloat() * random().nextInt(), 1.0F / random().nextInt(23)));
    }
    if (useField()) {
        doc.addField("double", skewed(random().nextDouble() * random().nextInt(), 1.0D / random().nextInt(37)));
    }
    if (useField()) {
        doc.addField("str", skewed(randomXmlUsableUnicodeString(), TestUtil.randomSimpleString(random(), 1, 1)));
    }
    if (useField()) {
        int numBytes = (int) skewed(TestUtil.nextInt(random(), 20, 50), 2);
        byte[] randBytes = new byte[numBytes];
        random().nextBytes(randBytes);
        doc.addField("bin", ByteBuffer.wrap(randBytes));
    }
    if (useField()) {
        doc.addField("date", skewed(randomDate(), randomSkewedDate()));
    }
    if (useField()) {
        doc.addField("uuid", UUID.randomUUID().toString());
    }
    if (useField()) {
        doc.addField("currency", skewed("" + (random().nextInt() / 100.) + "," + randomCurrency(), "" + TestUtil.nextInt(random(), 250, 320) + ",USD"));
    }
    if (useField()) {
        doc.addField("bool", random().nextBoolean() ? "t" : "f");
    }
    if (useField()) {
        doc.addField("enum", randomEnumValue());
    }
    return doc;
}
Also used : SolrInputDocument(org.apache.solr.common.SolrInputDocument)
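
In practice such documents are indexed in bulk before a cursor test runs; a hedged sketch using the SolrTestCaseJ4 harness (the numDocs value is made up, and the adoc(SolrInputDocument) overload is assumed to be available):

// Inside a test method: build and index a randomized corpus, then commit once.
final int numDocs = 100;
for (int id = 1; id <= numDocs; id++) {
    assertU(adoc(buildRandomDocument(id)));
}
assertU(commit());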

Example 39 with SolrInputDocument

Use of org.apache.solr.common.SolrInputDocument in project lucene-solr by apache.

From the class TestChildDocTransformer, method createIndex.

private static void createIndex(String[] titleVals) {
    String[] parentIDS = new String[] { "1", "4" };
    String[] childDocIDS = new String[] { "2", "5" };
    String[] grandChildIDS = new String[] { "3", "6" };
    for (int i = 0; i < parentIDS.length; i++) {
        SolrInputDocument parentDocument = new SolrInputDocument();
        parentDocument.addField(ID_FIELD, parentIDS[i]);
        parentDocument.addField("subject", "parentDocument");
        parentDocument.addField("title", titleVals[i]);
        SolrInputDocument childDocument = new SolrInputDocument();
        childDocument.addField(ID_FIELD, childDocIDS[i]);
        childDocument.addField("cat", "childDocument");
        childDocument.addField("title", titleVals[i]);
        SolrInputDocument grandChildDocument = new SolrInputDocument();
        grandChildDocument.addField(ID_FIELD, grandChildIDS[i]);
        childDocument.addChildDocument(grandChildDocument);
        parentDocument.addChildDocument(childDocument);
        try {
            Long version = addAndGetVersion(parentDocument, null);
            assertNotNull(version);
        } catch (Exception e) {
            fail("Failed to add document to the index");
        }
        if (random().nextBoolean()) {
            assertU(commit());
        }
    }
    assertU(commit());
    assertQ(req("q", "*:*"), "//*[@numFound='" + (parentIDS.length + childDocIDS.length + grandChildIDS.length) + "']");
}
Also used : SolrInputDocument(org.apache.solr.common.SolrInputDocument)
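
The nested structure only pays off when it is read back through the [child] doc transformer; a hedged sketch of such a query against the index built above (the fl syntax is standard Solr, but the real test's assertions may differ):

// Ask for the two parents and re-attach their descendants via [child].
assertQ(req("q", "subject:parentDocument",
        "fl", "*,[child parentFilter=subject:parentDocument]"),
    "//*[@numFound='2']");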

Example 40 with SolrInputDocument

Use of org.apache.solr.common.SolrInputDocument in project lucene-solr by apache.

From the class TestRealTimeGet, method testStressGetRealtime.

/***
    @Test
    public void testGetRealtime() throws Exception {
      SolrQueryRequest sr1 = req("q","foo");
      IndexReader r1 = sr1.getCore().getRealtimeReader();

      assertU(adoc("id","1"));

      IndexReader r2 = sr1.getCore().getRealtimeReader();
      assertNotSame(r1, r2);
      int refcount = r2.getRefCount();

      // make sure a new reader wasn't opened
      IndexReader r3 = sr1.getCore().getRealtimeReader();
      assertSame(r2, r3);
      assertEquals(refcount+1, r3.getRefCount());

      assertU(commit());

      // this is not critical, but currently a commit does not refresh the reader
      // if nothing has changed
      IndexReader r4 = sr1.getCore().getRealtimeReader();
      assertEquals(refcount+2, r4.getRefCount());


      r1.decRef();
      r2.decRef();
      r3.decRef();
      r4.decRef();
      sr1.close();
    }
    ***/
@Test
public void testStressGetRealtime() throws Exception {
    clearIndex();
    assertU(commit());
    // req().getCore().getUpdateHandler().getIndexWriterProvider().getIndexWriter(req().getCore()).setInfoStream(System.out);
    final int commitPercent = 5 + random().nextInt(20);
    // what percent of the commits are soft
    final int softCommitPercent = 30 + random().nextInt(75);
    final int deletePercent = 4 + random().nextInt(25);
    final int deleteByQueryPercent = 1 + random().nextInt(5);
    // percent chance that an update uses optimistic locking
    final int optimisticPercent = 1 + random().nextInt(50);
    // percent chance that a specified version will be correct
    final int optimisticCorrectPercent = 25 + random().nextInt(70);
    // percent of the time that a get will be filtered... we normally don't want this too high.
    final int filteredGetPercent = random().nextInt(random().nextInt(20) + 1);
    final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
    int nWriteThreads = 5 + random().nextInt(25);
    // number of committers at a time...
    final int maxConcurrentCommits = nWriteThreads;
    // query variables
    final int percentRealtimeQuery = 60;
    // number of query operations to perform in total
    final AtomicLong operations = new AtomicLong(50000);
    int nReadThreads = 5 + random().nextInt(25);
    verbose("commitPercent=", commitPercent);
    verbose("softCommitPercent=", softCommitPercent);
    verbose("deletePercent=", deletePercent);
    verbose("deleteByQueryPercent=", deleteByQueryPercent);
    verbose("ndocs=", ndocs);
    verbose("nWriteThreads=", nWriteThreads);
    verbose("nReadThreads=", nReadThreads);
    verbose("percentRealtimeQuery=", percentRealtimeQuery);
    verbose("maxConcurrentCommits=", maxConcurrentCommits);
    verbose("operations=", operations);
    initModel(ndocs);
    final AtomicInteger numCommitting = new AtomicInteger();
    List<Thread> threads = new ArrayList<>();
    for (int i = 0; i < nWriteThreads; i++) {
        Thread thread = new Thread("WRITER" + i) {

            Random rand = new Random(random().nextInt());

            @Override
            public void run() {
                try {
                    while (operations.get() > 0) {
                        int oper = rand.nextInt(100);
                        if (oper < commitPercent) {
                            if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
                                Map<Integer, DocInfo> newCommittedModel;
                                long version;
                                synchronized (TestRealTimeGet.this) {
                                    // take a snapshot
                                    newCommittedModel = new HashMap<>(model);
                                    version = snapshotCount++;
                                    verbose("took snapshot version=", version);
                                }
                                if (rand.nextInt(100) < softCommitPercent) {
                                    verbose("softCommit start");
                                    assertU(TestHarness.commit("softCommit", "true"));
                                    verbose("softCommit end");
                                } else {
                                    verbose("hardCommit start");
                                    assertU(commit());
                                    verbose("hardCommit end");
                                }
                                synchronized (TestRealTimeGet.this) {
                                    // install this model snapshot only if it's newer than the current one
                                    if (version >= committedModelClock) {
                                        if (VERBOSE) {
                                            verbose("installing new committedModel version=" + committedModelClock);
                                        }
                                        committedModel = newCommittedModel;
                                        committedModelClock = version;
                                    }
                                }
                            }
                            numCommitting.decrementAndGet();
                            continue;
                        }
                        int id = rand.nextInt(ndocs);
                        Object sync = syncArr[id];
                        // set the lastId before we actually change it sometimes to try and
                        // uncover more race conditions between writing and reading
                        boolean before = rand.nextBoolean();
                        if (before) {
                            lastId = id;
                        }
                        // Even with versions, we can't remove the sync because increasing versions does not mean increasing vals.
                        synchronized (sync) {
                            DocInfo info = model.get(id);
                            long val = info.val;
                            long nextVal = Math.abs(val) + 1;
                            if (oper < commitPercent + deletePercent) {
                                boolean opt = rand.nextInt(100) < optimisticPercent;
                                boolean correct = opt ? rand.nextInt(100) < optimisticCorrectPercent : false;
                                long badVersion = correct ? 0 : badVersion(rand, info.version);
                                if (VERBOSE) {
                                    if (!opt) {
                                        verbose("deleting id", id, "val=", nextVal);
                                    } else {
                                        verbose("deleting id", id, "val=", nextVal, "existing_version=", info.version, (correct ? "" : (" bad_version=" + badVersion)));
                                    }
                                }
                                // assertU("<delete><id>" + id + "</id></delete>");
                                Long version = null;
                                if (opt) {
                                    if (correct) {
                                        version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)));
                                    } else {
                                        try {
                                            version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(badVersion)));
                                            fail();
                                        } catch (SolrException se) {
                                            assertEquals(409, se.code());
                                        }
                                    }
                                } else {
                                    version = deleteAndGetVersion(Integer.toString(id), null);
                                }
                                if (version != null) {
                                    model.put(id, new DocInfo(version, -nextVal));
                                }
                                if (VERBOSE) {
                                    verbose("deleting id", id, "val=", nextVal, "DONE");
                                }
                            } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
                                if (VERBOSE) {
                                    verbose("deleteByQuery id ", id, "val=", nextVal);
                                }
                                assertU("<delete><query>id:" + id + "</query></delete>");
                                model.put(id, new DocInfo(-1L, -nextVal));
                                if (VERBOSE) {
                                    verbose("deleteByQuery id", id, "val=", nextVal, "DONE");
                                }
                            } else {
                                boolean opt = rand.nextInt(100) < optimisticPercent;
                                boolean correct = opt ? rand.nextInt(100) < optimisticCorrectPercent : false;
                                long badVersion = correct ? 0 : badVersion(rand, info.version);
                                if (VERBOSE) {
                                    if (!opt) {
                                        verbose("adding id", id, "val=", nextVal);
                                    } else {
                                        verbose("adding id", id, "val=", nextVal, "existing_version=", info.version, (correct ? "" : (" bad_version=" + badVersion)));
                                    }
                                }
                                Long version = null;
                                SolrInputDocument sd = sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal));
                                if (opt) {
                                    if (correct) {
                                        version = addAndGetVersion(sd, params("_version_", Long.toString(info.version)));
                                    } else {
                                        try {
                                            version = addAndGetVersion(sd, params("_version_", Long.toString(badVersion)));
                                            fail();
                                        } catch (SolrException se) {
                                            assertEquals(409, se.code());
                                        }
                                    }
                                } else {
                                    version = addAndGetVersion(sd, null);
                                }
                                if (version != null) {
                                    model.put(id, new DocInfo(version, nextVal));
                                }
                                if (VERBOSE) {
                                    verbose("adding id", id, "val=", nextVal, "DONE");
                                }
                            }
                        }
                        if (!before) {
                            lastId = id;
                        }
                    }
                } catch (Throwable e) {
                    operations.set(-1L);
                    throw new RuntimeException(e);
                }
            }
        };
        threads.add(thread);
    }
    for (int i = 0; i < nReadThreads; i++) {
        Thread thread = new Thread("READER" + i) {

            Random rand = new Random(random().nextInt());

            @Override
            public void run() {
                try {
                    while (operations.decrementAndGet() >= 0) {
                        // bias toward a recently changed doc
                        int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
                        // when indexing, we update the index, then the model
                        // so when querying, we should first check the model, and then the index
                        boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
                        DocInfo info;
                        if (realTime) {
                            info = model.get(id);
                        } else {
                            synchronized (TestRealTimeGet.this) {
                                info = committedModel.get(id);
                            }
                        }
                        if (VERBOSE) {
                            verbose("querying id", id);
                        }
                        boolean filteredOut = false;
                        SolrQueryRequest sreq;
                        if (realTime) {
                            ModifiableSolrParams p = params("wt", "json", "qt", "/get", "ids", Integer.toString(id));
                            if (rand.nextInt(100) < filteredGetPercent) {
                                int idToFilter = rand.nextBoolean() ? id : rand.nextInt(ndocs);
                                filteredOut = idToFilter != id;
                                p.add("fq", "id:" + idToFilter);
                            }
                            sreq = req(p);
                        } else {
                            sreq = req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true");
                        }
                        String response = h.query(sreq);
                        Map rsp = (Map) ObjectBuilder.fromJSON(response);
                        List doclist = (List) (((Map) rsp.get("response")).get("docs"));
                        if (doclist.size() == 0) {
                        // there's no info we can get back with a delete, so not much we can check without further synchronization
                        // This is also correct when filteredOut==true
                        } else {
                            assertEquals(1, doclist.size());
                            long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD));
                            long foundVer = (Long) (((Map) doclist.get(0)).get("_version_"));
                            if (filteredOut || foundVal < Math.abs(info.val) || (foundVer == info.version && foundVal != info.val)) {
                                // if the version matches, the val must match too
                                verbose("ERROR, id=", id, "found=", response, "model", info);
                                assertTrue(false);
                            }
                        }
                    }
                } catch (Throwable e) {
                    operations.set(-1L);
                    throw new RuntimeException(e);
                }
            }
        };
        threads.add(thread);
    }
    for (Thread thread : threads) {
        thread.start();
    }
    for (Thread thread : threads) {
        thread.join();
    }
}
Also used : ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) Random(java.util.Random) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicLong(java.util.concurrent.atomic.AtomicLong) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) SolrInputDocument(org.apache.solr.common.SolrInputDocument) SolrException(org.apache.solr.common.SolrException) SolrQueryRequest(org.apache.solr.request.SolrQueryRequest) Test(org.junit.Test)
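
The optimistic-locking branches above exercise Solr's standard _version_ contract: an update or delete carrying a stale version is rejected with HTTP 409 (Conflict). A hedged stand-alone SolrJ sketch of the same pattern (the client, field names, and retry policy are hypothetical, and which SolrException subclass surfaces the conflict depends on the client implementation):

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

public class ConditionalUpdateSketch {
    // Succeeds only if the stored _version_ still equals knownVersion.
    static void conditionalUpdate(SolrClient client, long knownVersion) throws Exception {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "42");
        doc.addField("val_l", 7L);
        doc.addField("_version_", knownVersion); // stale value => 409 conflict
        try {
            client.add(doc);
            client.commit();
        } catch (SolrException e) {
            if (e.code() != 409) throw e;
            // 409: someone updated the document first; re-read and retry.
        }
    }
}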

Aggregations

SolrInputDocument (org.apache.solr.common.SolrInputDocument) 520
Test (org.junit.Test) 166
ArrayList (java.util.ArrayList) 98
UpdateRequest (org.apache.solr.client.solrj.request.UpdateRequest) 76
QueryResponse (org.apache.solr.client.solrj.response.QueryResponse) 69
HttpSolrClient (org.apache.solr.client.solrj.impl.HttpSolrClient) 63
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams) 55
SolrQuery (org.apache.solr.client.solrj.SolrQuery) 54
IOException (java.io.IOException) 47
SolrException (org.apache.solr.common.SolrException) 47
IndexSchema (org.apache.solr.schema.IndexSchema) 41
AddUpdateCommand (org.apache.solr.update.AddUpdateCommand) 41
HashMap (java.util.HashMap) 39
SolrQueryRequest (org.apache.solr.request.SolrQueryRequest) 34
List (java.util.List) 33
SolrServerException (org.apache.solr.client.solrj.SolrServerException) 32
SolrDocument (org.apache.solr.common.SolrDocument) 31
NamedList (org.apache.solr.common.util.NamedList) 31
Map (java.util.Map) 30
CloudSolrClient (org.apache.solr.client.solrj.impl.CloudSolrClient) 29