
Example 11 with StreamFactory

Use of org.apache.solr.client.solrj.io.stream.expr.StreamFactory in project lucene-solr by apache.

From the class StreamExpressionTest, method testStatsStream.

@Test
public void testStatsStream() throws Exception {
    new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1").add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4").add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5").add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6").add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7").add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8").add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9").add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamFactory factory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()).withFunctionName("stats", StatsStream.class).withFunctionName("sum", SumMetric.class).withFunctionName("min", MinMetric.class).withFunctionName("max", MaxMetric.class).withFunctionName("avg", MeanMetric.class).withFunctionName("count", CountMetric.class);
    StreamExpression expression;
    TupleStream stream;
    List<Tuple> tuples;
    StreamContext streamContext = new StreamContext();
    SolrClientCache cache = new SolrClientCache();
    try {
        streamContext.setSolrClientCache(cache);
        String expr = "stats(" + COLLECTIONORALIAS + ", q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))";
        expression = StreamExpressionParser.parse(expr);
        stream = factory.constructStream(expression);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 1);
        //Test Long and Double Sums
        Tuple tuple = tuples.get(0);
        Double sumi = tuple.getDouble("sum(a_i)");
        Double sumf = tuple.getDouble("sum(a_f)");
        Double mini = tuple.getDouble("min(a_i)");
        Double minf = tuple.getDouble("min(a_f)");
        Double maxi = tuple.getDouble("max(a_i)");
        Double maxf = tuple.getDouble("max(a_f)");
        Double avgi = tuple.getDouble("avg(a_i)");
        Double avgf = tuple.getDouble("avg(a_f)");
        Double count = tuple.getDouble("count(*)");
        assertTrue(sumi.longValue() == 70);
        assertTrue(sumf.doubleValue() == 55.0D);
        assertTrue(mini.doubleValue() == 0.0D);
        assertTrue(minf.doubleValue() == 1.0D);
        assertTrue(maxi.doubleValue() == 14.0D);
        assertTrue(maxf.doubleValue() == 10.0D);
        assertTrue(avgi.doubleValue() == 7.0D);
        assertTrue(avgf.doubleValue() == 5.5D);
        assertTrue(count.doubleValue() == 10);
        //Test with shards parameter
        List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
        expr = "stats(myCollection, q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))";
        Map<String, List<String>> shardsMap = new HashMap<>();
        shardsMap.put("myCollection", shardUrls);
        StreamContext context = new StreamContext();
        context.put("shards", shardsMap);
        context.setSolrClientCache(cache);
        stream = factory.constructStream(expr);
        stream.setStreamContext(context);
        tuples = getTuples(stream);
        assert (tuples.size() == 1);
        //Test Long and Double Sums
        tuple = tuples.get(0);
        sumi = tuple.getDouble("sum(a_i)");
        sumf = tuple.getDouble("sum(a_f)");
        mini = tuple.getDouble("min(a_i)");
        minf = tuple.getDouble("min(a_f)");
        maxi = tuple.getDouble("max(a_i)");
        maxf = tuple.getDouble("max(a_f)");
        avgi = tuple.getDouble("avg(a_i)");
        avgf = tuple.getDouble("avg(a_f)");
        count = tuple.getDouble("count(*)");
        assertTrue(sumi.longValue() == 70);
        assertTrue(sumf.doubleValue() == 55.0D);
        assertTrue(mini.doubleValue() == 0.0D);
        assertTrue(minf.doubleValue() == 1.0D);
        assertTrue(maxi.doubleValue() == 14.0D);
        assertTrue(maxf.doubleValue() == 10.0D);
        assertTrue(avgi.doubleValue() == 7.0D);
        assertTrue(avgf.doubleValue() == 5.5D);
        assertTrue(count.doubleValue() == 10);
        //Exercise the /stream handler
        //Add the shards http parameter for myCollection
        StringBuilder buf = new StringBuilder();
        for (String shardUrl : shardUrls) {
            if (buf.length() > 0) {
                buf.append(",");
            }
            buf.append(shardUrl);
        }
        ModifiableSolrParams solrParams = new ModifiableSolrParams();
        solrParams.add("qt", "/stream");
        solrParams.add("expr", expr);
        solrParams.add("myCollection.shards", buf.toString());
        SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
        tuples = getTuples(solrStream);
        assert (tuples.size() == 1);
        tuple = tuples.get(0);
        sumi = tuple.getDouble("sum(a_i)");
        sumf = tuple.getDouble("sum(a_f)");
        mini = tuple.getDouble("min(a_i)");
        minf = tuple.getDouble("min(a_f)");
        maxi = tuple.getDouble("max(a_i)");
        maxf = tuple.getDouble("max(a_f)");
        avgi = tuple.getDouble("avg(a_i)");
        avgf = tuple.getDouble("avg(a_f)");
        count = tuple.getDouble("count(*)");
        assertTrue(sumi.longValue() == 70);
        assertTrue(sumf.doubleValue() == 55.0D);
        assertTrue(mini.doubleValue() == 0.0D);
        assertTrue(minf.doubleValue() == 1.0D);
        assertTrue(maxi.doubleValue() == 14.0D);
        assertTrue(maxf.doubleValue() == 10.0D);
        assertTrue(avgi.doubleValue() == 7.0D);
        assertTrue(avgf.doubleValue() == 5.5D);
        assertTrue(count.doubleValue() == 10);
        try {
            ModifiableSolrParams solrParamsBad = new ModifiableSolrParams();
            solrParamsBad.add("qt", "/stream");
            solrParamsBad.add("expr", expr);
            solrStream = new SolrStream(shardUrls.get(0), solrParamsBad);
            tuples = getTuples(solrStream);
            throw new Exception("Exception should have been thrown above");
        } catch (IOException e) {
            assertTrue(e.getMessage().contains("Collection not found: myCollection"));
        }
    } finally {
        cache.close();
    }
}
Also used : UpdateRequest(org.apache.solr.client.solrj.request.UpdateRequest) HashMap(java.util.HashMap) MinMetric(org.apache.solr.client.solrj.io.stream.metrics.MinMetric) IOException(java.io.IOException) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) IOException(java.io.IOException) StreamExpression(org.apache.solr.client.solrj.io.stream.expr.StreamExpression) MeanMetric(org.apache.solr.client.solrj.io.stream.metrics.MeanMetric) StreamFactory(org.apache.solr.client.solrj.io.stream.expr.StreamFactory) SolrClientCache(org.apache.solr.client.solrj.io.SolrClientCache) ArrayList(java.util.ArrayList) List(java.util.List) Tuple(org.apache.solr.client.solrj.io.Tuple) Test(org.junit.Test)
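
Outside the test harness, the stats() example above reduces to a small amount of wiring: register the stream and metric classes with a StreamFactory, build the expression, attach a StreamContext backed by a SolrClientCache, and drain the resulting TupleStream. The following is a minimal sketch rather than code from the project: the zkHost and collection values are placeholders, and readAll() stands in for the test's getTuples() helper (presumably an open / read-until-EOF / close loop).

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.StatsStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;

public class StatsStreamSketch {
    public static void main(String[] args) throws Exception {
        String zkHost = "localhost:9983";   // placeholder ZooKeeper address
        String collection = "myCollection"; // placeholder collection name
        // Map the function names used in the expression to their implementing classes.
        StreamFactory factory = new StreamFactory()
            .withCollectionZkHost(collection, zkHost)
            .withFunctionName("stats", StatsStream.class)
            .withFunctionName("sum", SumMetric.class)
            .withFunctionName("min", MinMetric.class)
            .withFunctionName("avg", MeanMetric.class)
            .withFunctionName("count", CountMetric.class);
        String expr = "stats(" + collection + ", q=*:*, sum(a_i), min(a_i), avg(a_i), count(*))";
        SolrClientCache cache = new SolrClientCache();
        try {
            StreamContext context = new StreamContext();
            context.setSolrClientCache(cache);
            TupleStream stream = factory.constructStream(expr);
            stream.setStreamContext(context);
            for (Tuple tuple : readAll(stream)) {
                System.out.println("sum(a_i)=" + tuple.getDouble("sum(a_i)")
                    + " avg(a_i)=" + tuple.getDouble("avg(a_i)")
                    + " count(*)=" + tuple.getDouble("count(*)"));
            }
        } finally {
            cache.close();
        }
    }

    // Roughly what the test's getTuples() helper must do: open, read until EOF, close.
    static List<Tuple> readAll(TupleStream stream) throws Exception {
        List<Tuple> tuples = new ArrayList<>();
        try {
            stream.open();
            for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
                tuples.add(t);
            }
        } finally {
            stream.close();
        }
        return tuples;
    }
}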

Example 12 with StreamFactory

Use of org.apache.solr.client.solrj.io.stream.expr.StreamFactory in project lucene-solr by apache.

From the class StreamExpressionTest, method testInnerJoinStream.

@Test
public void testInnerJoinStream() throws Exception {
    new UpdateRequest()
        .add(id, "1", "side_s", "left", "join1_i", "0", "join2_s", "a", "ident_s", "left_1") // 8, 9
        .add(id, "15", "side_s", "left", "join1_i", "0", "join2_s", "a", "ident_s", "left_1") // 8, 9
        .add(id, "2", "side_s", "left", "join1_i", "0", "join2_s", "b", "ident_s", "left_2")
        .add(id, "3", "side_s", "left", "join1_i", "1", "join2_s", "a", "ident_s", "left_3") // 10
        .add(id, "4", "side_s", "left", "join1_i", "1", "join2_s", "b", "ident_s", "left_4") // 11
        .add(id, "5", "side_s", "left", "join1_i", "1", "join2_s", "c", "ident_s", "left_5") // 12
        .add(id, "6", "side_s", "left", "join1_i", "2", "join2_s", "d", "ident_s", "left_6")
        .add(id, "7", "side_s", "left", "join1_i", "3", "join2_s", "e", "ident_s", "left_7") // 14
        .add(id, "8", "side_s", "right", "join1_i", "0", "join2_s", "a", "ident_s", "right_1", "join3_i", "0") // 1,15
        .add(id, "9", "side_s", "right", "join1_i", "0", "join2_s", "a", "ident_s", "right_2", "join3_i", "0") // 1,15
        .add(id, "10", "side_s", "right", "join1_i", "1", "join2_s", "a", "ident_s", "right_3", "join3_i", "1") // 3
        .add(id, "11", "side_s", "right", "join1_i", "1", "join2_s", "b", "ident_s", "right_4", "join3_i", "1") // 4
        .add(id, "12", "side_s", "right", "join1_i", "1", "join2_s", "c", "ident_s", "right_5", "join3_i", "1") // 5
        .add(id, "13", "side_s", "right", "join1_i", "2", "join2_s", "dad", "ident_s", "right_6", "join3_i", "2")
        .add(id, "14", "side_s", "right", "join1_i", "3", "join2_s", "e", "ident_s", "right_7", "join3_i", "3") // 7
        .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamExpression expression;
    TupleStream stream;
    List<Tuple> tuples;
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    StreamFactory factory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()).withFunctionName("search", CloudSolrStream.class).withFunctionName("innerJoin", InnerJoinStream.class);
    try {
        // Basic test
        expression = StreamExpressionParser.parse("innerJoin(" + "search(" + COLLECTIONORALIAS + ", q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\")," + "search(" + COLLECTIONORALIAS + ", q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc\")," + "on=\"join1_i=join1_i, join2_s=join2_s\")");
        stream = new InnerJoinStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 8);
        assertOrder(tuples, 1, 1, 15, 15, 3, 4, 5, 7);
        // Basic desc
        expression = StreamExpressionParser.parse("innerJoin(" + "search(" + COLLECTIONORALIAS + ", q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i desc, join2_s asc\")," + "search(" + COLLECTIONORALIAS + ", q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i desc, join2_s asc\")," + "on=\"join1_i=join1_i, join2_s=join2_s\")");
        stream = new InnerJoinStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 8);
        assertOrder(tuples, 7, 3, 4, 5, 1, 1, 15, 15);
        // Results in both searches, no join matches
        expression = StreamExpressionParser.parse("innerJoin(" + "search(" + COLLECTIONORALIAS + ", q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"ident_s asc\")," + "search(" + COLLECTIONORALIAS + ", q=\"side_s:right\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"ident_s asc\", aliases=\"id=right.id, join1_i=right.join1_i, join2_s=right.join2_s, ident_s=right.ident_s\")," + "on=\"ident_s=right.ident_s\")");
        stream = new InnerJoinStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 0);
        // Differing field names
        expression = StreamExpressionParser.parse("innerJoin(" + "search(" + COLLECTIONORALIAS + ", q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\")," + "search(" + COLLECTIONORALIAS + ", q=\"side_s:right\", fl=\"join3_i,join2_s,ident_s\", sort=\"join3_i asc, join2_s asc\", aliases=\"join3_i=aliasesField\")," + "on=\"join1_i=aliasesField, join2_s=join2_s\")");
        stream = new InnerJoinStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 8);
        assertOrder(tuples, 1, 1, 15, 15, 3, 4, 5, 7);
    } finally {
        solrClientCache.close();
    }
}
Also used : StreamExpression(org.apache.solr.client.solrj.io.stream.expr.StreamExpression) UpdateRequest(org.apache.solr.client.solrj.request.UpdateRequest) StreamFactory(org.apache.solr.client.solrj.io.stream.expr.StreamFactory) SolrClientCache(org.apache.solr.client.solrj.io.SolrClientCache) Tuple(org.apache.solr.client.solrj.io.Tuple) Test(org.junit.Test)
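
The joins above instantiate InnerJoinStream directly from the parsed expression, but the same factory can construct the whole pipeline from the expression string. Below is a minimal sketch, not project code: the collection name and ZooKeeper address are placeholders, readAll() is the drain helper sketched under Example 11, and the additional imports are CloudSolrStream, InnerJoinStream, StreamContext and TupleStream from org.apache.solr.client.solrj.io.stream. As in the test, both sides of the join are sorted on the join keys.

StreamFactory factory = new StreamFactory()
    .withCollectionZkHost("myCollection", "localhost:9983") // placeholders
    .withFunctionName("search", CloudSolrStream.class)
    .withFunctionName("innerJoin", InnerJoinStream.class);

// Both sides are sorted on the join keys (join1_i, join2_s), mirroring the test above.
String join = "innerJoin("
    + "search(myCollection, q=\"side_s:left\", fl=\"id,join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc, id asc\"),"
    + "search(myCollection, q=\"side_s:right\", fl=\"join1_i,join2_s,ident_s\", sort=\"join1_i asc, join2_s asc\"),"
    + "on=\"join1_i=join1_i, join2_s=join2_s\")";

SolrClientCache cache = new SolrClientCache();
try {
    StreamContext context = new StreamContext();
    context.setSolrClientCache(cache);
    // Equivalent to new InnerJoinStream(StreamExpressionParser.parse(join), factory).
    TupleStream stream = factory.constructStream(join);
    stream.setStreamContext(context);
    List<Tuple> joined = readAll(stream);
    System.out.println("joined tuples: " + joined.size());
} finally {
    cache.close();
}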

Example 13 with StreamFactory

Use of org.apache.solr.client.solrj.io.stream.expr.StreamFactory in project lucene-solr by apache.

From the class StreamExpressionTest, method testNulls.

@Test
public void testNulls() throws Exception {
    new UpdateRequest().add(id, "0", "a_i", "1", "a_f", "0", "s_multi", "aaa", "s_multi", "bbb", "i_multi", "100", "i_multi", "200").add(id, "2", "a_s", "hello2", "a_i", "3", "a_f", "0").add(id, "3", "a_s", "hello3", "a_i", "4", "a_f", "3").add(id, "4", "a_s", "hello4", "a_f", "4").add(id, "1", "a_s", "hello1", "a_i", "2", "a_f", "1").commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamExpression expression;
    TupleStream stream;
    List<Tuple> tuples;
    Tuple tuple;
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    StreamFactory factory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()).withFunctionName("search", CloudSolrStream.class);
    try {
        // Basic test
        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_i asc\")");
        stream = new CloudSolrStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 5);
        assertOrder(tuples, 4, 0, 1, 2, 3);
        tuple = tuples.get(0);
        assertTrue("hello4".equals(tuple.getString("a_s")));
        assertNull(tuple.get("s_multi"));
        assertNull(tuple.get("i_multi"));
        assertNull(tuple.getLong("a_i"));
        tuple = tuples.get(1);
        assertNull(tuple.get("a_s"));
        List<String> strings = tuple.getStrings("s_multi");
        assertNotNull(strings);
        assertEquals("aaa", strings.get(0));
        assertEquals("bbb", strings.get(1));
        List<Long> longs = tuple.getLongs("i_multi");
        assertNotNull(longs);
        //test sort (asc) with null string field. Null should sort to the top.
        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s asc\")");
        stream = new CloudSolrStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 5);
        assertOrder(tuples, 0, 1, 2, 3, 4);
        //test sort(desc) with null string field.  Null should sort to the bottom.
        expression = StreamExpressionParser.parse("search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f, s_multi, i_multi\", qt=\"/export\", sort=\"a_s desc\")");
        stream = new CloudSolrStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 5);
        assertOrder(tuples, 4, 3, 2, 1, 0);
    } finally {
        solrClientCache.close();
    }
}
Also used : UpdateRequest(org.apache.solr.client.solrj.request.UpdateRequest) StreamExpression(org.apache.solr.client.solrj.io.stream.expr.StreamExpression) StreamFactory(org.apache.solr.client.solrj.io.stream.expr.StreamFactory) SolrClientCache(org.apache.solr.client.solrj.io.SolrClientCache) Tuple(org.apache.solr.client.solrj.io.Tuple) Test(org.junit.Test)
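
The takeaway from testNulls is that fields missing from a document are simply absent from the tuple: the typed getters return null rather than a default, and a null string field sorts to the top ascending and to the bottom descending. A short sketch of reading a tuple defensively, using only the accessors the test itself exercises (tuples here is the list returned by the drain helper):

Tuple tuple = tuples.get(0);

Long aInt = tuple.getLong("a_i");            // null when a_i was not indexed for this document
long safeInt = (aInt == null) ? -1L : aInt;  // substitute an explicit default if one is needed

if (tuple.get("s_multi") != null) {          // presence check, as in the test
    List<String> multi = tuple.getStrings("s_multi");
    for (String value : multi) {
        System.out.println("s_multi: " + value);
    }
}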

Example 14 with StreamFactory

Use of org.apache.solr.client.solrj.io.stream.expr.StreamFactory in project lucene-solr by apache.

From the class StreamExpressionTest, method testBasicTextLogitStream.

@Test
public void testBasicTextLogitStream() throws Exception {
    Assume.assumeTrue(!useAlias);
    CollectionAdminRequest.createCollection("destinationCollection", "ml", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish("destinationCollection", cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);
    UpdateRequest updateRequest = new UpdateRequest();
    for (int i = 0; i < 5000; i += 2) {
        updateRequest.add(id, String.valueOf(i), "tv_text", "a b c c d", "out_i", "1");
        updateRequest.add(id, String.valueOf(i + 1), "tv_text", "a b e e f", "out_i", "0");
    }
    updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
    StreamExpression expression;
    TupleStream stream;
    List<Tuple> tuples;
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    StreamFactory factory = new StreamFactory().withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress()).withCollectionZkHost("destinationCollection", cluster.getZkServer().getZkAddress()).withFunctionName("features", FeaturesSelectionStream.class).withFunctionName("train", TextLogitStream.class).withFunctionName("search", CloudSolrStream.class).withFunctionName("update", UpdateStream.class);
    try {
        expression = StreamExpressionParser.parse("features(collection1, q=\"*:*\", featureSet=\"first\", field=\"tv_text\", outcome=\"out_i\", numTerms=4)");
        stream = new FeaturesSelectionStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assert (tuples.size() == 4);
        HashSet<String> terms = new HashSet<>();
        for (Tuple tuple : tuples) {
            terms.add((String) tuple.get("term_s"));
        }
        assertTrue(terms.contains("d"));
        assertTrue(terms.contains("c"));
        assertTrue(terms.contains("e"));
        assertTrue(terms.contains("f"));
        String textLogitExpression = "train(" + "collection1, " + "features(collection1, q=\"*:*\", featureSet=\"first\", field=\"tv_text\", outcome=\"out_i\", numTerms=4)," + "q=\"*:*\", " + "name=\"model\", " + "field=\"tv_text\", " + "outcome=\"out_i\", " + "maxIterations=100)";
        stream = factory.constructStream(textLogitExpression);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        Tuple lastTuple = tuples.get(tuples.size() - 1);
        List<Double> lastWeights = lastTuple.getDoubles("weights_ds");
        Double[] lastWeightsArray = lastWeights.toArray(new Double[lastWeights.size()]);
        // first feature is bias value
        Double[] testRecord = { 1.0, 1.17, 0.691, 0.0, 0.0 };
        double d = sum(multiply(testRecord, lastWeightsArray));
        double prob = sigmoid(d);
        assertEquals(prob, 1.0, 0.1);
        // first feature is bias value
        Double[] testRecord2 = { 1.0, 0.0, 0.0, 1.17, 0.691 };
        d = sum(multiply(testRecord2, lastWeightsArray));
        prob = sigmoid(d);
        assertEquals(prob, 0, 0.1);
        stream = factory.constructStream("update(destinationCollection, batchSize=5, " + textLogitExpression + ")");
        getTuples(stream);
        cluster.getSolrClient().commit("destinationCollection");
        stream = factory.constructStream("search(destinationCollection, " + "q=*:*, " + "fl=\"iteration_i,* \", " + "rows=100, " + "sort=\"iteration_i desc\")");
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assertEquals(100, tuples.size());
        Tuple lastModel = tuples.get(0);
        ClassificationEvaluation evaluation = ClassificationEvaluation.create(lastModel.fields);
        assertTrue(evaluation.getF1() >= 1.0);
        assertEquals(Math.log(5000.0 / (2500 + 1)), lastModel.getDoubles("idfs_ds").get(0), 0.0001);
        // make sure the tuples is retrieved in correct order
        Tuple firstTuple = tuples.get(99);
        assertEquals(1L, (long) firstTuple.getLong("iteration_i"));
    } finally {
        CollectionAdminRequest.deleteCollection("destinationCollection").process(cluster.getSolrClient());
        solrClientCache.close();
    }
}
Also used : UpdateRequest(org.apache.solr.client.solrj.request.UpdateRequest) ClassificationEvaluation(org.apache.solr.client.solrj.io.ClassificationEvaluation) StreamExpression(org.apache.solr.client.solrj.io.stream.expr.StreamExpression) StreamFactory(org.apache.solr.client.solrj.io.stream.expr.StreamFactory) SolrClientCache(org.apache.solr.client.solrj.io.SolrClientCache) Tuple(org.apache.solr.client.solrj.io.Tuple) HashSet(java.util.HashSet) Test(org.junit.Test)
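
The scoring step in the middle of the test relies on private helpers (sum, multiply, sigmoid) that are not shown in this snippet; presumably they take the dot product of a record against the learned weights_ds vector, with a leading bias input fixed at 1.0, and pass it through the logistic function. A hedged, standalone sketch of that calculation:

// Score one record against the final weights: dot product plus logistic squashing.
static double predict(double[] record, double[] weights) {
    double d = 0.0;
    for (int i = 0; i < record.length && i < weights.length; i++) {
        d += record[i] * weights[i];     // record[0] is the bias input, fixed at 1.0
    }
    return 1.0 / (1.0 + Math.exp(-d));   // sigmoid(d)
}

// With the test's two records, the first should score near 1.0 and the second near 0.0,
// matching the assertEquals(prob, 1.0, 0.1) and assertEquals(prob, 0, 0.1) checks above.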

Example 15 with StreamFactory

Use of org.apache.solr.client.solrj.io.stream.expr.StreamFactory in project lucene-solr by apache.

From the class StreamExpressionTest, method testUpdateStream.

@Test
public void testUpdateStream() throws Exception {
    CollectionAdminRequest.createCollection("destinationCollection", "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish("destinationCollection", cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);
    new UpdateRequest().add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0", "s_multi", "aaaa", "s_multi", "bbbb", "i_multi", "4", "i_multi", "7").add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0", "s_multi", "aaaa1", "s_multi", "bbbb1", "i_multi", "44", "i_multi", "77").add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3", "s_multi", "aaaa2", "s_multi", "bbbb2", "i_multi", "444", "i_multi", "777").add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4", "s_multi", "aaaa3", "s_multi", "bbbb3", "i_multi", "4444", "i_multi", "7777").add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1", "s_multi", "aaaa4", "s_multi", "bbbb4", "i_multi", "44444", "i_multi", "77777").commit(cluster.getSolrClient(), "collection1");
    StreamExpression expression;
    TupleStream stream;
    Tuple t;
    StreamContext streamContext = new StreamContext();
    SolrClientCache solrClientCache = new SolrClientCache();
    streamContext.setSolrClientCache(solrClientCache);
    StreamFactory factory = new StreamFactory().withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress()).withCollectionZkHost("destinationCollection", cluster.getZkServer().getZkAddress()).withFunctionName("search", CloudSolrStream.class).withFunctionName("update", UpdateStream.class);
    try {
        //Copy all docs to destinationCollection
        expression = StreamExpressionParser.parse("update(destinationCollection, batchSize=5, search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", sort=\"a_f asc, a_i asc\"))");
        stream = new UpdateStream(expression, factory);
        stream.setStreamContext(streamContext);
        List<Tuple> tuples = getTuples(stream);
        cluster.getSolrClient().commit("destinationCollection");
        //Ensure that all UpdateStream tuples indicate the correct number of copied/indexed docs
        assert (tuples.size() == 1);
        t = tuples.get(0);
        assert (t.EOF == false);
        assertEquals(5, t.get("batchIndexed"));
        //Ensure that destinationCollection actually has the new docs.
        expression = StreamExpressionParser.parse("search(destinationCollection, q=*:*, fl=\"id,a_s,a_i,a_f,s_multi,i_multi\", sort=\"a_i asc\")");
        stream = new CloudSolrStream(expression, factory);
        stream.setStreamContext(streamContext);
        tuples = getTuples(stream);
        assertEquals(5, tuples.size());
        Tuple tuple = tuples.get(0);
        assert (tuple.getLong("id") == 0);
        assert (tuple.get("a_s").equals("hello0"));
        assert (tuple.getLong("a_i") == 0);
        assert (tuple.getDouble("a_f") == 0.0);
        assertList(tuple.getStrings("s_multi"), "aaaa", "bbbb");
        assertList(tuple.getLongs("i_multi"), Long.parseLong("4"), Long.parseLong("7"));
        tuple = tuples.get(1);
        assert (tuple.getLong("id") == 1);
        assert (tuple.get("a_s").equals("hello1"));
        assert (tuple.getLong("a_i") == 1);
        assert (tuple.getDouble("a_f") == 1.0);
        assertList(tuple.getStrings("s_multi"), "aaaa4", "bbbb4");
        assertList(tuple.getLongs("i_multi"), Long.parseLong("44444"), Long.parseLong("77777"));
        tuple = tuples.get(2);
        assert (tuple.getLong("id") == 2);
        assert (tuple.get("a_s").equals("hello2"));
        assert (tuple.getLong("a_i") == 2);
        assert (tuple.getDouble("a_f") == 0.0);
        assertList(tuple.getStrings("s_multi"), "aaaa1", "bbbb1");
        assertList(tuple.getLongs("i_multi"), Long.parseLong("44"), Long.parseLong("77"));
        tuple = tuples.get(3);
        assert (tuple.getLong("id") == 3);
        assert (tuple.get("a_s").equals("hello3"));
        assert (tuple.getLong("a_i") == 3);
        assert (tuple.getDouble("a_f") == 3.0);
        assertList(tuple.getStrings("s_multi"), "aaaa2", "bbbb2");
        assertList(tuple.getLongs("i_multi"), Long.parseLong("444"), Long.parseLong("777"));
        tuple = tuples.get(4);
        assert (tuple.getLong("id") == 4);
        assert (tuple.get("a_s").equals("hello4"));
        assert (tuple.getLong("a_i") == 4);
        assert (tuple.getDouble("a_f") == 4.0);
        assertList(tuple.getStrings("s_multi"), "aaaa3", "bbbb3");
        assertList(tuple.getLongs("i_multi"), Long.parseLong("4444"), Long.parseLong("7777"));
    } finally {
        CollectionAdminRequest.deleteCollection("destinationCollection").process(cluster.getSolrClient());
        solrClientCache.close();
    }
}
Also used : StreamExpression(org.apache.solr.client.solrj.io.stream.expr.StreamExpression) UpdateRequest(org.apache.solr.client.solrj.request.UpdateRequest) StreamFactory(org.apache.solr.client.solrj.io.stream.expr.StreamFactory) SolrClientCache(org.apache.solr.client.solrj.io.SolrClientCache) Tuple(org.apache.solr.client.solrj.io.Tuple) Test(org.junit.Test)
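
Outside the test, the copy pattern is the same: wrap a search() expression in update(destinationCollection, batchSize=N, ...), construct it through the factory, drain it, then commit the destination collection. A minimal sketch with placeholder collection names and ZooKeeper address (readAll() is the helper sketched under Example 11; imports mirror that sketch, plus CloudSolrStream and UpdateStream; the commit is left to whatever SolrClient the application already holds):

StreamFactory factory = new StreamFactory()
    .withCollectionZkHost("sourceCollection", "localhost:9983")      // placeholders
    .withCollectionZkHost("destinationCollection", "localhost:9983")
    .withFunctionName("search", CloudSolrStream.class)
    .withFunctionName("update", UpdateStream.class);

String copy = "update(destinationCollection, batchSize=5, "
    + "search(sourceCollection, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"))";

SolrClientCache cache = new SolrClientCache();
try {
    StreamContext context = new StreamContext();
    context.setSolrClientCache(cache);
    TupleStream stream = factory.constructStream(copy);
    stream.setStreamContext(context);
    // Each emitted tuple reports how many documents a batch indexed ("batchIndexed"),
    // as the assertEquals(5, t.get("batchIndexed")) check above shows.
    for (Tuple t : readAll(stream)) {
        System.out.println("batchIndexed=" + t.get("batchIndexed"));
    }
    // Commit destinationCollection afterwards, as the test does with
    // cluster.getSolrClient().commit("destinationCollection").
} finally {
    cache.close();
}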

Aggregations

StreamFactory (org.apache.solr.client.solrj.io.stream.expr.StreamFactory) 69
Tuple (org.apache.solr.client.solrj.io.Tuple) 65
UpdateRequest (org.apache.solr.client.solrj.request.UpdateRequest) 65
Test (org.junit.Test) 64
SolrClientCache (org.apache.solr.client.solrj.io.SolrClientCache) 61
StreamExpression (org.apache.solr.client.solrj.io.stream.expr.StreamExpression) 37
ModifiableSolrParams (org.apache.solr.common.params.ModifiableSolrParams) 14
MeanMetric (org.apache.solr.client.solrj.io.stream.metrics.MeanMetric) 10
FieldComparator (org.apache.solr.client.solrj.io.comp.FieldComparator) 9
MinMetric (org.apache.solr.client.solrj.io.stream.metrics.MinMetric) 9
IOException (java.io.IOException) 8
HashMap (java.util.HashMap) 7
StreamContext (org.apache.solr.client.solrj.io.stream.StreamContext) 6
ArrayList (java.util.ArrayList) 5
Map (java.util.Map) 5
Connection (java.sql.Connection) 4
Statement (java.sql.Statement) 4
List (java.util.List) 4
JettySolrRunner (org.apache.solr.client.solrj.embedded.JettySolrRunner) 4
AndEvaluator (org.apache.solr.client.solrj.io.eval.AndEvaluator) 3