Search in sources :

Example 61 with JsonArrayBuilder

use of javax.json.JsonArrayBuilder in project activemq-artemis by apache.

From the class AddressControlImpl, the method getRoutingTypesAsJSON:

@Override
public String getRoutingTypesAsJSON() throws Exception {
    clearIO();
    try {
        // Serialize every configured routing type name into a JSON array
        // and return its string form.
        JsonArrayBuilder builder = JsonLoader.createArrayBuilder();
        String[] types = getRoutingTypes();
        for (int i = 0; i < types.length; i++) {
            builder.add(types[i]);
        }
        return builder.build().toString();
    } finally {
        blockOnIO();
    }
}
Also used : JsonArrayBuilder(javax.json.JsonArrayBuilder) SimpleString(org.apache.activemq.artemis.api.core.SimpleString)

Example 62 with JsonArrayBuilder

use of javax.json.JsonArrayBuilder in project activemq-artemis by apache.

From the class QueueControlImpl, the method toJSON:

private static String toJSON(final Map<String, Map<String, Object>[]> messages) {
    // Render the consumer-name -> message-list map as a JSON array of
    // objects shaped like { "consumerName": ..., "elements": [...] }.
    final JsonArrayBuilder result = JsonLoader.createArrayBuilder();
    messages.forEach((consumerName, elements) -> result.add(
        JsonLoader.createObjectBuilder()
            .add("consumerName", consumerName)
            .add("elements", toJSONMsgArray(elements))));
    return result.build().toString();
}
Also used : JsonArrayBuilder(javax.json.JsonArrayBuilder) SimpleString(org.apache.activemq.artemis.api.core.SimpleString) JsonObjectBuilder(javax.json.JsonObjectBuilder) HashMap(java.util.HashMap) Map(java.util.Map)

Example 63 with JsonArrayBuilder

use of javax.json.JsonArrayBuilder in project activemq-artemis by apache.

From the class QueueControlImpl, the method listConsumersAsJSON:

@Override
public String listConsumersAsJSON() throws Exception {
    checkStarted();
    clearIO();
    try {
        Collection<Consumer> consumers = queue.getConsumers();
        JsonArrayBuilder jsonArray = JsonLoader.createArrayBuilder();
        for (Consumer consumer : consumers) {
            if (consumer instanceof ServerConsumer) {
                ServerConsumer serverConsumer = (ServerConsumer) consumer;
                JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("consumerID", serverConsumer.getID()).add("connectionID", serverConsumer.getConnectionID().toString()).add("sessionID", serverConsumer.getSessionID()).add("browseOnly", serverConsumer.isBrowseOnly()).add("creationTime", serverConsumer.getCreationTime());
                jsonArray.add(obj);
            }
        }
        return jsonArray.build().toString();
    } finally {
        blockOnIO();
    }
}
Also used : ServerConsumer(org.apache.activemq.artemis.core.server.ServerConsumer) Consumer(org.apache.activemq.artemis.core.server.Consumer) JsonArrayBuilder(javax.json.JsonArrayBuilder) ServerConsumer(org.apache.activemq.artemis.core.server.ServerConsumer) JsonObjectBuilder(javax.json.JsonObjectBuilder)

Example 64 with JsonArrayBuilder

use of javax.json.JsonArrayBuilder in project CoreNLP by stanfordnlp.

From the class FastNeuralCorefDataExporter, the method getSentenceArray:

private static JsonArray getSentenceArray(List<CoreLabel> sentence) {
    // Collect the surface word of each token into a JSON array, rewriting
    // the literal tokens "/." and "/?" to "." and "?" respectively.
    JsonArrayBuilder words = Json.createArrayBuilder();
    for (CoreLabel token : sentence) {
        String w = token.word();
        if (w.equals("/.")) {
            w = ".";
        } else if (w.equals("/?")) {
            w = "?";
        }
        words.add(w);
    }
    return words.build();
}
Also used : SentencesAnnotation(edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation) Arrays(java.util.Arrays) JsonArrayBuilder(javax.json.JsonArrayBuilder) HashMap(java.util.HashMap) Example(edu.stanford.nlp.coref.statistical.Example) Dictionaries(edu.stanford.nlp.coref.data.Dictionaries) CorefCluster(edu.stanford.nlp.coref.data.CorefCluster) Mention(edu.stanford.nlp.coref.data.Mention) Map(java.util.Map) Json(javax.json.Json) DocumentExamples(edu.stanford.nlp.coref.statistical.DocumentExamples) Pair(edu.stanford.nlp.util.Pair) CoreMap(edu.stanford.nlp.util.CoreMap) FeatureExtractor(edu.stanford.nlp.coref.statistical.FeatureExtractor) PrintWriter(java.io.PrintWriter) JsonObject(javax.json.JsonObject) Dataset(edu.stanford.nlp.coref.CorefProperties.Dataset) CoreLabel(edu.stanford.nlp.ling.CoreLabel) Properties(java.util.Properties) CoreAnnotations(edu.stanford.nlp.ling.CoreAnnotations) SemanticGraphEdge(edu.stanford.nlp.semgraph.SemanticGraphEdge) Iterator(java.util.Iterator) Compressor(edu.stanford.nlp.coref.statistical.Compressor) IOUtils(edu.stanford.nlp.io.IOUtils) JsonArray(javax.json.JsonArray) File(java.io.File) List(java.util.List) StatisticalCorefProperties(edu.stanford.nlp.coref.statistical.StatisticalCorefProperties) StringUtils(edu.stanford.nlp.util.StringUtils) CorefDocumentProcessor(edu.stanford.nlp.coref.CorefDocumentProcessor) CorefProperties(edu.stanford.nlp.coref.CorefProperties) Document(edu.stanford.nlp.coref.data.Document) CorefUtils(edu.stanford.nlp.coref.CorefUtils) JsonObjectBuilder(javax.json.JsonObjectBuilder) CompressedFeatureVector(edu.stanford.nlp.coref.statistical.CompressedFeatureVector) JsonArrayBuilder(javax.json.JsonArrayBuilder)

Example 65 with JsonArrayBuilder

use of javax.json.JsonArrayBuilder in project CoreNLP by stanfordnlp.

From the class ScorePhrases, the method learnNewPhrasesPrivate:

// Scores the candidate phrases matched by the patterns learned in this iteration
// and chooses the phrases to add for the given label. The path taken depends on
// constVars.wordScoring:
//   - WEIGHTEDNORM: score all extracted phrases, filter out ignore/seed/already-
//     learned words, pick the top-scoring ones, optionally append a justification
//     JSON file, and return the selected phrases.
//   - BPB: return the single phrase whose best-weighted matching pattern is maximal.
//
// Side effects: populates wordsPatExtracted, terms, matchedTokensByPat; replaces
// the contents of scoreForAllWordsThisIteration; may overwrite
// Data.processedDataFreq; may write <outDir>/<identifier>/<label>/words.json.
//
// @param ignoreWords extra phrases to exclude from selection (may be null)
// @param computeProcDataFreq when true, (re)derive Data.processedDataFreq from
//        Data.rawFreq using phraseScorer.wordFreqNorm
// @return counter of the newly selected phrases and their scores (empty for BPB
//         when nothing qualifies)
// @throws RuntimeException for an unrecognized normalization or word-scoring mode
private Counter<CandidatePhrase> learnNewPhrasesPrivate(String label, PatternsForEachToken patternsForEachToken, Counter<E> patternsLearnedThisIter, Counter<E> allSelectedPatterns, Set<CandidatePhrase> alreadyIdentifiedWords, CollectionValuedMap<E, Triple<String, Integer, Integer>> matchedTokensByPat, Counter<CandidatePhrase> scoreForAllWordsThisIteration, TwoDimensionalCounter<CandidatePhrase, E> terms, TwoDimensionalCounter<CandidatePhrase, E> wordsPatExtracted, TwoDimensionalCounter<E, CandidatePhrase> patternsAndWords4Label, String identifier, Set<CandidatePhrase> ignoreWords, boolean computeProcDataFreq) throws IOException, ClassNotFoundException {
    Set<CandidatePhrase> alreadyLabeledWords = new HashSet<>();
    if (constVars.doNotApplyPatterns) {
        // if want to get the stats by the lossy way of just counting without
        // applying the patterns
        ConstantsAndVariables.DataSentsIterator sentsIter = new ConstantsAndVariables.DataSentsIterator(constVars.batchProcessSents);
        while (sentsIter.hasNext()) {
            Pair<Map<String, DataInstance>, File> sentsf = sentsIter.next();
            this.statsWithoutApplyingPatterns(sentsf.first(), patternsForEachToken, patternsLearnedThisIter, wordsPatExtracted);
        }
    } else {
        if (patternsLearnedThisIter.size() > 0) {
            // Apply this iteration's patterns; fills wordsPatExtracted,
            // matchedTokensByPat and alreadyLabeledWords as side effects.
            this.applyPats(patternsLearnedThisIter, label, wordsPatExtracted, matchedTokensByPat, alreadyLabeledWords);
        }
    }
    if (computeProcDataFreq) {
        // Normalize raw corpus frequencies (sqrt or 1+log). NONE means the raw
        // counter is reused directly in the else branch below.
        if (!phraseScorer.wordFreqNorm.equals(Normalization.NONE)) {
            Redwood.log(Redwood.DBG, "computing processed freq");
            for (Entry<CandidatePhrase, Double> fq : Data.rawFreq.entrySet()) {
                Double in = fq.getValue();
                if (phraseScorer.wordFreqNorm.equals(Normalization.SQRT))
                    in = Math.sqrt(in);
                else if (phraseScorer.wordFreqNorm.equals(Normalization.LOG))
                    in = 1 + Math.log(in);
                else
                    throw new RuntimeException("can't understand the normalization");
                assert !in.isNaN() : "Why is processed freq nan when rawfreq is " + in;
                Data.processedDataFreq.setCount(fq.getKey(), in);
            }
        } else
            Data.processedDataFreq = Data.rawFreq;
    }
    // --- WEIGHTEDNORM path: score all extracted phrases and select the top ones ---
    if (constVars.wordScoring.equals(WordScoring.WEIGHTEDNORM)) {
        // Gather extracted phrases into `terms`, skipping phrases known to belong
        // to other semantic classes (by surface form or lemma) and phrases that
        // were already labeled while applying the patterns.
        for (CandidatePhrase en : wordsPatExtracted.firstKeySet()) {
            if (!constVars.getOtherSemanticClassesWords().contains(en) && (en.getPhraseLemma() == null || !constVars.getOtherSemanticClassesWords().contains(CandidatePhrase.createOrGet(en.getPhraseLemma()))) && !alreadyLabeledWords.contains(en)) {
                terms.addAll(en, wordsPatExtracted.getCounter(en));
            }
        }
        removeKeys(terms, constVars.getStopWords());
        Counter<CandidatePhrase> phraseScores = phraseScorer.scorePhrases(label, terms, wordsPatExtracted, allSelectedPatterns, alreadyIdentifiedWords, false);
        // Debugging output tracking one specific phrase through the pipeline.
        System.out.println("count for word U.S. is " + phraseScores.getCount(CandidatePhrase.createOrGet("U.S.")));
        // Build the full exclusion set: caller-supplied ignore words, other-class
        // words, the seed dictionary, and words learned in earlier iterations.
        Set<CandidatePhrase> ignoreWordsAll;
        if (ignoreWords != null && !ignoreWords.isEmpty()) {
            ignoreWordsAll = CollectionUtils.unionAsSet(ignoreWords, constVars.getOtherSemanticClassesWords());
        } else
            ignoreWordsAll = new HashSet<>(constVars.getOtherSemanticClassesWords());
        ignoreWordsAll.addAll(constVars.getSeedLabelDictionary().get(label));
        ignoreWordsAll.addAll(constVars.getLearnedWords(label).keySet());
        System.out.println("ignoreWordsAll contains word U.S. is " + ignoreWordsAll.contains(CandidatePhrase.createOrGet("U.S.")));
        // NOTE(review): phraseScores is passed twice here (2nd and 4th... i.e. 1st
        // and 3rd arguments) — verify against chooseTopWords' signature that this
        // is intended and not a copy/paste slip.
        Counter<CandidatePhrase> finalwords = chooseTopWords(phraseScores, terms, phraseScores, ignoreWordsAll, constVars.thresholdWordExtract);
        phraseScorer.printReasonForChoosing(finalwords);
        // Publish this iteration's scores to the caller-owned counter.
        scoreForAllWordsThisIteration.clear();
        Counters.addInPlace(scoreForAllWordsThisIteration, phraseScores);
        Redwood.log(ConstantsAndVariables.minimaldebug, "\n\n## Selected Words for " + label + " : " + Counters.toSortedString(finalwords, finalwords.size(), "%1$s:%2$.2f", "\t"));
        if (constVars.goldEntities != null) {
            // If gold entities were provided, log each selected word's gold label.
            Map<String, Boolean> goldEntities4Label = constVars.goldEntities.get(label);
            if (goldEntities4Label != null) {
                StringBuilder s = new StringBuilder();
                // NOTE(review): "UKNOWN" below looks like a typo for "UNKNOWN" in the
                // log output; left untouched since it is a runtime string.
                finalwords.keySet().stream().forEach(x -> s.append(x.getPhrase() + (goldEntities4Label.containsKey(x.getPhrase()) ? ":" + goldEntities4Label.get(x.getPhrase()) : ":UKNOWN") + "\n"));
                Redwood.log(ConstantsAndVariables.minimaldebug, "\n\n## Gold labels for selected words for label " + label + " : " + s.toString());
            } else
                Redwood.log(Redwood.DBG, "No gold entities provided for label " + label);
        }
        if (constVars.outDir != null && !constVars.outDir.isEmpty()) {
            // Write a per-iteration justification file explaining why each word
            // was chosen (the co-occurring words and patterns behind it).
            String outputdir = constVars.outDir + "/" + identifier + "/" + label;
            IOUtils.ensureDir(new File(outputdir));
            // word -> other words that co-occur with the patterns that extracted it
            TwoDimensionalCounter<CandidatePhrase, CandidatePhrase> reasonForWords = new TwoDimensionalCounter<>();
            for (CandidatePhrase word : finalwords.keySet()) {
                for (E l : wordsPatExtracted.getCounter(word).keySet()) {
                    for (CandidatePhrase w2 : patternsAndWords4Label.getCounter(l)) {
                        reasonForWords.incrementCount(word, w2);
                    }
                }
            }
            Redwood.log(ConstantsAndVariables.minimaldebug, "Saving output in " + outputdir);
            String filename = outputdir + "/words.json";
            // the json object is an array corresponding to each iteration - of list
            // of objects,
            // each of which is a bean of entity and reasons
            JsonArrayBuilder obj = Json.createArrayBuilder();
            // If a justification file was already written for this label, read it
            // back and copy its contents so the new iteration is appended.
            if (writtenInJustification.containsKey(label) && writtenInJustification.get(label)) {
                JsonReader jsonReader = Json.createReader(new BufferedInputStream(new FileInputStream(filename)));
                JsonArray objarr = jsonReader.readArray();
                for (JsonValue o : objarr) obj.add(o);
                jsonReader.close();
            }
            JsonArrayBuilder objThisIter = Json.createArrayBuilder();
            for (CandidatePhrase w : reasonForWords.firstKeySet()) {
                JsonObjectBuilder objinner = Json.createObjectBuilder();
                JsonArrayBuilder l = Json.createArrayBuilder();
                for (CandidatePhrase w2 : reasonForWords.getCounter(w).keySet()) {
                    l.add(w2.getPhrase());
                }
                JsonArrayBuilder pats = Json.createArrayBuilder();
                for (E p : wordsPatExtracted.getCounter(w)) {
                    pats.add(p.toStringSimple());
                }
                objinner.add("reasonwords", l);
                objinner.add("patterns", pats);
                objinner.add("score", finalwords.getCount(w));
                objinner.add("entity", w.getPhrase());
                objThisIter.add(objinner.build());
            }
            obj.add(objThisIter);
            // Redwood.log(ConstantsAndVariables.minimaldebug, channelNameLogger,
            // "Writing justification at " + filename);
            IOUtils.writeStringToFile(StringUtils.normalize(StringUtils.toAscii(obj.build().toString())), filename, "ASCII");
            writtenInJustification.put(label, true);
        }
        if (constVars.justify) {
            Redwood.log(Redwood.DBG, "\nJustification for phrases:\n");
            for (CandidatePhrase word : finalwords.keySet()) {
                Redwood.log(Redwood.DBG, "Phrase " + word + " extracted because of patterns: \t" + Counters.toSortedString(wordsPatExtracted.getCounter(word), wordsPatExtracted.getCounter(word).size(), "%1$s:%2$f", "\n"));
            }
        }
        return finalwords;
    // --- BPB path: select the single word backed by the highest-weight pattern ---
    } else if (constVars.wordScoring.equals(WordScoring.BPB)) {
        Counters.addInPlace(terms, wordsPatExtracted);
        // For each word, record the weight of its best matching pattern and which
        // pattern that was.
        Counter<CandidatePhrase> maxPatWeightTerms = new ClassicCounter<>();
        Map<CandidatePhrase, E> wordMaxPat = new HashMap<>();
        for (Entry<CandidatePhrase, ClassicCounter<E>> en : terms.entrySet()) {
            Counter<E> weights = new ClassicCounter<>();
            for (E k : en.getValue().keySet()) weights.setCount(k, patternsLearnedThisIter.getCount(k));
            maxPatWeightTerms.setCount(en.getKey(), Counters.max(weights));
            wordMaxPat.put(en.getKey(), Counters.argmax(weights));
        }
        Counters.removeKeys(maxPatWeightTerms, alreadyIdentifiedWords);
        double maxvalue = Counters.max(maxPatWeightTerms);
        // 1e-10 tolerance collects all words effectively tied at the maximum.
        Set<CandidatePhrase> words = Counters.keysAbove(maxPatWeightTerms, maxvalue - 1e-10);
        CandidatePhrase bestw = null;
        if (words.size() > 1) {
            // Break ties by the word's extraction count under its best pattern.
            double max = Double.NEGATIVE_INFINITY;
            for (CandidatePhrase w : words) {
                if (terms.getCount(w, wordMaxPat.get(w)) > max) {
                    max = terms.getCount(w, wordMaxPat.get(w));
                    bestw = w;
                }
            }
        } else if (words.size() == 1)
            bestw = words.iterator().next();
        else
            return new ClassicCounter<>();
        Redwood.log(ConstantsAndVariables.minimaldebug, "Selected Words: " + bestw);
        return Counters.asCounter(Arrays.asList(bestw));
    } else
        throw new RuntimeException("wordscoring " + constVars.wordScoring + " not identified");
}
Also used : Entry(java.util.Map.Entry) Counter(edu.stanford.nlp.stats.Counter) ClassicCounter(edu.stanford.nlp.stats.ClassicCounter) TwoDimensionalCounter(edu.stanford.nlp.stats.TwoDimensionalCounter) BufferedInputStream(java.io.BufferedInputStream) JsonReader(javax.json.JsonReader) JsonArrayBuilder(javax.json.JsonArrayBuilder) JsonObjectBuilder(javax.json.JsonObjectBuilder) JsonValue(javax.json.JsonValue) TwoDimensionalCounter(edu.stanford.nlp.stats.TwoDimensionalCounter) FileInputStream(java.io.FileInputStream) JsonArray(javax.json.JsonArray) File(java.io.File)

Aggregations

JsonArrayBuilder (javax.json.JsonArrayBuilder)177 JsonObjectBuilder (javax.json.JsonObjectBuilder)103 JsonObject (javax.json.JsonObject)36 Map (java.util.Map)29 Path (javax.ws.rs.Path)26 GET (javax.ws.rs.GET)24 HashMap (java.util.HashMap)19 JsonArray (javax.json.JsonArray)18 ArrayList (java.util.ArrayList)15 List (java.util.List)15 AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser)12 IOException (java.io.IOException)12 Dataverse (edu.harvard.iq.dataverse.Dataverse)10 Dataset (edu.harvard.iq.dataverse.Dataset)9 User (edu.harvard.iq.dataverse.authorization.users.User)9 JsonValue (javax.json.JsonValue)9 StringWriter (java.io.StringWriter)8 JsonString (javax.json.JsonString)7 Date (java.util.Date)6 JsonException (javax.json.JsonException)6