Use of structures._ChildDoc in project IR_Base by Linda-sunshine.
From class ParentChildAnalyzer, method loadChildDoc:
public void loadChildDoc(String fileName) {
    if (fileName == null || fileName.isEmpty())
        return;
    JSONObject json = LoadJSON(fileName);
    String content = Utils.getJSONValue(json, "content");
    String name = Utils.getJSONValue(json, "name");
    String parent = Utils.getJSONValue(json, "parent");
    String title = Utils.getJSONValue(json, "title");
    // Alternative child-document representations, kept commented out for reference:
    // _ChildDoc4BaseWithPhi d = new _ChildDoc4BaseWithPhi(m_corpus.getSize(), name, "", content, 0);
    // _ChildDoc4BaseWithPhi_Hard d = new _ChildDoc4BaseWithPhi_Hard(m_corpus.getSize(), name, "", content, 0);
    // _ChildDoc4ChildPhi d = new _ChildDoc4ChildPhi(m_corpus.getSize(), name, "", content, 0);
    // _ChildDoc4TwoPhi d = new _ChildDoc4TwoPhi(m_corpus.getSize(), name, "", content, 0);
    // _ChildDoc4ThreePhi d = new _ChildDoc4ThreePhi(m_corpus.getSize(), name, "", content, 0);
    // _ChildDoc4OneTopicProportion d = new _ChildDoc4OneTopicProportion(m_corpus.getSize(), name, "", content, 0);
    _ChildDoc d = new _ChildDoc(m_corpus.getSize(), name, "", content, 0);
    if (parentHashMap.containsKey(parent)) {
        if (AnalyzeDoc(d)) {
            // this is a valid child document: link it to its parent
            _ParentDoc pDoc = parentHashMap.get(parent);
            d.setParentDoc(pDoc);
            pDoc.addChildDoc(d);
        } else {
            // System.err.format("filtering comments %s!\n", parent);
        }
    } else {
        // System.err.format("[Warning]Missing parent document %s!\n", parent);
    }
}
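The method assumes the corresponding parent documents are already in parentHashMap; comments whose parent is missing, or that fail AnalyzeDoc, are silently skipped. A minimal, hypothetical driver (not part of IR_Base) that feeds a folder of per-comment JSON files into loadChildDoc could look like the sketch below; the helper name loadChildFolder and the folder layout are assumptions.

// Hypothetical helper, not in IR_Base: call loadChildDoc for every JSON file in a folder.
// Parent documents must be loaded first so parentHashMap can resolve the "parent" field.
public void loadChildFolder(String folderPath) {
    File folder = new File(folderPath);
    File[] files = folder.listFiles();
    if (files == null)
        return;
    for (File f : files) {
        if (f.getName().endsWith(".json"))
            loadChildDoc(f.getAbsolutePath());
    }
}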
Use of structures._ChildDoc in project IR_Base by Linda-sunshine.
From class LDAGibbs4AC, method Evaluation:
public double Evaluation(int i) {
    m_collectCorpusStats = false;
    double perplexity = 0, loglikelihood, totalWords = 0, sumLikelihood = 0;
    System.out.println("In Normal");
    for (_Doc d : m_testSet) {
        loglikelihood = inference(d);
        sumLikelihood += loglikelihood;
        perplexity += loglikelihood;
        // count the held-out tokens of the parent document and of all its child documents
        totalWords += d.getDocTestLength();
        for (_ChildDoc cDoc : ((_ParentDoc) d).m_childDocs) {
            totalWords += cDoc.getDocTestLength();
        }
    }
    System.out.println("total Words\t" + totalWords + "\tperplexity\t" + perplexity);
    infoWriter.println("total Words\t" + totalWords + "\tperplexity\t" + perplexity);
    perplexity /= totalWords;
    perplexity = Math.exp(-perplexity);
    sumLikelihood /= m_testSet.size();
    System.out.format("Test set perplexity is %.3f and log-likelihood is %.3f\n", perplexity, sumLikelihood);
    infoWriter.format("Test set perplexity is %.3f and log-likelihood is %.3f\n", perplexity, sumLikelihood);
    return perplexity;
}
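Evaluation sums the per-document log-likelihoods returned by inference together with the number of held-out tokens of each parent article and all of its comments, then converts the totals into corpus-level perplexity:

\mathrm{perplexity} = \exp\left(-\frac{\sum_{d \in \mathrm{testSet}} \log p(w_d)}{\sum_{d} \left(N_d + \sum_{c \in \mathrm{children}(d)} N_c\right)}\right)

Lower perplexity means the model assigns higher probability to the held-out parent-child pairs; sumLikelihood is reported separately as the average log-likelihood per test document.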
Use of structures._ChildDoc in project IR_Base by Linda-sunshine.
From class LDAGibbs4AC, method initialize_probability:
protected void initialize_probability(Collection<_Doc> collection) {
    createSpace();
    // seed the topic-word sufficient statistics with the Dirichlet prior d_beta
    for (int i = 0; i < number_of_topics; i++) {
        Arrays.fill(topic_term_probabilty[i], 0);
        Arrays.fill(word_topic_sstat[i], d_beta);
    }
    Arrays.fill(m_sstat, d_beta * vocabulary_size);
    for (_Doc d : collection) {
        if (d instanceof _ParentDoc) {
            for (_Stn stnObj : d.getSentences()) {
                stnObj.setTopicsVct(number_of_topics);
            }
            d.setTopics4Gibbs(number_of_topics, d_alpha);
        } else if (d instanceof _ChildDoc) {
            ((_ChildDoc) d).setTopics4Gibbs_LDA(number_of_topics, d_alpha);
        }
        // accumulate the randomly initialized topic assignments into the global counts
        for (_Word w : d.getWords()) {
            word_topic_sstat[w.getTopic()][w.getIndex()]++;
            m_sstat[w.getTopic()]++;
        }
    }
    imposePrior();
}
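Because word_topic_sstat is seeded with d_beta and m_sstat with d_beta * vocabulary_size before any tokens are counted, the ratio word_topic_sstat[k][w] / m_sstat[k] used elsewhere (for example in rankChild4ParentByLikelihood below) is already the Dirichlet-smoothed topic-word estimate:

\hat{\phi}_{k,w} = \frac{n_{k,w} + \beta}{n_k + V\beta}

where n_{k,w} is the number of tokens of word w assigned to topic k, n_k the total number of tokens assigned to topic k, V the vocabulary size, and \beta = d_beta.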
Use of structures._ChildDoc in project IR_Base by Linda-sunshine.
From class LDAGibbs4AC_test, method rankChild4ParentByLikelihood:
protected double rankChild4ParentByLikelihood(_ChildDoc cDoc, _ParentDoc pDoc) {
    int cDocLen = cDoc.getTotalDocLength();
    _SparseFeature[] fv = pDoc.getSparse();
    double docLogLikelihood = 0;
    for (_SparseFeature i : fv) {
        int wid = i.getIndex();
        double value = i.getValue();
        double wordLogLikelihood = 0;
        // mixture over topics: smoothed topic-word probability times the child's smoothed topic proportion
        for (int k = 0; k < number_of_topics; k++) {
            double wordPerTopicLikelihood = (word_topic_sstat[k][wid] / m_sstat[k])
                    * ((cDoc.m_sstat[k] + d_alpha) / (d_alpha * number_of_topics + cDocLen));
            wordLogLikelihood += wordPerTopicLikelihood;
        }
        docLogLikelihood += value * Math.log(wordLogLikelihood);
    }
    return docLogLikelihood;
}
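The score is the log-likelihood of the parent's word vector under the child's topic proportions, i.e. a topic-mixture language model:

\mathrm{score}(c, p) = \sum_{w \in p} n_{p,w} \, \log \sum_{k=1}^{K} \hat{\phi}_{k,w} \, \frac{n_{c,k} + \alpha}{N_c + K\alpha}

with n_{p,w} the TF value of word w in the parent, n_{c,k} the child's topic counts (cDoc.m_sstat[k]), N_c the child's length, and \alpha = d_alpha. Child documents can then be ranked for a given parent by this score.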
Use of structures._ChildDoc in project IR_Base by Linda-sunshine.
From class LDAGibbs4AC_test, method discoverSpecificComments:
protected void discoverSpecificComments(String similarityFile) {
    System.out.println("topic similarity");
    try {
        PrintWriter pw = new PrintWriter(new File(similarityFile));
        for (_Doc doc : m_trainSet) {
            if (doc instanceof _ParentDoc) {
                pw.print(doc.getName() + "\t");
                double stnTopicSimilarity = 0.0;
                double docTopicSimilarity = 0.0;
                for (_ChildDoc cDoc : ((_ParentDoc) doc).m_childDocs) {
                    pw.print(cDoc.getName() + ":");
                    // similarity between the article's topic distribution and the comment's
                    docTopicSimilarity = computeSimilarity(((_ParentDoc) doc).m_topics, cDoc.m_topics);
                    pw.print(docTopicSimilarity);
                    // similarity between each sentence's topic distribution and the comment's
                    for (_Stn stnObj : doc.getSentences()) {
                        stnTopicSimilarity = computeSimilarity(stnObj.m_topics, cDoc.m_topics);
                        pw.print(":" + (stnObj.getIndex() + 1) + ":" + stnTopicSimilarity);
                    }
                    pw.print("\t");
                }
                pw.println();
            }
        }
        pw.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
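Each output line thus lists a parent document followed by, for every comment, the comment name, its document-level topic similarity to the article, and one (sentence index : similarity) pair per article sentence. The implementation of computeSimilarity is not shown here; a plausible stand-in is cosine similarity of the two topic vectors, sketched below purely as an assumption (the actual IR_Base code may use a different measure, e.g. KL divergence).

// Hypothetical sketch only -- not the actual computeSimilarity in IR_Base.
public static double cosineSimilarity(double[] p, double[] q) {
    double dot = 0, normP = 0, normQ = 0;
    for (int k = 0; k < p.length; k++) {
        dot += p[k] * q[k];
        normP += p[k] * p[k];
        normQ += q[k] * q[k];
    }
    if (normP == 0 || normQ == 0)
        return 0;
    return dot / (Math.sqrt(normP) * Math.sqrt(normQ));
}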