Use of org.grupolys.samulan.util.SentimentInformation in project uuusa by aghie:
class DefaultSentimentJoiner, method join.
@Override
public SentimentInformation join(SentimentInformation head, List<SentimentInformation> children) {
    // Fold every child's polarity scores into the head's own scores.
    float positive = head.getPositiveSentiment();
    float negative = head.getNegativeSentiment();
    for (SentimentInformation child : children) {
        positive += child.getPositiveSentiment();
        negative += child.getNegativeSentiment();
    }
    head.setPositiveSentiment(positive);
    head.setNegativeSentiment(negative);
    // The semantic orientation is the signed difference of the two totals.
    head.setSemanticOrientation(positive - negative);
    return head;
}
Use of org.grupolys.samulan.util.SentimentInformation in project uuusa by aghie:
class Samulan, method analyse.
// Obtains a list of SentimentDependencyGraph given a CoNLL file
// private static List<SentimentDependencyGraph> getGraphs(String path, String encoding){
// CoNLLReader conllReader = new CoNLLReader();
// List<DependencyGraph> graphs =conllReader.read(path, encoding);
// List<SentimentDependencyGraph> sgraphs = new ArrayList<SentimentDependencyGraph>();
//
// for (DependencyGraph dg: graphs){
//
// HashMap<Short, DependencyNode> nodes = dg.getNodes();
// HashMap<Short, DependencyNode> snodes = new HashMap<Short, DependencyNode>();
// for (short address: nodes.keySet()){
// snodes.put(address,new SentimentDependencyNode(nodes.get(address), null));
// }
//
// sgraphs.add(new SentimentDependencyGraph(snodes));
// }
// return sgraphs;
// }
// Obtains the sentiment classification for each graph in a CoNLL file
private static void analyse(String conllFile, String encoding, RuleBasedAnalyser rba, String pathOutput, String scale, boolean verbose) {
    BufferedReader br;
    try {
        br = new BufferedReader(new FileReader(conllFile));
    } catch (FileNotFoundException e1) {
        System.err.println("File or directory: " + conllFile + " not found");
        e1.printStackTrace();
        // Fix: the original fell through here and threw a NullPointerException
        // on the first br.readLine(); there is nothing to analyse, so return.
        return;
    }
    Writer writer;
    try {
        if (pathOutput != null) {
            writer = new PrintWriter(pathOutput, encoding);
        } else {
            writer = new BufferedWriter(new OutputStreamWriter(System.out));
        }
    } catch (FileNotFoundException | UnsupportedEncodingException e1) {
        e1.printStackTrace();
        try {
            br.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Fix: without a writer the loop below would NPE; bail out instead.
        return;
    }
    CoNLLReader conllReader = new CoNLLReader();
    List<SentimentDependencyGraph> sdgs = new ArrayList<SentimentDependencyGraph>();
    double totalAnalyseTime = 0;
    String conll = null, textID = null, previousTextID = null;
    boolean newFileGraphs = false;
    boolean first = true;
    try {
        String line = br.readLine();
        while (line != null) {
            // A header for a new text was seen and the previous text is
            // complete: analyse and emit the graphs accumulated so far.
            if (newFileGraphs && (previousTextID != null)) {
                totalAnalyseTime = analyseText(sdgs, rba, writer, scale, verbose, totalAnalyseTime);
                sdgs.clear();
                previousTextID = null;
                newFileGraphs = false;
            }
            if (line.startsWith(CONLL_IDENTIFIER_SYMBOL)) {
                // Header line: extract the text identifier (second TAB field).
                String lcleaned = line.replace(CONLL_IDENTIFIER_SYMBOL, "").replace(" ", "").replace("\n", "");
                if (!first) {
                    previousTextID = textID;
                }
                first = false;
                textID = lcleaned.split("\t")[1];
                conll = "";
                newFileGraphs = true;
            } else if (line.equals("")) {
                // Blank line: the current CoNLL graph is complete; wrap its
                // nodes into sentiment-aware nodes and store the graph.
                HashMap<Short, DependencyNode> nodes = conllReader.read(conll).getNodes();
                HashMap<Short, DependencyNode> snodes = new HashMap<Short, DependencyNode>();
                for (short address : nodes.keySet()) {
                    snodes.put(address, new SentimentDependencyNode(nodes.get(address), null));
                }
                sdgs.add(new SentimentDependencyGraph(snodes));
                conll = "";
            } else {
                // Still inside the current graph: accumulate the CoNLL line.
                conll = conll.concat(line + "\n");
            }
            line = br.readLine();
        }
        // Last text of the file (no trailing header triggers it).
        // Fix: the original reused the previous text's measured time here;
        // analyseText now measures each text itself.
        if (!sdgs.isEmpty()) {
            analyseText(sdgs, rba, writer, scale, verbose, totalAnalyseTime);
            sdgs.clear();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Fix: the original leaked both resources when an IOException occurred.
        try {
            br.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            if (pathOutput != null) {
                writer.close();
            } else {
                // Only close writers we own; closing the System.out wrapper
                // would close standard output for the whole process.
                writer.flush();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

/**
 * Analyses the graphs accumulated for one text, writes one scaled result line
 * (with per-text and accumulated timing) to the writer, and returns the
 * updated accumulated analysis time. Extracted from the two duplicated
 * emission sites of analyse().
 */
private static double analyseText(List<SentimentDependencyGraph> sdgs, RuleBasedAnalyser rba, Writer writer, String scale, boolean verbose, double totalAnalyseTime) {
    long initAnalyseTextTime = System.nanoTime();
    List<SentimentInformation> sis = sdgs.stream().map((SentimentDependencyGraph dg) -> (rba.analyse(dg, (short) 0))).collect(Collectors.toList());
    long stopAnalyseTextTime = System.nanoTime();
    String text = String.join(" ", sdgs.stream().map((SentimentDependencyGraph dg) -> dg.subgraphToString((short) 0)).collect(Collectors.toList()));
    SentimentInformation siFinal = rba.merge(sis);
    double textAnalyseTime = (stopAnalyseTextTime - initAnalyseTextTime) / 1000000000.0;
    totalAnalyseTime += textAnalyseTime;
    try {
        writer.write(printOutputScaled(siFinal, scale, rba.getAc().isBinaryNeutralAsNegative()) + "\t" + "\t" + text + "\t" + " [The analysis took: " + textAnalyseTime + " seg.] [Accumulated time is: " + totalAnalyseTime + "]\n");
        writer.flush();
        if (verbose) {
            sdgs.stream().forEach(sdg -> sdg.printLandscapeGraph((short) 0));
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return totalAnalyseTime;
}
Use of org.grupolys.samulan.util.SentimentInformation in project uuusa by aghie:
class SyntacticRuleBasedAnalyser, method calculate.
/**
 * Given the SentimentInformation of a head term and its children, it computes the merged SentimentInformation
 * that results after computing all operations to be applied at that stage.
 * @param head: SentimentInformation object corresponding to the head (as a single node).
 * @param children: List of SentimentInformation objects corresponding to the computed/merged SentimentInformation rooted at each child of the head term
 * @return A new SentimentInformation corresponding to the computed/merged SentimentInformation rooted at the head term
 */
public SentimentInformation calculate(SentimentInformation head, List<SentimentInformation> children) {
List<QueuedOperationInformation> allOperations, qOperations;
PriorityQueue<QueuedOperationInformation> aOperations;
QueuedOperationInformation i;
OperationValue ov;
// Work on a copy so the caller's head is not mutated by the operations below.
SentimentInformation newHead = new SentimentInformation(head);
// Gather the operations queued on the head and on all children, then split
// them into those to keep queued and those ready to apply at this node.
allOperations = getAllQueuedOperations(newHead, children);
qOperations = getOperationsToQueue(allOperations);
aOperations = getOperationsToApply(allOperations);
String appliedOperations = "";
// Apply ready operations in priority order; each application may replace both
// the head and the children, so subsequent operations see the updated pair.
while ((i = aOperations.poll()) != null) {
ov = i.getOperation().apply(newHead, children);
// Logging the applied operation at node i
// TODO: Improve how we track.
appliedOperations = appliedOperations.concat(ov.appliedOperation() == null ? "" : ov.appliedOperation() + ",");
newHead = ov.getHead();
children = ov.getChildren();
}
// Fold the (possibly transformed) children's sentiment into the new head.
this.ac.getSentimentJoiner().join(newHead, children);
// newHead has the reference to its graph
newHead.setSentimentInformationInGraph();
newHead.setOperationExplanation(appliedOperations);
// We add q(eued)Operations coming from the children to the head, to spread them through the tree
for (QueuedOperationInformation pd : qOperations) {
if (!newHead.getQueuedOperations().contains(pd)) {
newHead.getQueuedOperations().add(pd);
}
}
// Keep only the operations that are still pending; the rest are dropped.
List<QueuedOperationInformation> aux = new ArrayList<QueuedOperationInformation>();
for (QueuedOperationInformation pd : newHead.getQueuedOperations()) {
if (isPendingOperation(pd)) {
aux.add(pd);
}
}
newHead.setQueuedOperations(aux);
updateLevelsUp(newHead.getQueuedOperations());
return newHead;
}
Use of org.grupolys.samulan.util.SentimentInformation in project uuusa by aghie:
class SyntacticRuleBasedAnalyser, method merge.
/**
 * Computes the final SentimentInformation of a sample, given a list of
 * SentimentInformation objects. Intended for running document-level
 * sentiment classification.
 *
 * Fix: the doc comment was placed between the {@code public} modifier and the
 * return type, where the javadoc tool does not attach it to the method; it now
 * precedes the whole declaration.
 *
 * @param sis one SentimentInformation object per sentence.
 * @return a new SentimentInformation whose positive/negative scores are the
 *         sums of the per-sentence scores and whose semantic orientation is
 *         their difference (positive - negative).
 */
@Override
public SentimentInformation merge(List<SentimentInformation> sis) {
    float posSentiment = 0;
    float negSentiment = 0;
    for (SentimentInformation siAux : sis) {
        posSentiment += siAux.getPositiveSentiment();
        negSentiment += siAux.getNegativeSentiment();
    }
    SentimentInformation si = new SentimentInformation();
    si.setPositiveSentiment(posSentiment);
    si.setNegativeSentiment(negSentiment);
    si.setSemanticOrientation(posSentiment - negSentiment);
    return si;
}
Use of org.grupolys.samulan.util.SentimentInformation in project uuusa by aghie:
class SyntacticRuleBasedAnalyser, method getAllQueuedOperations.
/**
 * Collects the queued operations of the head and of every child into a single
 * list. While doing so, increments levelsUp on a child's operations when the
 * child's head lemma is a weighting word and its grandparent is subjective
 * (nesting weighting operations; TODO: only supports double nesting).
 *
 * @param head the head's SentimentInformation (its queue seeds the result).
 * @param children the children's computed SentimentInformation objects.
 * @return a new list with all queued operations, head's first.
 */
private List<QueuedOperationInformation> getAllQueuedOperations(SentimentInformation head, List<SentimentInformation> children) {
    List<QueuedOperationInformation> allQueuedOperations = new ArrayList<QueuedOperationInformation>(head.getQueuedOperations());
    for (SentimentInformation siChild : children) {
        // Preserve the original call pattern: children with no queued
        // operations trigger no graph/dictionary lookups at all.
        if (siChild.getQueuedOperations().isEmpty()) {
            continue;
        }
        // Fix: everything below depends only on siChild, so it is computed
        // once per child instead of once per queued operation (the original
        // repeated these lookups inside the inner loop).
        short headAddress = siChild.getSentimentDependencyNode().getHead();
        SentimentDependencyGraph sdgChild = siChild.getSentimentDependencyGraph();
        SentimentDependencyNode headNode = sdgChild.getNode(headAddress);
        String headLemma = this.rm.getD().getLemma(headNode.getCpostag(), headNode.getWord());
        SentimentDependencyNode grandPaNode = sdgChild.getNode(headNode.getHead());
        String grandPaLemma = this.rm.getD().getLemma(grandPaNode.getCpostag(), grandPaNode.getWord());
        boolean grandPaIsSubjective = this.rm.getD().getValue(grandPaNode.getCpostag(), grandPaLemma, true) != 0;
        boolean nestedWeighting = this.rm.getD().isWeight(headLemma) && grandPaIsSubjective;
        for (QueuedOperationInformation oChild : siChild.getQueuedOperations()) {
            if (nestedWeighting) {
                oChild.setLevelsUp((short) (oChild.getLevelsUp() + 1));
            }
            allQueuedOperations.add(oChild);
        }
    }
    return allQueuedOperations;
}
Aggregations