use of de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma in project webanno by webanno.
the class ConstraintsVerifierTest method test.
@Test
public void test() throws Exception {
    ConstraintsGrammar parser = new ConstraintsGrammar(
            new FileInputStream("src/test/resources/rules/6.rules"));
    Parse p = parser.Parse();
    ParsedConstraints constraints = p.accept(new ParserVisitor());

    // Get imports
    Map<String, String> imports = new LinkedHashMap<>();
    imports = constraints.getImports();

    // Get rules
    // List<Rule> rules = new ArrayList<>();

    JCas jcas = JCasFactory.createJCas();
    jcas.setDocumentText("Just some text.");

    Lemma lemma1 = new Lemma(jcas, 0, 1);
    lemma1.setValue("good");
    lemma1.addToIndexes();

    Lemma lemma2 = new Lemma(jcas, 1, 2);
    lemma2.setValue("bad");
    lemma2.addToIndexes();

    Verifiable cVerifier = new ConstraintsVerifier();
    for (Lemma lemma : select(jcas, Lemma.class)) {
        if (lemma == lemma1) {
            assertEquals(true, cVerifier.verify(lemma, constraints));
        }
        if (lemma == lemma2) {
            assertEquals(false, cVerifier.verify(lemma, constraints));
        }
    }
}
use of de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma in project webanno by webanno.
the class WebAnnoTsv3WriterTestBase method testTokenAttachedAnnotationsWithValues.
@Test
public void testTokenAttachedAnnotationsWithValues() throws Exception {
    JCas jcas = makeJCasOneSentence();
    List<Token> tokens = new ArrayList<>(select(jcas, Token.class));
    Token t1 = tokens.get(0);

    Lemma l1 = new Lemma(jcas, t1.getBegin(), t1.getEnd());
    l1.setValue("lemma1");
    l1.addToIndexes();
    t1.setLemma(l1);

    MorphologicalFeatures m1 = new MorphologicalFeatures(jcas, t1.getBegin(), t1.getEnd());
    m1.setValue("morph");
    m1.setTense("tense1");
    m1.addToIndexes();
    t1.setMorph(m1);

    POS p1 = new POS(jcas, t1.getBegin(), t1.getEnd());
    p1.setPosValue("pos1");
    p1.addToIndexes();
    t1.setPos(p1);

    Stem s1 = new Stem(jcas, t1.getBegin(), t1.getEnd());
    s1.setValue("stem1");
    s1.addToIndexes();
    t1.setStem(s1);

    writeAndAssertEquals(jcas, WebannoTsv3Writer.PARAM_SPAN_LAYERS,
            asList(MorphologicalFeatures.class, POS.class, Lemma.class, Stem.class));
}
use of de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma in project dkpro-tc by dkpro.
the class TypeTokenPairFeatureExtractorTest method setUp.
@Before
public void setUp() throws ResourceInitializationException, AnalysisEngineProcessException {
    AnalysisEngineDescription desc = createEngineDescription(BreakIteratorSegmenter.class);
    AnalysisEngine engine = createEngine(desc);

    jcas1 = engine.newJCas();
    jcas1.setDocumentLanguage("en");
    jcas1.setDocumentText("This is text");
    engine.process(jcas1);

    Lemma lemma1 = new Lemma(jcas1, 0, 4);
    lemma1.setValue("text");
    lemma1.addToIndexes();

    Lemma lemma2 = new Lemma(jcas1, 5, 7);
    lemma2.setValue("is");
    lemma2.addToIndexes();

    Lemma lemma3 = new Lemma(jcas1, 8, 10);
    lemma3.setValue("text");
    lemma3.addToIndexes();

    jcas2 = engine.newJCas();
    jcas2.setDocumentLanguage("en");
    jcas2.setDocumentText("Text is text");
    engine.process(jcas2);

    Lemma lemma4 = new Lemma(jcas2, 0, 4);
    lemma4.setValue("text");
    lemma4.addToIndexes();

    Lemma lemma5 = new Lemma(jcas2, 8, 10);
    lemma5.setValue("text");
    lemma5.addToIndexes();
}
use of de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma in project webanno by webanno.
the class MergeCas method reMergeCas.
/**
 * Using the {@code DiffResult}, determine the annotations to be deleted from the randomly
 * generated merge CAS. The initial merge CAS is stored under the name
 * {@code CurationPanel#CURATION_USER}.
 * <p>
 * Any similar annotations stacked at a {@code CasDiff2.Position} are assumed to be a difference.
 * <p>
 * Any two annotations with different values are assumed to be a difference.
 * <p>
 * Any non-stacked empty/null annotations are assumed to be an agreement.
 * <p>
 * Any non-stacked annotations with the same values for all of their features are assumed to be
 * an agreement.
 * <p>
 * Any two link-mode (slot-bearing) annotations which agree on the base features are assumed to
 * be an agreement.
 *
 * @param aDiff
 *            the {@code CasDiff2.DiffResult}
 * @param aJCases
 *            a map of {@code JCas}es, one per user plus the random merge
 * @return the actual merge {@code JCas}
 */
public static JCas reMergeCas(DiffResult aDiff, Map<String, JCas> aJCases) {
    Set<FeatureStructure> slotFeaturesToReset = new HashSet<>();
    Set<FeatureStructure> annotationsToDelete = new HashSet<>();
    Set<String> users = aJCases.keySet();

    for (Position position : aDiff.getPositions()) {
        Map<String, List<FeatureStructure>> annosPerUser = new HashMap<>();
        ConfigurationSet cfgs = aDiff.getConfigurtionSet(position);

        if (cfgs.getConfigurations(WebAnnoConst.CURATION_USER).isEmpty()) {
            // No annotation for the curation user at this position - nothing to merge
            continue;
        }

        AnnotationFS mergeAnno = (AnnotationFS) cfgs.getConfigurations(WebAnnoConst.CURATION_USER)
                .get(0).getFs(WebAnnoConst.CURATION_USER, aJCases);

        // Get the annotations of every user at this position
        getAllAnnosOnPosition(aJCases, annosPerUser, users, mergeAnno);

        for (FeatureStructure mergeFs : annosPerUser.get(WebAnnoConst.CURATION_USER)) {
            if (aJCases.size() != annosPerUser.size()) {
                // Incomplete annotations - not every user annotated this position
                annotationsToDelete.add(mergeFs);
            }
            // Agreed and not stacked
            else if (isAgree(mergeFs, annosPerUser)) {
                Type t = mergeFs.getType();
                Feature sourceFeat = t.getFeatureByBaseName(WebAnnoConst.FEAT_REL_SOURCE);
                Feature targetFeat = t.getFeatureByBaseName(WebAnnoConst.FEAT_REL_TARGET);

                // Is this a relation?
                if (sourceFeat != null && targetFeat != null) {
                    AnnotationFS source = (AnnotationFS) mergeFs.getFeatureValue(sourceFeat);
                    AnnotationFS target = (AnnotationFS) mergeFs.getFeatureValue(targetFeat);

                    // All span annotations at the source position
                    Map<String, List<FeatureStructure>> sourceAnnosPerUser = new HashMap<>();
                    // All span annotations at the target position
                    Map<String, List<FeatureStructure>> targetAnnosPerUser = new HashMap<>();

                    getAllAnnosOnPosition(aJCases, sourceAnnosPerUser, users, source);
                    getAllAnnosOnPosition(aJCases, targetAnnosPerUser, users, target);

                    if (isAgree(source, sourceAnnosPerUser) && isAgree(target, targetAnnosPerUser)) {
                        slotFeaturesToReset.add(mergeFs);
                    } else {
                        annotationsToDelete.add(mergeFs);
                    }
                } else {
                    slotFeaturesToReset.add(mergeFs);
                }
            }
            // Disagreement or stacked annotations
            else {
                annotationsToDelete.add(mergeFs);
            }
            // Remove dangling rels
            // setDanglingRelToDel(aJCases.get(CurationPanel.CURATION_USER),
            // mergeFs, annotationsToDelete);
        }
    }
    // Remove annotations that do not agree or are stacked
    for (FeatureStructure fs : annotationsToDelete) {
        if (!slotFeaturesToReset.contains(fs)) {
            JCas mergeCas = aJCases.get(WebAnnoConst.CURATION_USER);

            // Check if this difference is on POS, STEM, LEMMA or MorphologicalFeatures
            // (if so, also detach it from the token)
            Type type = fs.getType();
            int fsBegin = ((AnnotationFS) fs).getBegin();
            int fsEnd = ((AnnotationFS) fs).getEnd();
            if (type.getName().equals(POS.class.getName())) {
                Token t = JCasUtil.selectCovered(mergeCas, Token.class, fsBegin, fsEnd).get(0);
                t.setPos(null);
            }
            if (type.getName().equals(Stem.class.getName())) {
                Token t = JCasUtil.selectCovered(mergeCas, Token.class, fsBegin, fsEnd).get(0);
                t.setStem(null);
            }
            if (type.getName().equals(Lemma.class.getName())) {
                Token t = JCasUtil.selectCovered(mergeCas, Token.class, fsBegin, fsEnd).get(0);
                t.setLemma(null);
            }
            if (type.getName().equals(MorphologicalFeatures.class.getName())) {
                Token t = JCasUtil.selectCovered(mergeCas, Token.class, fsBegin, fsEnd).get(0);
                t.setMorph(null);
            }
            mergeCas.removeFsFromIndexes(fs);
        }
    }
    // If the annotation is slot-bearing, clean up its link features
    for (FeatureStructure baseFs : slotFeaturesToReset) {
        for (Feature roleFeature : baseFs.getType().getFeatures()) {
            if (isLinkMode(baseFs, roleFeature)) {
                // FeatureStructure roleFs = baseFs.getFeatureValue(f);
                ArrayFS roleFss = (ArrayFS) WebAnnoCasUtil.getFeatureFS(baseFs,
                        roleFeature.getShortName());
                if (roleFss == null) {
                    continue;
                }

                Map<String, ArrayFS> roleAnnosPerUser = new HashMap<>();
                setAllRoleAnnosOnPosition(aJCases, roleAnnosPerUser, users, baseFs, roleFeature);

                List<FeatureStructure> linkFSes = new LinkedList<>(Arrays.asList(roleFss.toArray()));
                for (FeatureStructure roleFs : roleFss.toArray()) {
                    if (isRoleAgree(roleFs, roleAnnosPerUser)) {
                        for (Feature targetFeature : roleFs.getType().getFeatures()) {
                            if (isBasicFeature(targetFeature)) {
                                continue;
                            }
                            if (!targetFeature.getShortName().equals("target")) {
                                continue;
                            }
                            AnnotationFS targetFs = (AnnotationFS) roleFs.getFeatureValue(targetFeature);
                            if (targetFs == null) {
                                continue;
                            }

                            Map<String, List<FeatureStructure>> targetAnnosPerUser = new HashMap<>();
                            getAllAnnosOnPosition(aJCases, targetAnnosPerUser, users, targetFs);

                            // The users do not agree on the link target
                            if (!isAgree(targetFs, targetAnnosPerUser)) {
                                linkFSes.remove(roleFs);
                            }
                        }
                    }
                    // The users do not agree on some of the role features
                    else {
                        linkFSes.remove(roleFs);
                    }
                }
                ArrayFS array = baseFs.getCAS().createArrayFS(linkFSes.size());
                array.copyFromArray(linkFSes.toArray(new FeatureStructure[linkFSes.size()]), 0, 0,
                        linkFSes.size());
                baseFs.setFeatureValue(roleFeature, array);
            }
        }
    }
    return aJCases.get(WebAnnoConst.CURATION_USER);
}
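For orientation, the following is a minimal, hypothetical call site for reMergeCas, assuming the same imports as the method above; the helper method, the annotator user names, and the variable names are illustrative assumptions and not WebAnno code.

// A sketch only: the surrounding helper and the user names are assumptions,
// not part of the WebAnno sources shown above.
static JCas mergeForCuration(DiffResult diff, JCas annotator1Cas, JCas annotator2Cas,
        JCas initialMergeCas) {
    Map<String, JCas> jCases = new HashMap<>();
    jCases.put("annotator1", annotator1Cas);                 // one JCas per annotator
    jCases.put("annotator2", annotator2Cas);
    jCases.put(WebAnnoConst.CURATION_USER, initialMergeCas); // pre-generated merge CAS
    // Annotations that disagree or are stacked are removed from the merge CAS;
    // disputed slot links on agreeing base annotations are cleared.
    return MergeCas.reMergeCas(diff, jCases);
}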
use of de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma in project webanno by webanno.
the class ConstraintsGeneratorTest method testSimpleFeature.
@Test
public void testSimpleFeature() throws Exception {
    ConstraintsGrammar parser = new ConstraintsGrammar(
            new FileInputStream("src/test/resources/rules/9.rules"));
    Parse p = parser.Parse();
    ParsedConstraints constraints = p.accept(new ParserVisitor());

    JCas jcas = JCasFactory.createJCas();
    jcas.setDocumentText("is");

    Lemma lemma = new Lemma(jcas, 0, 2);
    lemma.setValue("be");
    lemma.addToIndexes();

    Evaluator constraintsEvaluator = new ValuesGenerator();
    List<PossibleValue> possibleValues = constraintsEvaluator.generatePossibleValues(lemma,
            "value", constraints);

    List<PossibleValue> expectedOutput = new LinkedList<>();
    expectedOutput.add(new PossibleValue("be", true));

    assertEquals(expectedOutput, possibleValues);
}