Use of edu.illinois.cs.cogcomp.edison.features.FeatureCollection in project cogcomp-nlp by CogComp.
From the class TestChunkFeatures, the method testChunkEmbedding:
// protected void setUp() throws Exception {
//     super.setUp();
// }
@Test
public final void testChunkEmbedding() throws Exception {
    logger.info("\n\tTesting NER embedding");
    testFex(ChunkEmbedding.NER, false, ViewNames.NER_CONLL);

    logger.info("\n\tTesting chunk embedding");
    testFex(ChunkEmbedding.SHALLOW_PARSE, false, ViewNames.SHALLOW_PARSE);

    logger.info("\n\tTesting conjoined features");
    testFex(FeatureUtilities.conjoin(ChunkEmbedding.NER, ChunkEmbedding.SHALLOW_PARSE), false,
            ViewNames.NER_CONLL, ViewNames.SHALLOW_PARSE);

    logger.info("\n\tTesting NER and chunks");
    testFex(new FeatureCollection("", ChunkEmbedding.NER, ChunkEmbedding.SHALLOW_PARSE), false, "");
}
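The testFex helper is defined elsewhere in TestChunkFeatures and is not shown here. As a rough illustration of what each call exercises, the sketch below applies the conjoined NER/shallow-parse extractor directly to a single token span. It is a sketch only, not the test's own code: it assumes a TextAnnotation ta whose NER_CONLL and SHALLOW_PARSE views are already populated, the method name is hypothetical, and it uses only API visible in these snippets (FeatureUtilities.conjoin, ChunkEmbedding, the token-span Constituent constructor, getFeatures).

// Sketch only: apply the conjoined extractor to the first token of `ta`.
// Assumes `ta` already carries the NER_CONLL and SHALLOW_PARSE views.
private void printConjoinedFeatures(TextAnnotation ta) throws EdisonException {
    Constituent firstToken = new Constituent("", "", ta, 0, 1);
    Set<Feature> features =
            FeatureUtilities.conjoin(ChunkEmbedding.NER, ChunkEmbedding.SHALLOW_PARSE)
                    .getFeatures(firstToken);
    // Same string form of the feature set that the snippets below compare against.
    System.out.println(features.toString());
}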
Use of edu.illinois.cs.cogcomp.edison.features.FeatureCollection in project cogcomp-nlp by CogComp.
From the class CreateTestFeaturesResource, the method addFeatCollection:
private void addFeatCollection() throws EdisonException, IOException {
    Map<Integer, String> map = new HashMap<>();
    FeatureCollection featureCollection = new FeatureCollection("features");
    featureCollection.addFeatureExtractor(WordFeatureExtractorFactory.conflatedPOS);
    featureCollection.addFeatureExtractor(WordFeatureExtractorFactory.gerundMarker);
    featureCollection.addFeatureExtractor(WordFeatureExtractorFactory.nominalizationMarker);
    for (TextAnnotation ta : tas) {
        for (int tokenId = 0; tokenId < ta.size(); tokenId++) {
            Constituent c = new Constituent("", "", ta, tokenId, tokenId + 1);
            Set<Feature> features = featureCollection.getFeatures(c);
            if (features.size() > 0) {
                String id = ta.getTokenizedText() + ":" + tokenId;
                map.put(id.hashCode(), features.toString());
            }
        }
    }
    IOUtils.writeObject(map, FEATURE_COLLECTION_FILE);
}
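As a side note, the three addFeatureExtractor calls above could equally be folded into the varargs constructor already seen in the first snippet; a minimal, equivalent sketch using the same extractors and no new API:

// Sketch only: the same collection built via the varargs constructor.
FeatureCollection featureCollection = new FeatureCollection("features",
        WordFeatureExtractorFactory.conflatedPOS,
        WordFeatureExtractorFactory.gerundMarker,
        WordFeatureExtractorFactory.nominalizationMarker);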
Use of edu.illinois.cs.cogcomp.edison.features.FeatureCollection in project cogcomp-nlp by CogComp.
From the class TestWordFeatureFactory, the method testFeatureCollection:
@Test
public final void testFeatureCollection() throws Exception {
    FeatureCollection f = new FeatureCollection("features");
    f.addFeatureExtractor(WordFeatureExtractorFactory.conflatedPOS);
    f.addFeatureExtractor(WordFeatureExtractorFactory.gerundMarker);
    f.addFeatureExtractor(WordFeatureExtractorFactory.nominalizationMarker);
    logger.info("\tTesting feature collection");
    Map<Integer, String> map =
            IOUtils.readObjectAsResource(TestWordFeatureFactory.class, "feature.collection.test");
    for (TextAnnotation ta : tas) {
        for (int tokenId = 0; tokenId < ta.size(); tokenId++) {
            Constituent c = new Constituent("", "", ta, tokenId, tokenId + 1);
            Set<Feature> features = f.getFeatures(c);
            if (features.size() > 0) {
                String id = ta.getTokenizedText() + ":" + tokenId;
                assertEquals(map.get(id.hashCode()), features.toString());
            }
        }
    }
}
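The per-token loop is identical in the two snippets above: CreateTestFeaturesResource writes the map, and this test asserts against it. A minimal sketch of that loop factored into a shared helper, using only the API already shown in these snippets; the helper name is hypothetical and not part of cogcomp-nlp:

// Sketch only: build the map from hashed token key (tokenized text + ":" + token index)
// to the string form of that token's feature set, as both snippets above do.
private Map<Integer, String> extractTokenFeatures(FeatureCollection fex, TextAnnotation ta)
        throws EdisonException {
    Map<Integer, String> result = new HashMap<>();
    for (int tokenId = 0; tokenId < ta.size(); tokenId++) {
        Constituent c = new Constituent("", "", ta, tokenId, tokenId + 1);
        Set<Feature> features = fex.getFeatures(c);
        if (features.size() > 0) {
            String id = ta.getTokenizedText() + ":" + tokenId;
            result.put(id.hashCode(), features.toString());
        }
    }
    return result;
}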