Use of com.hankcs.hanlp.corpus.tag.NT in project HanLP by hankcs: the class NTDictionary, method loadDat.
private EnumItem<NT>[] loadDat(String path) {
    byte[] bytes = IOUtil.readBytes(path);
    if (bytes == null)
        return null;
    NT[] values = NT.values();
    int index = 0;
    // the first big-endian int is the number of entries
    int size = ByteUtil.bytesHighFirstToInt(bytes, index);
    index += 4;
    EnumItem<NT>[] valueArray = new EnumItem[size];
    for (int i = 0; i < size; ++i) {
        // each entry starts with the number of (tag, frequency) pairs it holds
        int currentSize = ByteUtil.bytesHighFirstToInt(bytes, index);
        index += 4;
        EnumItem<NT> item = new EnumItem<NT>();
        for (int j = 0; j < currentSize; ++j) {
            // a pair is two big-endian ints: the ordinal of the NT tag and its frequency
            NT tag = values[ByteUtil.bytesHighFirstToInt(bytes, index)];
            index += 4;
            int frequency = ByteUtil.bytesHighFirstToInt(bytes, index);
            index += 4;
            item.labelMap.put(tag, frequency);
        }
        valueArray[i] = item;
    }
    return valueArray;
}
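For context, loadDat assumes a flat binary layout of big-endian ints: a total entry count, then for each entry a pair count followed by (NT ordinal, frequency) pairs. The following is a minimal, hypothetical writer sketch using only the JDK (DataOutputStream writes ints high-byte-first, matching ByteUtil.bytesHighFirstToInt); it is not HanLP's own save routine, and the file name and data are made up.

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NtDatLayoutSketch {
    // Writes the layout loadDat reads: [entryCount][pairCount][tagOrdinal][frequency]... as big-endian ints.
    static void write(String path, List<Map<Integer, Integer>> entries) throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(path))) {
            out.writeInt(entries.size());              // total number of entries
            for (Map<Integer, Integer> labelMap : entries) {
                out.writeInt(labelMap.size());         // pairs in this entry
                for (Map.Entry<Integer, Integer> pair : labelMap.entrySet()) {
                    out.writeInt(pair.getKey());       // NT tag ordinal
                    out.writeInt(pair.getValue());     // frequency
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        Map<Integer, Integer> entry = new LinkedHashMap<>();
        entry.put(0, 1000);                            // hypothetical: tag ordinal 0 with frequency 1000
        write("nt-demo.dat", List.of(entry));
    }
}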
Use of com.hankcs.hanlp.corpus.tag.NT in project HanLP by hankcs: the class OrganizationRecognition, method roleTag.
public static List<EnumItem<NT>> roleTag(List<Vertex> vertexList, WordNet wordNetAll) {
    List<EnumItem<NT>> tagList = new LinkedList<EnumItem<NT>>();
    // int line = 0;
    for (Vertex vertex : vertexList) {
        // may combine into a longer organization name
        Nature nature = vertex.guessNature();
        switch (nature) {
            case nrf: {
                // transliterated name: only rare ones receive the F role
                if (vertex.getAttribute().totalFrequency <= 1000) {
                    tagList.add(new EnumItem<NT>(NT.F, 1000));
                } else
                    break;
            }
            continue;
            case ni:
            case nic:
            case nis:
            case nit: {
                // organization-related natures
                EnumItem<NT> ntEnumItem = new EnumItem<NT>(NT.K, 1000);
                ntEnumItem.addLabel(NT.D, 1000);
                tagList.add(ntEnumItem);
            }
            continue;
            case m: {
                // numeral
                EnumItem<NT> ntEnumItem = new EnumItem<NT>(NT.M, 1000);
                tagList.add(ntEnumItem);
            }
            continue;
        }
        // use the equivalent word here, which is more precise
        EnumItem<NT> NTEnumItem = OrganizationDictionary.dictionary.get(vertex.word);
        if (NTEnumItem == null) {
            // fall back to role Z with its total frequency from the transition matrix
            NTEnumItem = new EnumItem<NT>(NT.Z, OrganizationDictionary.transformMatrixDictionary.getTotalFrequency(NT.Z));
        }
        tagList.add(NTEnumItem);
        // line += vertex.realWord.length();
    }
    return tagList;
}
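EnumItem<NT> is essentially a role-to-frequency map, so the list returned by roleTag is a sequence of per-word role observations. Below is a self-contained sketch of the naive greedy reading of such a list (plain JDK maps stand in for HanLP's EnumItem, and the observations are invented); the real pipeline instead runs viterbiExCompute, so that role transition probabilities are also taken into account.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RoleObservationSketch {
    // Greedy baseline: pick, for each word, the role with the highest observed frequency.
    static List<String> greedyTags(List<Map<String, Integer>> observations) {
        return observations.stream()
                .map(labelMap -> labelMap.entrySet().stream()
                        .max(Map.Entry.comparingByValue())
                        .map(Map.Entry::getKey)
                        .orElse("Z"))                       // mirror the Z fallback used by roleTag
                .toList();
    }

    public static void main(String[] args) {
        Map<String, Integer> w1 = new LinkedHashMap<>();    // hypothetical observations for three words
        w1.put("F", 1000);
        Map<String, Integer> w2 = new LinkedHashMap<>();
        w2.put("K", 1000);
        w2.put("D", 800);
        Map<String, Integer> w3 = new LinkedHashMap<>();
        w3.put("M", 1000);
        System.out.println(greedyTags(List.of(w1, w2, w3)));  // prints [F, K, M]
    }
}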
Use of com.hankcs.hanlp.corpus.tag.NT in project HanLP by hankcs: the class OrganizationRecognition, method Recognition.
public static boolean Recognition(List<Vertex> pWordSegResult, WordNet wordNetOptimum, WordNet wordNetAll) {
    List<EnumItem<NT>> roleTagList = roleTag(pWordSegResult, wordNetAll);
    if (HanLP.Config.DEBUG) {
        StringBuilder sbLog = new StringBuilder();
        Iterator<Vertex> iterator = pWordSegResult.iterator();
        for (EnumItem<NT> NTEnumItem : roleTagList) {
            sbLog.append('[');
            sbLog.append(iterator.next().realWord);
            sbLog.append(' ');
            sbLog.append(NTEnumItem);
            sbLog.append(']');
        }
        // "机构名角色观察" = observed organization-name roles
        System.out.printf("机构名角色观察:%s\n", sbLog.toString());
    }
    List<NT> NTList = viterbiExCompute(roleTagList);
    if (HanLP.Config.DEBUG) {
        StringBuilder sbLog = new StringBuilder();
        Iterator<Vertex> iterator = pWordSegResult.iterator();
        sbLog.append('[');
        for (NT NT : NTList) {
            sbLog.append(iterator.next().realWord);
            sbLog.append('/');
            sbLog.append(NT);
            sbLog.append(" ,");
        }
        if (sbLog.length() > 1)
            sbLog.delete(sbLog.length() - 2, sbLog.length());
        sbLog.append(']');
        // "机构名角色标注" = decoded organization-name role tags
        System.out.printf("机构名角色标注:%s\n", sbLog.toString());
    }
    OrganizationDictionary.parsePattern(NTList, pWordSegResult, wordNetOptimum, wordNetAll);
    return true;
}
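Recognition is normally reached through the segmenter rather than called directly. A minimal usage sketch with HanLP 1.x's public API (enableOrganizationRecognize is the switch that routes the coarse segmentation result into this method; the sentence and output are only illustrative):

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import java.util.List;

public class OrganizationRecognizeDemo {
    public static void main(String[] args) {
        // Organization recognition is off by default; enabling it makes the segmenter
        // run OrganizationRecognition.Recognition over the coarse segmentation graph.
        Segment segment = HanLP.newSegment().enableOrganizationRecognize(true);
        List<Term> termList = segment.seg("我在上海林原科技有限公司兼职工作");
        System.out.println(termList);  // recognized organizations carry the nt part of speech
    }
}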
Use of com.hankcs.hanlp.corpus.tag.NT in project HanLP by hankcs: the class OrganizationDictionary, method parsePattern.
/**
 * Pattern matching
 *
 * @param ntList         the determined role-tag sequence
 * @param vertexList     the original sequence without role tags
 * @param wordNetOptimum the word graph to be optimized
 * @param wordNetAll     the full word graph
 */
public static void parsePattern(List<NT> ntList, List<Vertex> vertexList, final WordNet wordNetOptimum, final WordNet wordNetAll) {
    // ListIterator<Vertex> listIterator = vertexList.listIterator();
    StringBuilder sbPattern = new StringBuilder(ntList.size());
    for (NT nt : ntList) {
        sbPattern.append(nt.toString());
    }
    String pattern = sbPattern.toString();
    final Vertex[] wordArray = vertexList.toArray(new Vertex[0]);
    trie.parseText(pattern, new AhoCorasickDoubleArrayTrie.IHit<String>() {
        @Override
        public void hit(int begin, int end, String keyword) {
            StringBuilder sbName = new StringBuilder();
            for (int i = begin; i < end; ++i) {
                sbName.append(wordArray[i].realWord);
            }
            String name = sbName.toString();
            // filter out some known bad cases
            if (isBadCase(name))
                return;
            // officially treat it as an organization name
            if (HanLP.Config.DEBUG) {
                // "识别出机构名" = recognized organization name
                System.out.printf("识别出机构名:%s %s\n", name, keyword);
            }
            int offset = 0;
            for (int i = 0; i < begin; ++i) {
                offset += wordArray[i].realWord.length();
            }
            wordNetOptimum.insert(offset, new Vertex(Predefine.TAG_GROUP, name, ATTRIBUTE, WORD_ID), wordNetAll);
        }
    });
}
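The mechanism behind parsePattern: the decoded role tags are concatenated into a single string, and an AhoCorasickDoubleArrayTrie of known role patterns is run over it; every hit is mapped back to the underlying words. A self-contained sketch of that idea follows; the patterns, words, and tag sequence are made up, the import path and build signature are assumed to match HanLP 1.x, and the real patterns come from the organization dictionary's pattern resource.

import com.hankcs.hanlp.collection.AhoCorasick.AhoCorasickDoubleArrayTrie;
import java.util.TreeMap;

public class RolePatternSketch {
    public static void main(String[] args) {
        // Hypothetical role patterns, keyed and valued by the pattern string itself.
        TreeMap<String, String> patterns = new TreeMap<String, String>();
        patterns.put("KD", "KD");
        patterns.put("FKD", "FKD");
        AhoCorasickDoubleArrayTrie<String> trie = new AhoCorasickDoubleArrayTrie<String>();
        trie.build(patterns);

        // One role character per word, concatenated exactly as parsePattern builds `pattern`.
        final String[] words = {"w1", "w2", "w3"};    // hypothetical words
        String tagSequence = "FKD";

        trie.parseText(tagSequence, new AhoCorasickDoubleArrayTrie.IHit<String>() {
            @Override
            public void hit(int begin, int end, String keyword) {
                // Map the hit on the tag string back to the corresponding words.
                StringBuilder name = new StringBuilder();
                for (int i = begin; i < end; ++i) {
                    name.append(words[i]);
                }
                System.out.println(keyword + " -> " + name);
            }
        });
    }
}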