Use of net.sourceforge.ondex.core.Attribute in the knetbuilder project by Rothamsted:
class ONDEXGraphCloner, method cloneConcept.
/**
 * Clones a concept from the original graph to the new graph.
 * <p>
 * Meta data is cloned lazily on first use. The operation is idempotent: if
 * the concept has been cloned before, the previously created copy is
 * returned. Tags are cloned recursively; the old-to-new id mapping is
 * registered <em>before</em> tags are processed, so a concept that carries
 * itself as a tag does not recurse forever.
 *
 * @param conceptToClone
 * the concept to clone in the original graph
 * @return the new Concept
 */
public ONDEXConcept cloneConcept(ONDEXConcept conceptToClone) {
// make sure concept classes, data sources, attribute names etc. exist in the new graph
if (!metaDataHasBeenCloned) {
cloneMetaData();
}
// already cloned? return the existing copy (keeps the method idempotent)
if (old2newConceptIds.containsKey(conceptToClone.getId())) {
// " already cloned returning previously cloned concept");
return newGraph.getConcept(old2newConceptIds.get(conceptToClone.getId()));
}
ONDEXGraphMetaData nomd = newGraph.getMetaData();
String pid = conceptToClone.getPID();
String desc = conceptToClone.getDescription();
String anno = conceptToClone.getAnnotation();
// translate evidence types into their counterparts in the new graph's meta data
ArrayList<EvidenceType> ets = new ArrayList<EvidenceType>();
for (EvidenceType evidence : conceptToClone.getEvidence()) {
ets.add(nomd.getEvidenceType(evidence.getId()));
}
// look up the equivalent data source and concept class in the new graph
DataSource dataSource = conceptToClone.getElementOf();
DataSource newDataSource = nomd.getDataSource(dataSource.getId());
ConceptClass cc = conceptToClone.getOfType();
ConceptClass newCC = nomd.getConceptClass(cc.getId());
ONDEXConcept newConcept = newGraph.createConcept(pid, anno, desc, newDataSource, newCC, ets);
// register the id mapping BEFORE cloning tags:
// required to prevent StackOverflow when adding itself as tag
old2newConceptIds.put(conceptToClone.getId(), newConcept.getId());
// clone tag concepts, reusing already-cloned ones, recursing otherwise
for (ONDEXConcept tag : conceptToClone.getTags()) {
ONDEXConcept newTag;
if (old2newConceptIds.containsKey(tag.getId())) {
// check if
// tag
// concept
// exists
int cid = old2newConceptIds.get(tag.getId());
newTag = newGraph.getConcept(cid);
} else {
// recursive
newTag = cloneConcept(tag);
}
newConcept.addTag(newTag);
}
// copy attributes onto the new concept, mapping attribute names via the new meta data
for (Attribute attribute : conceptToClone.getAttributes()) {
AttributeName att = attribute.getOfType();
AttributeName newAtt = nomd.getAttributeName(att.getId());
newConcept.createAttribute(newAtt, attribute.getValue(), attribute.isDoIndex());
}
// copy concept names, preserving the preferred flag
for (ConceptName name : conceptToClone.getConceptNames()) {
newConcept.createConceptName(name.getName(), name.isPreferred());
}
// copy accessions, mapping their data sources via the new meta data
for (ConceptAccession acc : conceptToClone.getConceptAccessions()) {
dataSource = acc.getElementOf();
newDataSource = nomd.getDataSource(dataSource.getId());
newConcept.createConceptAccession(acc.getAccession(), newDataSource, acc.isAmbiguous());
}
return newConcept;
}
Use of net.sourceforge.ondex.core.Attribute in the knetbuilder project by Rothamsted:
class Subgraph, method mapOnAttribute.
/**
 * Clusters and merges concepts of this sub-graph that are (transitively)
 * connected through shared attribute (AttributeName, value) pairs.
 * Concepts are processed one concept class at a time; within a class the
 * transitive closure over shared attributes forms a cluster, which is then
 * handed to {@code mergeConcepts}.
 */
public final void mapOnAttribute() {
    Set<ONDEXConcept> allConcepts = this.getConcepts();
    // attribute (name, value) key -> ids of concepts carrying that attribute
    Map<ArrayKey<Object>, List<Integer>> gdsToConceptIndex = new HashMap<ArrayKey<Object>, List<Integer>>();
    // reverse index: concept id -> attribute keys it carries
    Map<Integer, List<ArrayKey<Object>>> conceptToGdsIndex = new HashMap<Integer, List<ArrayKey<Object>>>();
    Set<ConceptClass> ccs = StandardFunctions.getContainedConceptClasses(allConcepts);
    // Process a subset for each concept class separately
    for (ConceptClass cc : ccs) {
        Set<ONDEXConcept> subsetByCC = BitSetFunctions.and(allConcepts, graph.getConceptsOfConceptClass(cc));
        gdsToConceptIndex.clear();
        conceptToGdsIndex.clear();
        // The builders merely wrap the two index maps, so one instance each
        // suffices; previously they were re-created for every concept in the
        // loop below for no benefit.
        // NOTE(review): assumes CategoryMapBuilder holds no per-concept state - confirm.
        CategoryMapBuilder<ArrayKey<Object>, Integer> gdsToConceptIndexBuilder = new CategoryMapBuilder<ArrayKey<Object>, Integer>(gdsToConceptIndex);
        CategoryMapBuilder<Integer, ArrayKey<Object>> conceptToGdsIndexBuilder = new CategoryMapBuilder<Integer, ArrayKey<Object>>(conceptToGdsIndex);
        // Building the index on all gdss
        for (ONDEXConcept c : subsetByCC) {
            for (Attribute attribute : c.getAttributes()) {
                ArrayKey<Object> accessionKey = new ArrayKey<Object>(new Object[] { attribute.getOfType(), attribute.getValue() });
                gdsToConceptIndexBuilder.addEntry(accessionKey, c.getId());
                conceptToGdsIndexBuilder.addEntry(c.getId(), accessionKey);
            }
        }
        // Collapse equivalent entries
        System.err.println("Finished indexing concepts of " + cc.getId() + " concept class and started collapsing " + now("dd.MM.yyyy G 'at' HH:mm:ss z"));
        Entry<ArrayKey<Object>, List<Integer>> seed = null;
        while (gdsToConceptIndex.size() > 0) {
            Set<Integer> cluster = new HashSet<Integer>();
            seed = gdsToConceptIndex.entrySet().iterator().next();
            List<Integer> subCluster = new ArrayList<Integer>();
            subCluster.addAll(seed.getValue());
            // breadth-first expansion: follow shared attributes until no new
            // concepts are reached; processed entries are removed from both
            // indexes so the outer while loop terminates
            while (true) {
                List<Integer> tempCluster = new ArrayList<Integer>();
                for (Integer nextSeed : subCluster) {
                    List<ArrayKey<Object>> gdss = conceptToGdsIndex.remove(nextSeed);
                    if (gdss != null) {
                        for (ArrayKey<Object> gds : gdss) {
                            List<Integer> list = gdsToConceptIndex.remove(gds);
                            if (list != null)
                                tempCluster.addAll(list);
                        }
                    }
                }
                cluster.addAll(subCluster);
                if (tempCluster.size() == 0) {
                    break;
                }
                subCluster = tempCluster;
            }
            mergeConcepts(cluster);
        }
        gdsToConceptIndex.clear();
        conceptToGdsIndex.clear();
    }
}
Use of net.sourceforge.ondex.core.Attribute in the knetbuilder project by Rothamsted:
class DecypherAlignment, method writeDecypherFASTAFile.
/**
 * Writes the sequences attached to the given concepts to a FASTA-style file
 * in the DECYPHER header format (concept id, tax id and attribute name
 * joined by the tax-id delimiter), wrapping sequence data at 70 characters
 * per line.
 *
 * @param og the current graph
 * @param seqs the Concepts containing the sequences to write
 * (NB: this set is modified in place via retainAll)
 * @param filename the file to write to
 * @param sequenceType the type of sequence @see net.sourceforge.ondex.programcalls.SequenceType
 * @param includeOnlyTaxIdSeq include only sequences with taxid
 * @return the number of sequences written
 * @throws IOException if the output file cannot be created or written
 */
public static int writeDecypherFASTAFile(ONDEXGraph og, Set<ONDEXConcept> seqs, String filename, SequenceType sequenceType, boolean includeOnlyTaxIdSeq) throws IOException {
    int seqWritten = 0;
    // select the attribute name holding the sequence data
    String attSeq = null;
    if (sequenceType.equals(AA)) {
        attSeq = MetaData.Att_AA;
    } else if (sequenceType.equals(NA)) {
        attSeq = MetaData.Att_NA;
    } else {
        System.err.println("Unknown attribute sequence type");
    }
    AttributeName defaultAt = og.getMetaData().getAttributeName(attSeq);
    // collect the default attribute plus any numbered variants ("<att>:1", "<att>:2", ...)
    List<AttributeName> attributesOfSequenceType = new ArrayList<AttributeName>();
    attributesOfSequenceType.add(defaultAt);
    for (int i = 1; i < Integer.MAX_VALUE; i++) {
        AttributeName next = og.getMetaData().getAttributeName(attSeq + ':' + i);
        if (next == null) {
            break;
        }
        System.out.println("Found " + attSeq + ':' + i);
        attributesOfSequenceType.add(next);
    }
    AttributeName aTaxID = og.getMetaData().getAttributeName("TAXID");
    if (includeOnlyTaxIdSeq) {
        // filter out concepts without taxid
        Set<ONDEXConcept> taxIdConcepts = og.getConceptsOfAttributeName(aTaxID);
        seqs.retainAll(taxIdConcepts);
    }
    // find concepts with sequences (union across all sequence attribute names)
    Set<ONDEXConcept> attConcepts = null;
    for (AttributeName att : attributesOfSequenceType) {
        Set<ONDEXConcept> sequenceConcepts = og.getConceptsOfAttributeName(att);
        System.out.println(att.getId() + " " + sequenceConcepts.size());
        if (attConcepts == null) {
            attConcepts = sequenceConcepts;
            continue;
        }
        attConcepts.addAll(sequenceConcepts);
        System.out.println("Including " + att.getId());
    }
    // concepts with sequences and TAXID
    seqs.retainAll(attConcepts);
    System.out.println("Writing " + seqs.size() + " concepts with sequences");
    if (seqs.size() == 0) {
        return 0;
    }
    File f = new File(filename);
    f.createNewFile();
    // try-with-resources: previously the writer leaked when an exception occurred mid-write
    try (BufferedWriter out = new BufferedWriter(new FileWriter(f))) {
        StringBuilder line = new StringBuilder(70);
        for (ONDEXConcept concept : seqs) {
            int cid = concept.getId();
            Attribute taxIdAttribute = concept.getAttribute(aTaxID);
            String taxId = null;
            if (taxIdAttribute != null) {
                taxId = (String) taxIdAttribute.getValue();
                if (taxId.trim().length() == 0) {
                    if (includeOnlyTaxIdSeq)
                        continue;
                }
            } else {
                if (includeOnlyTaxIdSeq)
                    continue;
            }
            for (AttributeName aSeq : attributesOfSequenceType) {
                Attribute attribute = concept.getAttribute(aSeq);
                if (attribute == null) {
                    continue;
                }
                // the attribute value may be a single String, a Collection of
                // Strings, or a String[]
                Object value = attribute.getValue();
                String[] sequences = null;
                if (value instanceof String) {
                    sequences = new String[] { ((String) value).toUpperCase().trim() };
                } else if (value instanceof Collection) {
                    // BUGFIX: Collection.toArray() returns Object[], so the old
                    // cast to String[] always threw ClassCastException; use the
                    // typed toArray(T[]) overload instead.
                    sequences = ((Collection<?>) value).toArray(new String[0]);
                } else if (value instanceof String[]) {
                    sequences = (String[]) value;
                } else {
                    System.err.println(value.getClass() + " is an unknown format for " + aSeq.getId());
                    continue;
                }
                for (String seq : sequences) {
                    if (seq == null || seq.length() == 0) {
                        System.err.println("empty seq");
                        continue;
                    }
                    seq = seq.toUpperCase();
                    // header line: concept id, tax id (or placeholder), attribute id
                    line.append(headerLineStart);
                    line.append(cid);
                    line.append(TAXID_DELIMINATOR);
                    if (taxId != null) {
                        line.append(taxId);
                    } else {
                        line.append(EMPTY_TAXID);
                    }
                    line.append(TAXID_DELIMINATOR);
                    line.append(aSeq.getId());
                    out.write(line.toString());
                    line.setLength(0);
                    out.newLine();
                    // strip characters that are invalid for the sequence alphabet
                    String stringToWrite;
                    if (sequenceType.equals(NA)) {
                        stringToWrite = replaceAllNonIUPAC_NA_Notation_Chars.matcher(seq).replaceAll("");
                    } else {
                        stringToWrite = replaceAllNonChars.matcher(seq).replaceAll("");
                    }
                    if (stringToWrite == null || stringToWrite.length() == 0) {
                        System.err.println("nothing to write");
                        continue;
                    }
                    // wrap sequence at 70 characters per line.
                    // BUGFIX: the previous upper bound of length()-1 silently
                    // dropped the last character of every sequence (substring's
                    // end index is exclusive).
                    for (int i = 0; i < stringToWrite.length(); i += 70) {
                        int end = Math.min(i + 70, stringToWrite.length());
                        out.write(stringToWrite.substring(i, end));
                        out.newLine();
                    }
                    seqWritten++;
                }
            }
        }
        out.flush();
    }
    return seqWritten;
}
Use of net.sourceforge.ondex.core.Attribute in the knetbuilder project by Rothamsted:
class Parser, method start.
/**
 * Parses the AHD CSV files (phenotype.csv, plant_hormone.csv,
 * plant_info.csv, gene_hormone_plant.csv, gene_hormone_info.csv) from the
 * configured input directory and populates the graph with the
 * corresponding concepts, relations and attributes. A commented-out
 * microarray section is kept at the end for reference.
 *
 * @throws Exception propagated from meta data initialisation or file parsing
 */
@Override
public void start() throws Exception {
initaliseMetaData();
// Get entity factory
ef = graph.getFactory();
// holder for things to come
TabArrayObject tao;
File dir = new File((String) args.getUniqueValue(FileArgumentDefinition.INPUT_DIR));
// grab files folder
String input_dir = dir.getAbsolutePath() + File.separator;
// TODO:
/*
* Start of parsing of td_phenotype
*
*/
TabDelimited td_phenotype = new TabDelimited(input_dir + "phenotype.csv");
// create wildtype phenotype
ONDEXConcept wildtype = ef.createConcept("", dataSourceAHD, ccAHDPO, etIMPD);
wildtype.createConceptName("wild type", true);
while ((tao = td_phenotype.getNext()) != null) {
// deal with the top level ontology elements
String organ = (String) tao.getElement(2);
ONDEXConcept first_level_element = null;
// NOTE(review): the "|| organ == null" check is dead code - organ.equals("")
// would throw an NPE before it is evaluated; should read
// "organ == null || organ.equals("")". Same pattern recurs below for
// attribute and phenotype_desc.
if (organ.equals("") || organ == null) {
System.out.println("Error");
} else {
if (first_level_elements.containsKey(organ)) {
first_level_element = first_level_elements.get(organ);
} else {
first_level_element = ef.createConcept("", dataSourceAHD, ccAHDPO, etIMPD);
first_level_element.createConceptName(organ, true);
first_level_elements.put(organ, first_level_element);
}
}
// deal with second level ontology elements
String attribute = (String) tao.getElement(3);
ONDEXConcept second_level_element = null;
Pair oa = new Pair(organ, attribute);
if (attribute.equals("") || attribute == null) {
System.out.println("Error");
} else {
if (second_level_elements.containsKey(oa)) {
second_level_element = second_level_elements.get(oa);
} else {
second_level_element = ef.createConcept("", dataSourceAHD, ccAHDPO, etIMPD);
second_level_element.createConceptName(attribute, true);
second_level_elements.put(oa, second_level_element);
// create relation between first and second level elements
ef.createRelation(second_level_element, first_level_element, rtISA, etIMPD);
}
}
// deal with third level ontology elements
String phenotype_desc = (String) tao.getElement(6);
ONDEXConcept third_level_element = null;
Triple oap = new Triple(organ, attribute, phenotype_desc);
if (phenotype_desc.equals("") || phenotype_desc == null) {
System.out.println("Error");
} else {
if (third_level_elements.containsKey(oap)) {
third_level_element = third_level_elements.get(oap);
} else {
third_level_element = ef.createConcept("", dataSourceAHD, ccAHDPO, etIMPD);
third_level_element.createConceptName(phenotype_desc, true);
third_level_elements.put(oap, third_level_element);
// create relation between third and second level elements
ef.createRelation(third_level_element, second_level_element, rtISA, etIMPD);
}
}
// get or creating hormones
ONDEXConcept hormone = getHormone((String) tao.getElement(5));
// get or creating plants
ONDEXConcept plant = getPlant((String) tao.getElement(1));
// create experiments
ONDEXConcept experiment = ef.createConcept("", dataSourceAHD, ccExp, etIMPD);
experiment.createConceptAccession((String) tao.getElement(0), dataSourceAHD, false);
// linking experiments to phenotypes
ef.createRelation(experiment, third_level_element, rtOP, etIMPD);
// linking experiments to plants
ef.createRelation(plant, experiment, rtPP, etIMPD);
// linking experiments to hormone
// column 4 encodes treatment: "0" not treated, "1" treated, "2" associated
String has_hormone = (String) tao.getElement(4);
if (has_hormone.equals("0")) {
ef.createRelation(experiment, hormone, rtNotTreat, etIMPD);
} else if (has_hormone.equals("1")) {
ef.createRelation(experiment, hormone, rtTreat, etIMPD);
} else if (has_hormone.equals("2")) {
ef.createRelation(experiment, hormone, rtAssoc, etIMPD);
} else {
System.err.println("Error in phenotype - column 4");
tao.debug();
}
}
// reset tab array object
tao = null;
td_phenotype = null;
/*
* Start of parsing of td_plant_hormone
*
*/
TabDelimited td_plant_hormone = new TabDelimited(input_dir + "plant_hormone.csv");
while ((tao = td_plant_hormone.getNext()) != null) {
String plantName = (String) tao.getElement(1);
String hormoneName = (String) tao.getElement(2);
ONDEXConcept plant = getPlant(plantName);
ONDEXConcept hormone = getHormone(hormoneName);
ONDEXRelation r = ef.createRelation(plant, hormone, rtAssoc, etIMPD);
// column 3 is a "yes"/"no" confidence flag stored as boolean attribute
if (tao.getElement(3).equals("yes")) {
r.createAttribute(anTLConf, true, false);
} else if (tao.getElement(3).equals("no")) {
r.createAttribute(anTLConf, false, false);
} else {
System.err.println("Error in plant hormone - column 3");
tao.debug();
}
}
// reset tab array object
tao = null;
td_plant_hormone = null;
/*
* Start of parsing of td_plant_info
*
*/
TabDelimited td_plant_info = new TabDelimited(input_dir + "plant_info.csv");
while ((tao = td_plant_info.getNext()) != null) {
ONDEXConcept plant = getPlant((String) tao.getElement(0));
// optional plant attributes; empty columns are simply skipped
String eco = (String) tao.getElement(1);
if (!eco.equals("")) {
plant.createAttribute(anEco, eco, false);
}
String mut = (String) tao.getElement(2);
if (!mut.equals("")) {
plant.createAttribute(anMut, mut, false);
}
String dom = (String) tao.getElement(3);
if (!dom.equals("")) {
plant.createAttribute(anDominance, dom, false);
}
String plantType = (String) tao.getElement(6);
if (!plantType.equals("")) {
plant.createAttribute(anPlantType, plantType, false);
}
String desc = (String) tao.getElement(5);
if (!desc.equals("")) {
plant.createAttribute(anDesc, desc, false);
}
// column 4: one or more PubMed ids separated by ';'
String pmidsList = (String) tao.getElement(4);
if (!pmidsList.equals("")) {
if (pmidsList.contains(";")) {
String[] pmidsL = pmidsList.split(";");
for (String n : pmidsL) {
ONDEXConcept pub = getPub(n);
ef.createRelation(plant, pub, rtPub, etIMPD);
}
} else {
ONDEXConcept pub = getPub(pmidsList);
ef.createRelation(plant, pub, rtPub, etIMPD);
}
}
}
// reset tab array object
tao = null;
td_plant_info = null;
TabDelimited td_gene_hormone_plant = new TabDelimited(input_dir + "gene_hormone_plant.csv");
while ((tao = td_gene_hormone_plant.getNext()) != null) {
String geneAcc = (String) tao.getElement(1);
ONDEXConcept g = getGene(geneAcc);
// AHD names
String geneName = (String) tao.getElement(2);
if (!geneName.equals("")) {
if (geneName.contains(",")) {
String[] gNames = geneName.split(",");
for (String n : gNames) {
if (g.getConceptName(n) == null) {
g.createConceptName(n, true);
}
}
} else {
if (g.getConceptName(geneName) == null) {
g.createConceptName(geneName, true);
}
}
}
// linking genes to plants
String plantName = (String) tao.getElement(4);
ONDEXConcept plant = getPlant(plantName);
// NOTE(review): getRelation is called twice with identical arguments here;
// the second call could simply test hasMutatedGene == null.
ONDEXRelation hasMutatedGene = graph.getRelation(plant, g, rtHasMutatedGene);
if (graph.getRelation(plant, g, rtHasMutatedGene) == null) {
hasMutatedGene = ef.createRelation(plant, g, rtHasMutatedGene, etIMPD);
}
// linking genes to hormones
// TODO: Find better relation
String hormoneName = (String) tao.getElement(3);
ONDEXConcept hormone = getHormone(hormoneName);
if (graph.getRelation(g, hormone, rtAssoc) == null) {
ef.createRelation(g, hormone, rtAssoc, etIMPD);
}
// NOTE(review): this block relies on the empty catch below to swallow the
// NPE from getAttribute(anSite).getValue() when the attribute is absent -
// an explicit null check would be clearer.
try {
String site = (String) tao.getElement(5);
if (!site.equals("")) {
if (!hasMutatedGene.getAttribute(anSite).getValue().equals(site)) {
hasMutatedGene.createAttribute(anSite, site, false);
}
}
} catch (Exception e) {
// do nothing
}
}
// reset tab array object
tao = null;
td_gene_hormone_plant = null;
TabDelimited td_gene_hormone_info = new TabDelimited(input_dir + "gene_hormone_info.csv");
while ((tao = td_gene_hormone_info.getNext()) != null) {
String gAcc = (String) tao.getElement(1);
String gName = (String) tao.getElement(2);
ONDEXConcept gene = getGene(gAcc);
if ((!gName.equals("")) && gene.getConceptName(gName) == null) {
gene.createConceptName(gName, true);
}
String hormoneName = (String) tao.getElement(3);
ONDEXConcept hormone = getHormone(hormoneName);
// reuse the gene-hormone association if it already exists
ONDEXRelation geneToHormone = graph.getRelation(gene, hormone, rtAssoc);
if (geneToHormone == null) {
geneToHormone = ef.createRelation(gene, hormone, rtAssoc, etIMPD);
}
// column 4: role of the gene with respect to this hormone
String hormoneRole = (String) tao.getElement(4);
if (!hormoneRole.equals("-") && !hormoneRole.equals("")) {
if (geneToHormone.getAttribute(anGeneRole) == null) {
geneToHormone.createAttribute(anGeneRole, hormoneRole, false);
} else if (geneToHormone.getAttribute(anGeneRole).getValue().equals(hormoneRole)) {
// do nothing
} else {
// some kind of duplication going on...
tao.debug();
}
}
String geneDesc = (String) tao.getElement(5);
if (!geneDesc.equals("")) {
Attribute attribute = geneToHormone.getAttribute(anDesc);
if (attribute == null) {
geneToHormone.createAttribute(anDesc, geneDesc, false);
} else if (attribute.getValue().equals(geneDesc)) {
// do nothing
} else {
// System.err.println("problem here " + (String) tao.getElement(1));
// appending the new description onto the old one
attribute.setValue(attribute.getValue() + " ; " + geneDesc);
}
}
// still potentially same problems as above
Attribute sureattribute = geneToHormone.getAttribute(anTLConf);
String sure = (String) tao.getElement(7);
if (sureattribute == null) {
if (sure.equals("yes")) {
geneToHormone.createAttribute(anTLConf, true, false);
} else if (sure.equals("no")) {
geneToHormone.createAttribute(anTLConf, false, false);
} else {
System.err.println("Error in gene-hormone info - column 7");
tao.debug();
}
} else if (sureattribute.getValue().equals(sure)) {
// do nothing
} else {
// default to no if there's a conflict
sureattribute.setValue(false);
}
// still same problem as above
// evidence issue, when 2 or more things have one from and one not from go
// defaults to go if there's a clash
Attribute goaattribute = geneToHormone.getAttribute(anGOA);
String go = (String) tao.getElement(8);
if (goaattribute == null) {
if (go == null || go.isEmpty()) {
geneToHormone.addEvidenceType(etGI);
} else if (go.equals("GO")) {
geneToHormone.addEvidenceType(etGOA);
// geneToHormone.createGDS(anGOA, "yes", false);
} else if (go.equals("other")) {
geneToHormone.addEvidenceType(etIMPD);
}
} else if (goaattribute.getValue().equals(go)) {
// do nothing
} else {
// default to go if there's a conflict
goaattribute.setValue("GO");
}
// tying all publication information to the gene only, even though it's not quite true..
// the publication should really be tied to the relations, but one cannot set up a relation
// between a relation and a node
String pimds = (String) tao.getElement(6);
if (pimds.equals("")) {
} else if (pimds.contains(";")) {
String[] pids = pimds.split(";");
for (String p : pids) {
ONDEXConcept pub = getPub(p);
if (graph.getRelation(gene, pub, rtPub) == null) {
ef.createRelation(gene, pub, rtPub, etIMPD);
}
}
} else {
// get publication or create
ONDEXConcept pub = getPub(pimds);
// make relation if none already exists
if (graph.getRelation(gene, pub, rtPub) == null) {
ef.createRelation(gene, pub, rtPub, etIMPD);
}
}
}
/*
* Microarray stuff follows here
*
// reset tab array object
tao = null;
td_gene_hormone_info = null;
TabDelimited td_gene_hormone_microarray = new TabDelimited(input_dir+"gene_hormone_microarray.csv");
while ((tao = td_gene_hormone_microarray.getNext()) != null) {
String row_id = (String) tao.getElement(0);
String exp_id = (String) tao.getElement(1);
String hormoneName = (String) tao.getElement(2);
String locusName = (String) tao.getElement(3);
String microArrayData = (String) tao.getElement(4);
ONDEXConcept hormone = getHormone(hormoneName);
ONDEXConcept gene = getGene(locusName);
ONDEXConcept mexp = getExp(exp_id);
ONDEXConcept c = ef.createConcept("", dataSourceAHD, ccExp, etIMPD);
c.createConceptAccession(row_id, dataSourceAHD, false);
ef.createRelation(c, mexp, rtContrib, etIMPD);
ef.createRelation(c, hormone, rtAssoc, etIMPD);
ef.createRelation(gene, c, rtPP, etIMPD);
c.createGDS(anMicro, microArrayData, false);
}
// reset tab array object
tao = null;
td_gene_hormone_microarray = null;
// microarray info
// there is some stuff not parsed, but not sure about the value of them
TabDelimited microarray_info = new TabDelimited(input_dir+"microarray_info.csv");
while ((tao = microarray_info.getNext()) != null) {
String exp_id = (String) tao.getElement(0);
String plantName = (String) tao.getElement(2);
String hormoneName = (String) tao.getElement(4);
String pmid = ((String) tao.getElement(9)).substring(6); // trim the "PMID: "
String url = (String) tao.getElement(11);
ONDEXConcept mexp = getExp(exp_id);
ONDEXConcept pub = getPub(pmid);
ONDEXConcept hormone = getHormone(hormoneName);
ONDEXConcept plant = getPlant(plantName);
ef.createRelation(mexp, pub, rtPub, etIMPD);
ef.createRelation(hormone, mexp, rtAssoc, etIMPD);
ef.createRelation(plant, mexp, rtPP, etIMPD);
mexp.createGDS(anMicro, url, false);
}
*/
}
Use of net.sourceforge.ondex.core.Attribute in the knetbuilder project by Rothamsted:
class Parser, method mergeRelations.
/**
 * Copies every relation of the second graph into the first graph, merging
 * tags, evidence types and attributes into any equivalent relation that
 * already exists there.
 *
 * @param graph1 the graph being extended (merge target)
 * @param graph2 the graph whose relations are copied over
 */
private void mergeRelations(ONDEXGraph graph1, ONDEXGraph graph2) {
    for (ONDEXRelation source : graph2.getRelations()) {
        // reuse the matching relation in the first graph, or create it if absent
        ONDEXRelation target = graph1.getRelation(source.getFromConcept(), source.getToConcept(), source.getOfType());
        if (target == null) {
            target = graph1.createRelation(source.getFromConcept(), source.getToConcept(), source.getOfType(), source.getEvidence());
        }
        // add tags present on the source but missing on the target, provided
        // the tag concept itself exists in the first graph
        Set<ONDEXConcept> missingTags = new HashSet<ONDEXConcept>(source.getTags());
        missingTags.removeAll(target.getTags());
        for (ONDEXConcept tag : missingTags) {
            if (graph1.getConcept(tag.getId()) != null) {
                target.addTag(tag);
            }
        }
        // add evidence types missing on the target, creating unknown evidence
        // meta data in the first graph on the fly
        Set<EvidenceType> missingEvidence = new HashSet<EvidenceType>(source.getEvidence());
        missingEvidence.removeAll(target.getEvidence());
        for (EvidenceType et : missingEvidence) {
            if (graph1.getMetaData().getEvidenceType(et.getId()) == null) {
                graph1.getMetaData().createEvidenceType(et.getId(), et.getFullname(), et.getDescription());
            }
            target.addEvidenceType(et);
        }
        // copy attributes missing on the target, creating unknown attribute
        // names in the first graph on the fly
        Set<Attribute> missingAttributes = new HashSet<Attribute>(source.getAttributes());
        missingAttributes.removeAll(target.getAttributes());
        for (Attribute attr : missingAttributes) {
            AttributeName an = attr.getOfType();
            if (graph1.getMetaData().getAttributeName(an.getId()) == null) {
                graph1.getMetaData().createAttributeName(an.getId(), an.getFullname(), an.getDescription(), an.getUnit(), an.getDataType(), an.getSpecialisationOf());
            }
            target.createAttribute(an, attr.getValue(), attr.isDoIndex());
        }
    }
}
Aggregations