Use of io.loaders.json.MonomersJsonLoader in project Smiles2Monomers by yoann-dufresne.
The class ChainLearningTests, method setUp().
@Before
public void setUp() throws Exception {
    // Loading test monomers
    MonomersDB premonos = new MonomersJsonLoader().loadFile("data_tests/monos.json");
    // Creation of a subset of monos
    this.monos = new MonomersDB();
    this.monos.addObject("Dpr", premonos.getObject("Dpr"));
    this.monos.addObject("D-Ser", premonos.getObject("D-Ser"));
    // Loading test rules
    RulesDB rules = RulesJsonLoader.loader.loadFile("data_tests/rules.json");
    // Loading test residues
    ResidueJsonLoader rjl = new ResidueJsonLoader(rules, this.monos);
    this.families = rjl.loadFile("data_tests/res.json");
    // Loading test polymers
    PolymersJsonLoader pjl = new PolymersJsonLoader(premonos);
    PolymersDB base = pjl.loadFile("data_tests/peps.json");
    // Creation of learning base with only 1 polymer
    this.learningBase = new PolymersDB();
    this.learningBase.addObject("AM-toxin II", base.getObject("306"));
    // Construct reference
    this.constructReference();
}
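The essential MonomersJsonLoader pattern in this fixture is: load the full test database once, then copy only the entries the test needs into a smaller MonomersDB. A minimal sketch of just that step, reusing only the loader and database calls shown above (the monomer codes and file path are the ones from setUp):

// Minimal sketch: load the full test monomer set, then keep only the
// two monomers this test actually uses.
MonomersDB all = new MonomersJsonLoader().loadFile("data_tests/monos.json");
MonomersDB subset = new MonomersDB();
for (String code : new String[] { "Dpr", "D-Ser" }) {
    subset.addObject(code, all.getObject(code));
}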
Use of io.loaders.json.MonomersJsonLoader in project Smiles2Monomers by yoann-dufresne.
The class OverlapData, method main().
public static void main(String[] args) {
    // Norine loading
    MonomersJsonLoader monosLoader = new MonomersJsonLoader();
    MonomersDB norMonos = monosLoader.loadFile("data/monomers.json");
    PolymersJsonLoader loader = new PolymersJsonLoader(norMonos);
    PolymersDB norine = loader.loadFile("data/peptides_clean.json");
    // CCD loading
    MonomersDB ccdMonos = monosLoader.loadFile("data/pdbe_monos_min.json");
    loader = new PolymersJsonLoader(ccdMonos);
    PolymersDB ccd = loader.loadFile("data/pdbe_polys_clean.json");
    // Norine SMILES generation
    HashMap<String, Polymer> polymers = new HashMap<>();
    for (Polymer poly : norine.getObjects()) {
        String smiles = null;
        try {
            smiles = SmilesConverter.conv.toCanonicalSmiles(poly.getSmiles());
        } catch (InvalidSmilesException e) {
            e.printStackTrace();
            continue;
        }
        polymers.put(smiles, poly);
    }
    // Test overlapping peptides
    int overlap = 0;
    for (Polymer poly : ccd.getObjects()) {
        String smiles = null;
        try {
            smiles = SmilesConverter.conv.toCanonicalSmiles(poly.getSmiles());
        } catch (InvalidSmilesException e) {
            e.printStackTrace();
            continue;
        }
        if (polymers.containsKey(smiles)) {
            System.out.println(polymers.get(smiles).getName());
            overlap++;
        }
    }
    System.out.println("Total peptides overlapping : " + overlap);
    // Norine monomer SMILES generation
    HashMap<String, Monomer> monomers = new HashMap<>();
    for (Monomer mono : norMonos.getObjects()) {
        String smiles = null;
        try {
            smiles = SmilesConverter.conv.toCanonicalSmiles(mono.getSmiles());
        } catch (InvalidSmilesException e) {
            e.printStackTrace();
            continue;
        }
        monomers.put(smiles, mono);
    }
    // Test overlapping monomers
    overlap = 0;
    for (Monomer mono : ccdMonos.getObjects()) {
        String smiles = null;
        try {
            smiles = SmilesConverter.conv.toCanonicalSmiles(mono.getSmiles());
        } catch (InvalidSmilesException e) {
            e.printStackTrace();
            continue;
        }
        if (monomers.containsKey(smiles)) {
            System.out.println(monomers.get(smiles).getName());
            overlap++;
        }
    }
    System.out.println("Total monomers overlapping : " + overlap + "/" + ccdMonos.size());
}
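Both overlap counts above follow the same index-then-probe pattern: key one database's entries by canonical SMILES, then probe the map with the canonical SMILES of the other database. A condensed sketch of that pattern for monomers, using only the calls that appear in main (the helper name indexBySmiles is hypothetical):

// Hypothetical helper illustrating the pattern used twice in main():
// index a monomer database by canonical SMILES, skipping unparsable entries.
static Map<String, Monomer> indexBySmiles(MonomersDB db) {
    Map<String, Monomer> index = new HashMap<>();
    for (Monomer mono : db.getObjects()) {
        try {
            index.put(SmilesConverter.conv.toCanonicalSmiles(mono.getSmiles()), mono);
        } catch (InvalidSmilesException e) {
            // Entries with invalid SMILES are skipped, as in the original loops.
        }
    }
    return index;
}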
Use of io.loaders.json.MonomersJsonLoader in project Smiles2Monomers by yoann-dufresne.
The class ResiduesInfosGeneration, method main().
public static void main(String[] args) {
    MonomersDB monoDB = new MonomersJsonLoader().loadFile("data/monomers.json");
    RulesDB rules = RulesJsonLoader.loader.loadFile("data/rules.json");
    ResidueJsonLoader rjl = new ResidueJsonLoader(rules, monoDB);
    FamilyDB families = rjl.loadFile("data/residues.json");
    StringBuffer sb = new StringBuffer();
    for (Family fam : families.getFamilies()) {
        sb.append(fam.getName() + "\n");
        sb.append("Norine link : http://bioinfo.lifl.fr/norine/res_amino.jsp?code=" + fam.getMonomers().get(0).getCode() + "\n");
        sb.append("Num of residues : " + fam.getResidues().size() + "\n");
        sb.append("Root residues (with max links) :" + "\n");
        for (Residue res : fam.getRoots())
            sb.append("\t" + res.getName() + " : " + res.getAtomicLinks().size() + "\n");
        sb.append("\n");
    }
    File out = new File("results/infosMonos.txt");
    try {
        FileWriter fw = new FileWriter(out);
        fw.write(sb.toString());
        fw.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
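The file-writing block at the end can also be expressed with try-with-resources, which closes the FileWriter even if write throws; a small sketch under the same assumptions (same output path and StringBuffer as above):

// Sketch: same output as above, but the writer is closed automatically.
File out = new File("results/infosMonos.txt");
try (FileWriter fw = new FileWriter(out)) {
    fw.write(sb.toString());
} catch (IOException e) {
    e.printStackTrace();
}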
Use of io.loaders.json.MonomersJsonLoader in project Smiles2Monomers by yoann-dufresne.
The class Json2HTML, method main().
public static void main(String[] args) {
    if (args.length < 6) {
        System.err.println("Command line :\n\tjava main.AtomicToMonomeric <monomersFile> <peptidesFile> <rulesFile> <residuesFile> <coveragesFile> <outType> [outFile] [-v]");
        System.err.println(" outType can be \"-zip\" or \"-html\"");
        System.exit(42);
    }
    String monoDBname = args[0];
    String pepDBname = args[1];
    String rulesDBname = args[2];
    String residuesDBname = args[3];
    String covsFile = args[4];
    boolean zip = args[5].equals("-zip");
    String outFile = null;
    if (zip && args.length < 7) {
        System.err.println("Command line :\n\tjava main.AtomicToMonomeric <monomersFile> <peptidesFile> <rulesFile> <residuesFile> <coveragesFile> <outType> [outFile] [-v]");
        System.err.println(" outType can be \"-zip\" or \"-html\"");
        System.exit(42);
    } else if (zip && args.length >= 7)
        outFile = args[6];
    // Loading databases
    System.out.println("--- Loading ---");
    // Loading might be faster for the learning base if serialized molecules were used instead of the CDK SMILES parsing method.
    long loadingTime = System.currentTimeMillis();
    MonomersDB monoDB = new MonomersJsonLoader(false).loadFile(monoDBname);
    MonomersSerialization ms = new MonomersSerialization();
    ms.deserialize(monoDB, "data/serials/monos.serial");
    PolymersJsonLoader pcl = new PolymersJsonLoader(monoDB, true);
    PolymersDB pepDB = pcl.loadFile(pepDBname);
    RulesDB rules = RulesJsonLoader.loader.loadFile(rulesDBname);
    ResidueJsonLoader rjl = new ResidueJsonLoader(rules, monoDB);
    FamilyDB families = rjl.loadFile(residuesDBname);
    loadingTime = (System.currentTimeMillis() - loadingTime) / 1000;
    System.out.println("--- Json to HTML ---");
    long creationTime = System.currentTimeMillis();
    CoveragesJsonLoader cl = new CoveragesJsonLoader(pepDB, families);
    CoveragesDB covs = cl.loadFile(covsFile);
    List<Coverage> covsList = covs.getObjects();
    Collections.sort(covsList);
    // Common generations
    File imgs = new File("tmp_imgs_" + covsFile.substring(covsFile.lastIndexOf("/") + 1, covsFile.lastIndexOf(".")));
    if (!imgs.exists())
        imgs.mkdir();
    ImagesGeneration ig = new ImagesGeneration();
    Map<Coverage, ColorsMap> allColors = ig.generate(imgs, monoDB, covsList);
    if (!zip) {
        // HTML
        File resultDir = null;
        if (covsFile.contains("/"))
            resultDir = new File(covsFile.substring(0, covsFile.lastIndexOf("/")));
        else
            resultDir = new File(".");
        if (!resultDir.exists())
            resultDir.mkdir();
        Coverages2HTML c2h = new Coverages2HTML(covsList, monoDB, families);
        File htmlFile = new File(resultDir.getPath() + "/test.html");
        c2h.createResults(htmlFile, imgs, allColors);
    } else {
        // Zip file
        OutputZiper oz = new OutputZiper(outFile);
        oz.createZip(imgs.getPath(), covsFile, pepDBname, monoDBname, residuesDBname, allColors);
    }
    ig.cleanTmp(imgs);
    creationTime = (System.currentTimeMillis() - creationTime) / 1000;
    System.out.println();
    System.out.println("Total time to load data : " + loadingTime + "s");
    System.out.println("Total time to create HTML : " + creationTime + "s");
    System.out.println();
    System.out.println("--- Program ended ---");
}
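The temporary image directory name above is derived from the coverage file name by stripping its directory and extension. A short sketch of just that derivation (the helper name baseName is hypothetical), mirroring the substring arithmetic in main:

// Hypothetical helper: "results/coverages.json" -> "coverages".
// Like the original expression, it assumes the path contains a '.' extension.
static String baseName(String path) {
    int slash = path.lastIndexOf('/');
    int dot = path.lastIndexOf('.');
    return path.substring(slash + 1, dot);
}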
Use of io.loaders.json.MonomersJsonLoader in project Smiles2Monomers by yoann-dufresne.
The class PreComputation, method main().
public static void main(String[] args) {
    // ----------------- Parameters ---------------------------
    String rulesDBName = "data/rules.json";
    String monosDBName = "data/monomers.json";
    String jsonPolymers = "data/learning.json";
    String serialFolder = "data/serials/";
    String jsonResidues = "data/residues.json";
    String jsonChains = "data/chains.json";
    int markovianSize = 3;
    // Parsing
    for (int idx = 0; idx < args.length; idx++) {
        if (args[idx].startsWith("-")) {
            switch (args[idx]) {
            case "-rul":
                rulesDBName = args[idx + 1];
                break;
            case "-mono":
                monosDBName = args[idx + 1];
                break;
            case "-poly":
                jsonPolymers = args[idx + 1];
                break;
            case "-res":
                jsonResidues = args[idx + 1];
                break;
            case "-cha":
                jsonChains = args[idx + 1];
                break;
            case "-serial":
                serialFolder = args[idx + 1];
                break;
            case "-markovian":
                markovianSize = Integer.parseInt(args[idx + 1]);
                break;
            default:
                System.err.println("Wrong option " + args[idx]);
                System.exit(1);
                break;
            }
            idx++;
        } else {
            System.err.println("Wrong parameter " + args[idx]);
            System.exit(1);
        }
    }
    // File existence
    File file = new File(rulesDBName);
    if (!file.exists()) {
        System.err.println("No file at " + rulesDBName);
        System.exit(1);
    }
    file = new File(monosDBName);
    if (!file.exists()) {
        System.err.println("No file at " + monosDBName);
        System.exit(1);
    }
    file = new File(jsonPolymers);
    if (!file.exists()) {
        System.err.println("No file at " + jsonPolymers);
        System.exit(1);
    }
    // ------------------- Loadings ------------------------
    System.out.println("--- Loading ---");
    RulesDB rules = RulesJsonLoader.loader.loadFile(rulesDBName);
    MonomersDB monos = new MonomersJsonLoader(true).loadFile(monosDBName);
    PolymersJsonLoader pjl = new PolymersJsonLoader(monos, false);
    PolymersDB learningBase = pjl.loadFile(jsonPolymers);
    // ----------------- Serializations --------------------
    System.out.println("--- Data serialisation ---");
    File folder = new File(serialFolder);
    if (!folder.exists())
        folder.mkdir();
    MonomersSerialization ms = new MonomersSerialization();
    ms.serialize(monos, serialFolder + "monos.serial");
    // ----------------- Residues --------------------------
    ResidueCreator rc = new ResidueCreator(rules);
    rc.setVerbose(false);
    System.out.println("--- Residues creation ---");
    FamilyDB families = rc.createResidues(monos);
    System.out.println("--- Saving residues ---");
    ResidueJsonLoader rjl = new ResidueJsonLoader(rules, monos);
    rjl.saveFile(families, jsonResidues);
    // ----------------- Chains ----------------------------
    System.out.println("--- Learning chains ---");
    // Adapt residue structures
    for (Residue res : families.getResidues().getObjects())
        res.explicitToImplicitHydrogens();
    ChainLearning learning = new ChainLearning(learningBase);
    learning.setMarkovianSize(markovianSize);
    learning.learn(families);
    // --- Save ---
    System.out.println("--- Saving chains ---");
    FamilyChainIO fcio = new FamilyChainIO(families);
    fcio.saveFile(learning.getDb(), jsonChains);
    System.out.println("--- Ended ---");
}
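The three file-existence checks in main repeat one pattern; a compact sketch of a hypothetical helper (requireFile) that would express it once, with the same message and exit code:

// Hypothetical helper: abort, as above, when a required input file is missing.
static void requireFile(String path) {
    if (!new File(path).exists()) {
        System.err.println("No file at " + path);
        System.exit(1);
    }
}
// Usage mirroring main(): requireFile(rulesDBName); requireFile(monosDBName); requireFile(jsonPolymers);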