Use of ini.trakem2.Project in project TrakEM2 by trakem2.
In class DBLoader, method getProjects:
/**
 * Fetch all existing projects from the database.
 */
public Project[] getProjects() {
    synchronized (db_lock) {
        // connect if disconnected
        if (!connectToDatabase()) {
            return null;
        }
        Project[] projects = null;
        try {
            ResultSet r = connection.prepareStatement("SELECT title, id FROM ab_projects ORDER BY id").executeQuery();
            ArrayList<Project> al_projects = new ArrayList<Project>();
            while (r.next()) {
                al_projects.add(new Project(r.getLong("id"), r.getString("title")));
            }
            r.close();
            projects = new Project[al_projects.size()];
            al_projects.toArray(projects);
        } catch (Exception e) {
            IJError.print(e);
            return null;
        }
        return projects;
    }
}
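A minimal usage sketch for the method above, assuming an already-configured DBLoader instance; the helper name and log messages are illustrative, not part of the TrakEM2 sources:

// Hypothetical helper, not part of DBLoader: logs every project found in the database.
static void logAllProjects(final DBLoader loader) {
    final Project[] projects = loader.getProjects();
    if (null == projects) {
        Utils.log("Could not fetch projects: no database connection or the query failed.");
        return;
    }
    for (final Project p : projects) {
        Utils.log("Found project #" + p.getId() + ": " + p);
    }
}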
Use of ini.trakem2.Project in project TrakEM2 by trakem2.
In class DBLoader, method upgradeProjectsTable:
/**
 * Used to upgrade old databases.
 */
private boolean upgradeProjectsTable() throws Exception {
    // Upgrade database if necessary: set a version field, create the TemplateThing entries in the database for each project from its XML template file, and delete the xml_template column
    // Check columns: see if the old xml_template column is still present
    ResultSet r = connection.prepareStatement("SELECT column_name FROM information_schema.columns WHERE table_name='ab_projects' AND column_name='xml_template'").executeQuery();
    if (r.next()) {
        YesNoCancelDialog yn = new YesNoCancelDialog(IJ.getInstance(), "Upgrade", "Need to upgrade table projects.\nNo data will be lost, but reorganized.\nProceed?");
        if (!yn.yesPressed()) {
            return false;
        }
        // retrieve and parse the XML template from each project
        ResultSet r1 = connection.prepareStatement("SELECT * FROM ab_projects").executeQuery();
        while (r1.next()) {
            long project_id = r1.getLong("id");
            // parse the XML file stored in the db and save the TemplateThing into the ab_things table
            InputStream xml_stream = null;
            try {
                String query = "SELECT xml_template FROM ab_projects WHERE id=" + project_id;
                ResultSet result = connection.prepareStatement(query).executeQuery();
                if (result.next()) {
                    xml_stream = result.getBinaryStream("xml_template");
                }
                result.close();
            } catch (Exception e) {
                IJError.print(e);
                return false;
            }
            if (null == xml_stream) {
                Utils.showMessage("Failed to upgrade the database schema: XML template stream is null.");
                return false;
            }
            TemplateThing template_root = new TrakEM2MLParser(xml_stream).getTemplateRoot();
            if (null == template_root) {
                Utils.showMessage("Failed to upgrade the database schema: root TemplateThing is null.");
                return false;
            }
            Project project = new Project(project_id, r1.getString("title"));
            project.setTempLoader(this);
            template_root.addToDatabase(project);
        }
        r1.close();
        // remove the XML column
        connection.prepareStatement("ALTER TABLE ab_projects DROP xml_template").execute();
        // org.postgresql.util.PSQLException: ERROR: adding columns with defaults is not implemented in 7.4.* (only in 8.1.4+)
        // connection.prepareStatement("ALTER TABLE ab_projects ADD version text default '" + Utils.version + "'").execute();
        // so: workaround
        connection.prepareStatement("ALTER TABLE ab_projects ADD version TEXT").execute();
        connection.prepareStatement("ALTER TABLE ab_projects ALTER COLUMN version SET DEFAULT '" + Utils.version + "'").execute();
    }
    r.close();
    // success!
    return true;
}
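The column-existence check above generalizes to any table. A minimal sketch of a reusable helper using only standard JDBC (java.sql) calls; the name columnExists is hypothetical and not part of DBLoader:

// Hypothetical helper: returns true if the given column exists in the given table,
// using the same information_schema query pattern as upgradeProjectsTable().
static boolean columnExists(final Connection connection, final String table, final String column) throws SQLException {
    final PreparedStatement ps = connection.prepareStatement(
            "SELECT column_name FROM information_schema.columns WHERE table_name=? AND column_name=?");
    ps.setString(1, table);
    ps.setString(2, column);
    final ResultSet rs = ps.executeQuery();
    try {
        return rs.next();
    } finally {
        rs.close();
        ps.close();
    }
}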
Use of ini.trakem2.Project in project TrakEM2 by trakem2.
In class DBLoader, method getRootLayerThing:
/**
 * Fetches the root LayerSet, fills it with children (recursively) and uses the profiles, pipes, etc., from the project_thing. Will reconnect the links and open Displays for the layers that have one.
 */
public LayerThing getRootLayerThing(Project project, ProjectThing project_thing, TemplateThing layer_set_tt, TemplateThing layer_tt) {
    synchronized (db_lock) {
        // connect if disconnected
        if (!connectToDatabase()) {
            return null;
        }
        HashMap hs_pt = new HashMap();
        unpack(project_thing, hs_pt);
        LayerThing root = null;
        try {
            // -1 signals root
            ResultSet r = connection.prepareStatement("SELECT * FROM ab_things WHERE project_id=" + project.getId() + " AND type='layer_set' AND parent_id=-1").executeQuery();
            if (r.next()) {
                root = getLayerThing(r, project, hs_pt, layer_set_tt, layer_tt);
            }
            r.close();
            if (null == root) {
                Utils.log("Loader.getRootLayerThing: can't find it for project id=" + project.getId());
                return null;
            }
            // Redo the links! hs_pt now contains all Displayable objects.
            ResultSet rl = connection.prepareStatement("SELECT * FROM ab_links WHERE project_id=" + project.getId()).executeQuery();
            while (rl.next()) {
                Long id1 = new Long(rl.getLong("id1"));
                Long id2 = new Long(rl.getLong("id2"));
                Object ob1 = hs_pt.get(id1);
                Object ob2 = hs_pt.get(id2);
                if (null != ob1 && null != ob2) {
                    Displayable d = (Displayable) ob1;
                    d.link((Displayable) ob2, false);
                } else {
                    Utils.log("Loader: broken link between " + id1 + " and " + id2);
                }
            }
            rl.close();
        } catch (Exception e) {
            IJError.print(e);
            return null;
        }
        return root;
    }
}
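The hs_pt map used above is an id-to-object lookup built by recursing over the ProjectThing tree, so that the link table can be rewired afterwards. A minimal sketch of that idea; the helper name fillIdMap is hypothetical, and the getId()/getObject()/getChildren() accessors on ProjectThing are assumed here rather than taken from this page:

// Hypothetical sketch of the unpack(...) idea: walk the ProjectThing tree and
// index every contained object by its database id, so links can be rewired later.
static void fillIdMap(final ProjectThing thing, final Map<Long, Object> map) {
    map.put(thing.getId(), thing.getObject());
    if (null != thing.getChildren()) {
        for (final ProjectThing child : thing.getChildren()) {
            fillIdMap(child, map);
        }
    }
}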
Use of ini.trakem2.Project in project TrakEM2 by trakem2.
In class NeuroML, method exportNeuroML:
/**
 * Export to NeuroML 1.8.3, with synapses.
 * Every {@link Tree} is represented by a <cell>, and an instance of that <cell>
 * is represented by a <population> of one single cell.
 */
public static final void exportNeuroML(final Set<Tree<?>> trees, final Writer w) throws Exception {
    if (trees.isEmpty())
        return;
    // Header
    w.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<!-- Exported from TrakEM2 '" + Utils.version + "' at " + new Date() + "\nTrakEM2 software by Albert Cardona, Institute of Neuroinformatics of the University of Zurich and ETH Zurich -->\n" + "<neuroml xmlns=\"http://morphml.org/neuroml/schema\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" + " xmlns:net=\"http://morphml.org/networkml/schema\"\n" + " xmlns:mml=\"http://morphml.org/morphml/schema\"\n" + " xmlns:meta=\"http://morphml.org/metadata/schema\"\n" + " xmlns:bio=\"http://morphml.org/biophysics/schema\"\n" + " xmlns:cml=\"http://morphml.org/channelml/schema\"\n" + " xsi:schemaLocation=\"http://morphml.org/neuroml/schema http://www.neuroml.org/NeuroMLValidator/NeuroMLFiles/Schemata/v1.8.1/Level3/NeuroML_Level3_v1.8.1.xsd\"\n" + " length_units=\"micrometer\">\n");
    final List<HalfSynapse> presynaptic = new ArrayList<HalfSynapse>();
    final List<HalfSynapse> postsynaptic = new ArrayList<HalfSynapse>();
    // Scale units to micrometers
    final Calibration cal = trees.iterator().next().getLayerSet().getCalibration();
    final double scale = scaleToMicrometers(cal);
    final AffineTransform scale2d = new AffineTransform(cal.pixelWidth * scale, 0, 0, cal.pixelHeight * scale, 0, 0);
    // z is scaled with pixelWidth, not pixelDepth
    final double zScale = cal.pixelWidth * scale;
    w.write("<cells>\n");
    // Each Tree is a cell
    for (final Tree<?> t : trees) {
        if (null == t.getRoot())
            continue;
        exportMorphMLCell(w, t, trees, presynaptic, postsynaptic, scale2d, zScale);
    }
    w.write("</cells>\n");
    // Write a population for every Tree, where each population has a single cell at 0,0,0.
    // If the Tree id is 10, then the population name is p10 and the cell type is t10.
    w.write("<populations xmlns=\"http://morphml.org/networkml/schema\">\n");
    for (final Tree<?> t : trees) {
        w.write(" <population name=\"p");
        final String sid = Long.toString(t.getId());
        w.write(sid);
        w.write("\" cell_type=\"t");
        w.write(sid);
        w.write("\">\n <instances size=\"1\">\n <instance id=\"0\"><location x=\"0\" y=\"0\" z=\"0\"/></instance>\n </instances>\n </population>\n");
    }
    w.write("</populations>\n");
    // Write a projections group with all the synapses among the members of the set of trees.
    w.write("<projections units=\"Physiological Units\" xmlns=\"http://morphml.org/networkml/schema\">\n");
    // Figure out which pre connects to which post: the Connector instance is shared, so use it as the key
    final Map<Connector, HalfSynapse> cpre = new HashMap<Connector, HalfSynapse>();
    for (final HalfSynapse syn : presynaptic) {
        cpre.put(syn.c, syn);
    }
    final Map<TreePair, List<Synapse>> pairs = new HashMap<TreePair, List<Synapse>>();
    for (final HalfSynapse post : postsynaptic) {
        final HalfSynapse pre = cpre.get(post.c);
        // Skip synapses that do not originate within the set of trees
        if (null == pre)
            continue;
        // pre and post share the same Connector
        final TreePair pair = new TreePair(pre.t, post.t);
        List<Synapse> ls = pairs.get(pair);
        if (null == ls) {
            ls = new ArrayList<Synapse>();
            pairs.put(pair, ls);
        }
        ls.add(new Synapse(pre, post));
    }
    for (final Map.Entry<TreePair, List<Synapse>> e : pairs.entrySet()) {
        // Write the synapses between pre and post
        final TreePair pair = e.getKey();
        w.write(" <projection name=\"NetworkConnection\" source=\"p");
        w.write(Long.toString(pair.source.getId()));
        w.write("\" target=\"p");
        w.write(Long.toString(pair.target.getId()));
        w.write("\">\n");
        w.write(" <synapse_props synapse_type=\"DoubExpSynA\" internal_delay=\"5\" weight=\"1\" threshold=\"-20\"/>\n");
        w.write(" <connections size=\"");
        final List<Synapse> ls = e.getValue();
        w.write(Integer.toString(ls.size()));
        w.write("\">\n");
        int cid = 0;
        for (final Synapse syn : ls) {
            w.write(" <connection id=\"");
            w.write(Integer.toString(cid));
            w.write("\" pre_cell_id=\"0\" pre_segment_id=\"");
            w.write(Long.toString(syn.pre.segmentId));
            w.write("\" pre_fraction_along=\"0.5\" post_cell_id=\"0\" post_segment_id=\"");
            w.write(Long.toString(syn.post.segmentId));
            w.write("\"/>\n");
            cid += 1;
        }
        w.write(" </connections>\n");
        w.write(" </projection>\n");
    }
    w.write(" </projections>\n");
    w.write("</neuroml>\n");
}
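A minimal calling sketch for the method above, assuming the caller has already collected the Tree instances to export; the helper name and file path are illustrative, and the TrakEM2 imports (Tree, NeuroML) are omitted:

// Hypothetical caller, not part of NeuroML: exports the given trees to a NeuroML file.
static void exportTreesToFile(final Set<Tree<?>> trees, final String path) throws Exception {
    final Writer w = new BufferedWriter(new FileWriter(path)); // java.io
    try {
        NeuroML.exportNeuroML(trees, w);
    } finally {
        w.close();
    }
}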
Use of ini.trakem2.Project in project TrakEM2 by trakem2.
In class AlignTask, method createTransformPropertiesTable:
/**
 * Creates a map only for visible patches that intersect vdata.
 * @param src_vdata represents the VectorData instances in original form, of the original project and layer set.
 * @param tgt_vdata if not null, it must have the same size as src_vdata and their elements correspond one-to-one (as in, each tgt element is a clone of the src element at the same index).
 * @param lids_to_operate The ids of the layers on which any operation will be done.
 * tgt_vdata enables transformVectorData to apply the transforms to copies of the src_vdata in another project.
 */
public static final ReferenceData createTransformPropertiesTable(final List<Displayable> src_vdata, final List<Displayable> tgt_vdata, final Set<Long> lids_to_operate) {
    if (src_vdata.isEmpty())
        return null;
    final Map<Long, Patch.TransformProperties> tp = new HashMap<Long, Patch.TransformProperties>();
    // A map of Displayable vs a map of Layer id vs list of Patch ids in that Layer that lay under the Patch, sorted by stack index
    final Map<Displayable, Map<Long, TreeMap<Integer, Long>>> underlying = new HashMap<Displayable, Map<Long, TreeMap<Integer, Long>>>();
    // The set of layers used
    final Set<Long> src_layer_lids_used = new HashSet<Long>();
    // Parallelize! This operation can be insanely expensive
    final int nproc = Runtime.getRuntime().availableProcessors();
    final ExecutorService exec = Utils.newFixedThreadPool(nproc, "AlignTask-createTransformPropertiesTable");
    final List<Future<?>> dtasks = new ArrayList<Future<?>>();
    final List<Future<?>> ltasks = new ArrayList<Future<?>>();
    final Thread current = Thread.currentThread();
    try {
        for (int i = src_vdata.size() - 1; i > -1; i--) {
            final Displayable src_d = src_vdata.get(i);
            // filter out anything that is not VectorData
            if (!(src_d instanceof VectorData))
                continue;
            // use src_d if tgt_vdata is null
            final Displayable tgt_d = null == tgt_vdata ? src_d : tgt_vdata.get(i);
            // Some checking
            if (!(tgt_d instanceof VectorData)) {
                Utils.log("WARNING ignoring provided tgt_vdata " + tgt_d + " which is NOT a VectorData instance!");
                continue;
            }
            if (src_d.getClass() != tgt_d.getClass()) {
                Utils.log("WARNING src_d and tgt_d are instances of different classes:\n src_d :: " + src_d + "\n tgt_d :: " + tgt_d);
            }
            dtasks.add(exec.submit(new Runnable() {

                @SuppressWarnings({ "unchecked", "rawtypes" })
                @Override
                public void run() {
                    final Map<Long, TreeMap<Integer, Long>> under = new HashMap<Long, TreeMap<Integer, Long>>();
                    synchronized (underlying) {
                        underlying.put(tgt_d, under);
                    }
                    if (current.isInterrupted())
                        return;
                    // Iterate the layers in which this VectorData has any data AND which have to be transformed
                    for (final Long olid : src_d.getLayerIds()) {
                        final long lid = olid.longValue();
                        // skip if the layer with id 'lid' is not affected
                        if (!lids_to_operate.contains(lid))
                            continue;
                        final Layer la = src_d.getLayerSet().getLayer(lid);
                        final Area a = src_d.getAreaAt(la);
                        if (null == a || a.isEmpty()) {
                            // does not paint in the layer
                            continue;
                        }
                        // The list of patches that lay under VectorData d, sorted by their stack index in the layer
                        final TreeMap<Integer, Long> stacked_patch_ids = new TreeMap<Integer, Long>();
                        synchronized (under) {
                            under.put(lid, stacked_patch_ids);
                        }
                        final boolean[] layer_visited = new boolean[] { false };
                        // Iterate the source patches; the 'true' argument picks visible patches only
                        for (final Patch patch : (Collection<Patch>) (Collection) la.getDisplayables(Patch.class, a, true)) {
                            if (current.isInterrupted())
                                return;
                            try {
                                ltasks.add(exec.submit(new Runnable() {

                                    @Override
                                    public void run() {
                                        if (current.isInterrupted())
                                            return;
                                        synchronized (patch) {
                                            Patch.TransformProperties props;
                                            synchronized (tp) {
                                                props = tp.get(patch.getId());
                                            }
                                            if (null == props) {
                                                props = patch.getTransformPropertiesCopy();
                                                // Cache the props
                                                synchronized (tp) {
                                                    tp.put(patch.getId(), props);
                                                }
                                            }
                                            // Cache this patch as lying under the VectorData d
                                            synchronized (stacked_patch_ids) {
                                                // sorted by stack index
                                                stacked_patch_ids.put(la.indexOf(patch), patch.getId());
                                                // Utils.log("Added patch for layer " + la + " with stack index " + la.indexOf(patch) + ", patch " + patch);
                                            }
                                            if (!layer_visited[0]) {
                                                // The unsynchronized check may let the layer id be added twice,
                                                // but that's fine since src_layer_lids_used is a Set anyway
                                                layer_visited[0] = true;
                                                synchronized (src_layer_lids_used) {
                                                    src_layer_lids_used.add(la.getId());
                                                }
                                            }
                                        }
                                    }
                                }));
                            } catch (final Throwable t) {
                                IJError.print(t);
                                return;
                            }
                        }
                    }
                }
            }));
        }
        Utils.wait(dtasks);
        Utils.wait(ltasks);
    } catch (final Throwable t) {
        IJError.print(t);
    } finally {
        exec.shutdownNow();
    }
    return new ReferenceData(tp, underlying, src_layer_lids_used);
}
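A minimal calling sketch for the method above, assuming the VectorData displayables and the affected layer ids are already known; the helper name snapshotBeforeAlign is hypothetical and not part of AlignTask:

// Hypothetical caller, not part of AlignTask: snapshots patch transforms for the given
// vector data before a subset of layers is realigned.
static ReferenceData snapshotBeforeAlign(final List<Displayable> vdata, final Set<Long> layer_ids) {
    // Passing null for tgt_vdata makes the method use the source displayables themselves.
    return AlignTask.createTransformPropertiesTable(vdata, null, layer_ids);
}

The returned ReferenceData bundles the cached Patch.TransformProperties, the per-layer stacking order of the patches under each VectorData, and the set of layer ids used, which is what the javadoc above refers to when it mentions applying the transforms to the vector data afterwards.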