Use of ini.trakem2.display.Layer in project TrakEM2 by trakem2.
The class ProjectTree, method rawSendToSiblingProject.
/**
* Assumes that both projects have the same TemplateThing structure,
* and assumes that the parent of the {@code source_pt} and the {@code landing_parent}
* instances are of the same type.
*
* @param source_pt The {@link ProjectThing} to be cloned.
* @param transfer_mode Either 0 ("As is") or 1 ("Transformed with the images").
* @param target_project The sibling project into which to insert a clone of the {@code source_pt}.
* @param landing_parent The ProjectThing in the sibling project that receives the cloned {@code source_pt}.
*/
public boolean rawSendToSiblingProject(// the source ProjectThing to copy to the target project
final ProjectThing source_pt, final int transfer_mode, final Project target_project, final ProjectThing landing_parent) {
try {
// Check that all the Layers used by the objects to transfer also exist in the target project!
// 1 - Cheap way: check whether all layers in the source project also exist in the target project, by id
HashSet<Long> lids = new HashSet<Long>();
for (final Layer layer : this.project.getRootLayerSet().getLayers()) {
lids.add(layer.getId());
}
HashSet<Long> tgt_lids = new HashSet<Long>(lids);
for (final Layer layer : target_project.getRootLayerSet().getLayers()) {
lids.remove(layer.getId());
tgt_lids.add(layer.getId());
}
List<Displayable> original_vdata = null;
final Set<Long> lids_to_operate = new HashSet<Long>();
if (0 != lids.size()) {
original_vdata = new ArrayList<Displayable>();
// All their layers MUST be in the target project.
for (final ProjectThing child : source_pt.findChildrenOfTypeR(Displayable.class)) {
final Displayable d = (Displayable) child.getObject();
if (!tgt_lids.containsAll(d.getLayerIds())) {
Utils.log("CANNOT transfer: not all required layers are present in the target project!\n First object that couldn't be transfered: \n " + d);
return false;
}
if (d instanceof VectorData) {
original_vdata.add(d);
lids_to_operate.addAll(d.getLayerIds());
}
}
}
// Deep-clone the ProjectThing to transfer, then add it to the landing_parent in the other tree.
ProjectThing copy;
try {
// new ids, taken from target_project
copy = source_pt.deepClone(target_project, false);
} catch (Exception ee) {
Utils.logAll("Can't send: " + ee.getMessage());
IJError.print(ee);
return false;
}
if (null == landing_parent.getChildTemplate(copy.getTemplate().getType())) {
// ensure the landing parent's template accepts children of this type
landing_parent.getTemplate().addChild(copy.getTemplate().shallowCopy());
}
if (!landing_parent.addChild(copy)) {
Utils.log("Could NOT transfer the node!");
return false;
}
// Get the list of Profile instances in the source Project, in the same order
// that they will be in the target project:
final List<Profile> srcProfiles = new ArrayList<Profile>();
for (final ProjectThing profile_pt : source_pt.findChildrenOfTypeR(Profile.class)) {
srcProfiles.add((Profile) profile_pt.getObject());
}
final List<ProjectThing> copies = copy.findChildrenOfTypeR(Displayable.class);
final List<Profile> newProfiles = new ArrayList<Profile>();
// Utils.log2("copies size: " + copies.size());
final List<Displayable> vdata = new ArrayList<Displayable>();
final List<ZDisplayable> zd = new ArrayList<ZDisplayable>();
for (final ProjectThing t : copies) {
final Displayable d = (Displayable) t.getObject();
// all should be, this is just future-proof code.
if (d instanceof VectorData)
vdata.add(d);
if (d instanceof ZDisplayable) {
zd.add((ZDisplayable) d);
} else {
// profile: always special
newProfiles.add((Profile) d);
}
}
// Fix Profile instances: exploit that the order has been conserved when copying.
int profileIndex = 0;
for (final Profile newProfile : newProfiles) {
// Corresponding Profile:
final Profile srcProfile = srcProfiles.get(profileIndex++);
// Corresponding layer: layers have the same IDs by definition of what a sibling Project is.
final Layer newLayer = target_project.getRootLayerSet().getLayer(srcProfile.getLayer().getId());
newLayer.add(newProfile);
// Corresponding links
for (final Displayable srcLinkedProfile : srcProfile.getLinked(Profile.class)) {
newProfile.link(newProfiles.get(srcProfiles.indexOf(srcLinkedProfile)));
}
}
// add them all in one shot
target_project.getRootLayerSet().addAll(zd);
// could have changed
target_project.getTemplateTree().rebuild();
// When trying to rebuild just the landing_parent, it doesn't always work. Needs checking TODO
target_project.getProjectTree().rebuild();
// Open up the path to the landing parent node
final TreePath tp = new TreePath(DNDTree.findNode(landing_parent, target_project.getProjectTree()).getPath());
Utils.invokeLater(new Runnable() {
public void run() {
target_project.getProjectTree().scrollPathToVisible(tp);
target_project.getProjectTree().setSelectionPath(tp);
}
});
if (1 == transfer_mode) {
// Collect original vdata
if (null == original_vdata) {
original_vdata = new ArrayList<Displayable>();
for (final ProjectThing child : source_pt.findChildrenOfTypeR(Displayable.class)) {
final Displayable d = (Displayable) child.getObject();
if (d instanceof VectorData) {
original_vdata.add(d);
lids_to_operate.addAll(d.getLayerIds());
}
}
}
// Utils.log2("original vdata:", original_vdata);
// Utils.log2("vdata:", vdata);
// Transform with images
AlignTask.transformVectorData(AlignTask.createTransformPropertiesTable(original_vdata, vdata, lids_to_operate), vdata, target_project.getRootLayerSet());
}
return true;
} catch (Exception e) {
IJError.print(e);
}
return false;
}
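For orientation, here is a minimal calling sketch, assuming two sibling projects are already open and share the same TemplateThing structure. The helper name and the way its arguments are obtained are illustrative; only the getProjectTree and rawSendToSiblingProject calls are taken from the snippet above.
// Hypothetical helper (not TrakEM2 API): copy a ProjectThing subtree from a source
// project into a sibling project, transforming vector data with the images.
public static boolean copySubtreeToSibling(final Project source, final Project target,
        final ProjectThing subtree, final ProjectThing landingParent) {
    // transfer_mode: 0 = "As is", 1 = "Transformed with the images"
    final boolean ok = source.getProjectTree().rawSendToSiblingProject(subtree, 1, target, landingParent);
    if (!ok) {
        Utils.log("Transfer failed: check that every layer used by the subtree exists in the target project.");
    }
    return ok;
}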
Use of ini.trakem2.display.Layer in project TrakEM2 by trakem2.
The class DBLoader, method fetchLayer.
/**
* Load all objects into the Layer: Profile and Pipe instances from hs_pt (a table of ProjectThing wrappers), and Patch, LayerSet, DLabel, etc. from the database.
*/
private Layer fetchLayer(Project project, long id, HashMap hs_pt) throws Exception {
ResultSet r = connection.prepareStatement("SELECT * FROM ab_layers WHERE id=" + id).executeQuery();
Layer layer = null;
if (r.next()) {
long layer_id = r.getLong("id");
layer = new Layer(project, layer_id, r.getDouble("z"), r.getDouble("thickness"));
// find the Layer's parent
long parent_id = r.getLong("layer_set_id");
Object set = hs_pt.get(new Long(parent_id));
if (null != set) {
((LayerSet) set).addSilently(layer);
} else {
Utils.log("Loader.fetchLayer: WARNING no parent for layer " + layer);
}
// add the displayables from hs_pt that correspond to this layer (and all other objects that belong to the layer)
HashMap hs_d = new HashMap();
ResultSet rd = connection.prepareStatement("SELECT ab_displayables.id, ab_profiles.id, layer_id, stack_index FROM ab_displayables,ab_profiles WHERE ab_displayables.id=ab_profiles.id AND layer_id=" + layer_id).executeQuery();
while (rd.next()) {
Long idd = new Long(rd.getLong("id"));
Object ob = hs_pt.get(idd);
// Utils.log("Found profile with id=" + idd + " and ob = " + ob);
if (null != ob) {
hs_d.put(new Integer(rd.getInt("stack_index")), ob);
}
}
rd.close();
// fetch LayerSet objects (which are also Displayable), and put them in the hs_pt (this is hackerous)
ResultSet rls = connection.prepareStatement("SELECT * FROM ab_layer_sets, ab_displayables WHERE ab_layer_sets.id=ab_displayables.id AND ab_layer_sets.parent_layer_id=" + id).executeQuery();
while (rls.next()) {
long ls_id = rls.getLong("id");
LayerSet layer_set = new LayerSet(project, ls_id, rls.getString("title"), (float) rls.getDouble("width"), (float) rls.getDouble("height"), rls.getDouble("rot_x"), rls.getDouble("rot_y"), rls.getDouble("rot_z"), (float) rls.getDouble("layer_width"), (float) rls.getDouble("layer_height"), rls.getBoolean("locked"), rls.getInt("snapshots_mode"), new AffineTransform(rls.getDouble("m00"), rls.getDouble("m10"), rls.getDouble("m01"), rls.getDouble("m11"), rls.getDouble("m02"), rls.getDouble("m12")));
hs_pt.put(new Long(ls_id), layer_set);
hs_d.put(new Integer(rls.getInt("stack_index")), layer_set);
layer_set.setLayer(layer, false);
// find the pipes (or other possible ZDisplayable objects) in the hs_pt that belong to this LayerSet and add them silently
ResultSet rpi = connection.prepareStatement("SELECT ab_displayables.id, ab_zdisplayables.id, layer_id, layer_set_id, stack_index FROM ab_displayables,ab_zdisplayables WHERE ab_displayables.id=ab_zdisplayables.id AND layer_set_id=" + ls_id + " ORDER BY stack_index ASC").executeQuery();
while (rpi.next()) {
Long idd = new Long(rpi.getLong("id"));
Object ob = hs_pt.get(idd);
if (null != ob && ob instanceof ZDisplayable) {
layer_set.addSilently((ZDisplayable) ob);
} else {
Utils.log("fetchLayer: failed to add a ZDisplayable to the layer_set. zdispl id = " + idd);
}
}
rpi.close();
}
rls.close();
// add Patch objects from ab_patches joined with ab_displayables
ResultSet rp = connection.prepareStatement("SELECT ab_patches.id, ab_displayables.id, layer_id, title, width, height, stack_index, imp_type, locked, min, max, m00, m10, m01, m11, m02, m12 FROM ab_patches,ab_displayables WHERE ab_patches.id=ab_displayables.id AND ab_displayables.layer_id=" + layer_id).executeQuery();
while (rp.next()) {
long patch_id = rp.getLong("id");
Patch patch = new Patch(project, patch_id, rp.getString("title"), (float) rp.getDouble("width"), (float) rp.getDouble("height"), rp.getInt("o_width"), rp.getInt("o_height"), rp.getInt("imp_type"), rp.getBoolean("locked"), rp.getDouble("min"), rp.getDouble("max"), new AffineTransform(rp.getDouble("m00"), rp.getDouble("m10"), rp.getDouble("m01"), rp.getDouble("m11"), rp.getDouble("m02"), rp.getDouble("m12")));
// collecting all Displayable objects to reconstruct links
hs_pt.put(new Long(patch_id), patch);
hs_d.put(new Integer(rp.getInt("stack_index")), patch);
}
rp.close();
// add DLabel objects
ResultSet rl = connection.prepareStatement("SELECT ab_labels.id, ab_displayables.id, layer_id, title, width, height, m00, m10, m01, m11, m02, m12, stack_index, font_name, font_style, font_size, ab_labels.type, locked FROM ab_labels,ab_displayables WHERE ab_labels.id=ab_displayables.id AND ab_displayables.layer_id=" + layer_id).executeQuery();
while (rl.next()) {
long label_id = rl.getLong("id");
DLabel label = new DLabel(project, label_id, rl.getString("title"), (float) rl.getDouble("width"), (float) rl.getDouble("height"), rl.getInt("type"), rl.getString("font_name"), rl.getInt("font_style"), rl.getInt("font_size"), rl.getBoolean("locked"), new AffineTransform(rl.getDouble("m00"), rl.getDouble("m10"), rl.getDouble("m01"), rl.getDouble("m11"), rl.getDouble("m02"), rl.getDouble("m12")));
// collecting all Displayable objects to reconstruct links
hs_pt.put(new Long(label_id), label);
hs_d.put(new Integer(rl.getInt("stack_index")), label);
}
rl.close();
// Add silently to the Layer ordered by stack index
Set e = hs_d.keySet();
Object[] si = new Object[hs_d.size()];
si = e.toArray(si);
// will it sort an array of integers correctly? Who knows!
Arrays.sort(si);
for (int i = 0; i < si.length; i++) {
// Utils.log("Loader layer.addSilently: adding " + (DBObject)hs_d.get(si[i]));
layer.addSilently((DBObject) hs_d.get(si[i]));
}
// find displays and open later, when fully loaded.
ResultSet rdi = connection.prepareStatement("SELECT * FROM ab_displays WHERE layer_id=" + layer.getId()).executeQuery();
while (rdi.next()) {
fetchDisplay(rdi, layer);
}
rdi.close();
}
r.close();
return layer;
}
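The stack-index ordering above sorts an Object[] of boxed Integers before adding to the Layer. As a sketch of the same idea in a more explicit form (an assumption about intent, not code from DBLoader), a sorted map keyed by stack index gives the ascending iteration order directly, assuming the usual java.util imports:
// Sketch (hypothetical helper, not part of DBLoader): add DBObjects to a Layer
// in ascending stack_index order without sorting an Object[] by hand.
static void addByStackIndex(final Layer layer, final java.util.Map<Integer, DBObject> byStackIndex) {
    for (final DBObject dbo : new java.util.TreeMap<Integer, DBObject>(byStackIndex).values()) {
        layer.addSilently(dbo); // same call used above, in deterministic order
    }
}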
Use of ini.trakem2.display.Layer in project TrakEM2 by trakem2.
The class Render, method renderObject.
/**
* Accepts a 'profile_list' Thing and composes a new Ob
*/
private void renderObject(Thing profile_list) {
// check preconditions
if (!profile_list.getType().equals("profile_list"))
return;
// do not accept an empty profile_list Thing
final ArrayList<? extends Thing> al = profile_list.getChildren();
if (null == al || al.size() < 2)
return;
// new style: follows profile links and generates several obs, one per branch, ensuring that there is only one profile per layer in the generated Ob for the .shapes file.
// 1 - gather all profiles
final HashSet<Profile> hs = new HashSet<Profile>();
for (final Thing child : al) {
Object ob = child.getObject();
if (ob instanceof Profile) {
hs.add((Profile) ob);
} else {
Utils.log2("Render: skipping non Profile class child");
}
}
String name = profile_list.getParent().getTitle();
final ArrayList<String> al_used_names = new ArrayList<String>();
// make unique object name, since it'll be the group
String name2 = name;
int k = 1;
while (ht_objects.containsKey(name2)) {
name2 = name + "-" + k;
k++;
}
name = name2;
al_used_names.add(name);
// 2 - start at the last found profile with the lowest Z, and recurse until done
// Utils.log2("Calling renderSubObjects with " + hs.size() + " profiles");
renderSubObjects(hs, al_used_names);
/* //old style, assumes a single profile per section
Profile[] profiles = new Profile[al.size()];
Iterator it = al.iterator();
int i = 0;
while (it.hasNext()) {
Thing child = (Thing)it.next();
Displayable displ = (Displayable)child.getObject();
profiles[i] = (Profile)displ; //this cast is safe (as long as I'm the only programmer and I remember that Thing objects added to a 'profile_list' Thing are of class Profile only)
i++;
}
// make unique object name, since it'll be the group
String name = profile_list.getParent().getTitle();
String name2 = name;
int k = 1;
while (ht_objects.containsKey(name2)) {
name2 = name + "_" + k;
k++;
}
name = name2;
// store
ht_objects.put(name, new Ob(name, profiles));
*/
}
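The unique-name loop above is a small, reusable pattern; a standalone sketch of the same idea (hypothetical helper, not part of Render) could read:
// Sketch: derive a key not yet present in the map by appending "-1", "-2", ... to the base name.
static String uniqueName(final String base, final java.util.Map<String, ?> existing) {
    String candidate = base;
    int k = 1;
    while (existing.containsKey(candidate)) {
        candidate = base + "-" + k;
        k++;
    }
    return candidate;
}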
Use of ini.trakem2.display.Layer in project TrakEM2 by trakem2.
The class M, method apply.
/**
* Parts of {@code a} not intersected by any of the ROIs in {@code vdt} will be left untouched if {@code remove_outside} is false.
*/
public static final void apply(final VectorDataTransform vdt, final Area a, final boolean remove_outside) {
final Area b = new Area();
for (final VectorDataTransform.ROITransform rt : vdt.transforms) {
// Cut the intersecting part from a:
final Area intersection = new Area(a);
intersection.intersect(rt.roi);
a.subtract(intersection);
// .. and add it to b, transformed:
b.add(M.transform(rt.ct, intersection));
}
if (!M.isEmpty(a)) {
if (remove_outside) {
// Clear areas not affected by any ROITransform
Utils.log("WARNING: parts of an area in layer " + vdt.layer + "\n did not intersect any transformation target\n and were removed.");
a.reset();
} else
Utils.log("WARNING: parts of an area in layer " + vdt.layer + "\n remain untransformed.");
}
// Add b (the transformed parts) to what remains of a
a.add(b);
}
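The heart of the method is the cut-and-reinsert idiom on java.awt.geom.Area. A self-contained sketch of that idiom, with a plain AffineTransform standing in for the TrakEM2 coordinate transform (an assumption for illustration only), looks like this:
import java.awt.geom.AffineTransform;
import java.awt.geom.Area;

// Sketch: cut the part of 'a' that lies inside 'roi', transform it, and add it back,
// leaving the rest of 'a' untouched.
static void applyToRegion(final Area a, final Area roi, final AffineTransform t) {
    final Area intersection = new Area(a);
    intersection.intersect(roi);                  // the part of a inside the ROI
    a.subtract(intersection);                     // remove it from a
    a.add(intersection.createTransformedArea(t)); // re-add it, transformed
}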
Use of ini.trakem2.display.Layer in project TrakEM2 by trakem2.
The class AreaUtils, method generateTriangles.
/**
* Expects areas in coordinates local to the Displayable {@code d}.
* @param d
* @param scale The scaling of the entire universe, to limit the overall box
* @param resample_ The optimization parameter for marching cubes (e.g. a value of 2 will scale down to half, apply marching cubes, then scale the vertex coordinates back up by 2).
* @param areas
* @return The list of triangles involved, as a list of Point3f vertices in which every three consecutive vertices define one triangle.
*/
public static List<Point3f> generateTriangles(final Displayable d, final double scale, final int resample_, final Map<Layer, Area> areas) {
// in the LayerSet, layers are ordered by Z already.
try {
int n = areas.size();
if (0 == n)
return null;
final int resample;
if (resample_ <= 0) {
resample = 1;
Utils.log2("Fixing zero or negative resampling value to 1.");
} else
resample = resample_;
final LayerSet layer_set = d.getLayerSet();
final AffineTransform aff = d.getAffineTransformCopy();
final Rectangle r = d.getBoundingBox(null);
// remove translation from a copy of the Displayable's AffineTransform
final AffineTransform at_translate = new AffineTransform();
at_translate.translate(-r.x, -r.y);
aff.preConcatenate(at_translate);
// incorporate resampling scaling into the transform
final AffineTransform atK = new AffineTransform();
// Utils.log("resample: " + resample + " scale: " + scale);
// 'scale' is there to limit gigantic universes
final double K = (1.0 / resample) * scale;
atK.scale(K, K);
aff.preConcatenate(atK);
final Calibration cal = layer_set.getCalibrationCopy();
// Find first layer, compute depth, and fill in the depth vs area map
Layer first_layer = null, last_layer = null;
final int w = (int) Math.ceil(r.width * K);
final int h = (int) Math.ceil(r.height * K);
int depth = 0;
final Map<Integer, Area> ma = new HashMap<Integer, Area>();
for (final Layer la : layer_set.getLayers()) {
// layers sorted by Z ASC
final Area area = areas.get(la);
if (null != area) {
ma.put(depth, area);
if (null == first_layer) {
first_layer = la;
}
// Utils.log("area at depth " + depth + " for layer " + la);
depth++;
n--;
} else if (0 != depth) {
// Utils.log("Empty area at depth " + depth);
// an empty layer
depth++;
}
if (0 == n) {
last_layer = la;
// no more areas to paint
break;
}
}
if (0 == depth) {
Utils.log("ERROR could not find any areas for " + d);
return null;
}
if (0 != n) {
Utils.log("WARNING could not find all areas for " + d);
}
// No zero-padding: Marching Cubes now can handle edges
final ShapeList<ByteType> shapeList = new ShapeListCached<ByteType>(new int[] { w, h, depth }, new ByteType(), 32);
final Image<ByteType> shapeListImage = new Image<ByteType>(shapeList, shapeList.getBackground(), "ShapeListContainer");
// 255 or -1 don't work !? So, giving the highest value (127) that is both a byte and an int.
final ByteType intensity = new ByteType((byte) 127);
for (final Map.Entry<Integer, Area> e : ma.entrySet()) {
Area a = e.getValue();
if (!aff.isIdentity()) {
a = M.areaInIntsByRounding(a.createTransformedArea(aff));
}
shapeList.addShape(a, intensity, new int[] { e.getKey() });
}
// debug:
// ImagePlus imp = ImageJFunctions.displayAsVirtualStack(shapeListImage);
// imp.getProcessor().setMinAndMax( 0, 255 );
// imp.show();
// Utils.log2("Using imglib Shape List Image Container");
// Now marching cubes
// origins at 0,0,0: uncalibrated
final List<Point3f> list = new MCTriangulator().getTriangles(shapeListImage, 1, new float[3]);
// The list of triangles has coordinates:
// - in x,y: in pixels, scaled by K = (1 / resample) * scale,
// translated by r.x, r.y (the top-left coordinate of this AreaList bounding box)
// - in z: in stack slice indices
// So all x,y,z must be corrected in x,y and z of the proper layer
// final double offset = first_layer.getZ();
final int i_first_layer = layer_set.indexOf(first_layer);
// The x,y translation to correct each point by:
final float dx = (float) (r.x * scale * cal.pixelWidth);
final float dy = (float) (r.y * scale * cal.pixelHeight);
// Correct x,y by resampling and calibration, but not scale
// scale is already in the pixel coordinates
final float rsw = (float) (resample * cal.pixelWidth);
final float rsh = (float) (resample * cal.pixelHeight);
// no resampling in Z; note this uses pixelWidth, not pixelDepth.
final double sz = scale * cal.pixelWidth;
// debug:
/*
// which p.z types exist?
final TreeSet<Float> ts = new TreeSet<Float>();
for (final Iterator it = list.iterator(); it.hasNext(); ) {
ts.add(((Point3f)it.next()).z);
}
for (final Float pz : ts) Utils.log2("A z: " + pz);
*/
// debug: How many different Z?
/*
HashSet<Float> zs = new HashSet<Float>();
for (Point3f p : list) {
zs.add(p.z);
}
ArrayList<Float> a = new ArrayList<Float>(zs);
java.util.Collections.sort(a);
for (Float f : a) {
Utils.log("f: " + f);
}
*/
// Utils.log2("Number of slices: " + imp.getNSlices());
// Fix all points:
// Read from list, modify and put into verts
// and don't modify it if the verts already has it (it's just coincident)
final Point3f[] verts = new Point3f[list.size()];
// Utils.log("number of verts: " + verts.length + " mod 3: " + (verts.length % 3));
final TreeMap<Integer, Point3f> output = new TreeMap<Integer, Point3f>();
// The first section generates vertices at -1 and 0
// The last section generates them at last_section_index and last_section_index +1
// Capture from -1 to 0
fix3DPoints(list, output, verts, first_layer.getZ(), 0, -1, dx, dy, rsw, rsh, sz, 1);
int slice_index = 0;
for (final Layer la : layer_set.getLayers().subList(i_first_layer, i_first_layer + depth)) {
// If layer is empty, continue
/* // YEAH don't! At least the immediate next layer would have points, like the extra Z level after last layer, to account for the thickness of the layer!
if (empty_layers.contains(la)) {
slice_index++;
continue;
}
*/
fix3DPoints(list, output, verts, la.getZ(), la.getThickness(), slice_index, dx, dy, rsw, rsh, sz, 1);
slice_index++;
}
// Do the last layer again. The last layer has two Z planes in which it has pixels:
try {
// Capture from last_section_index to last_section_index+1, inclusive
fix3DPoints(list, output, verts, last_layer.getZ() + last_layer.getThickness(), 0, slice_index, dx, dy, rsw, rsh, sz, 2);
} catch (final Exception ee) {
IJError.print(ee);
}
// Handle potential errors:
if (0 != list.size() - output.size()) {
Utils.log2("Unprocessed/unused points: " + (list.size() - output.size()));
for (int i = 0; i < verts.length; i++) {
if (null == verts[i]) {
final Point3f p = (Point3f) list.get(i);
Utils.log2("verts[" + i + "] = " + p.x + ", " + p.y + ", " + p.z + " p.z as int: " + ((int) (p.z + 0.05f)));
}
}
return new ArrayList<Point3f>(output.values());
} else {
return java.util.Arrays.asList(verts);
}
} catch (final Exception e) {
e.printStackTrace();
}
return null;
}
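The coordinate setup above amounts to composing the Displayable's affine with a translation to its bounding-box origin and a uniform scaling K = (1 / resample) * scale. A standalone sketch of just that composition (assumes only java.awt.geom, no TrakEM2 types) follows:
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;

// Sketch: build the transform that maps local Displayable coordinates into the
// resampled, scaled voxel grid handed to marching cubes.
static AffineTransform gridTransform(final AffineTransform displayableAffine,
        final Rectangle boundingBox, final double scale, final int resample) {
    final AffineTransform aff = new AffineTransform(displayableAffine);
    final AffineTransform toOrigin = new AffineTransform();
    toOrigin.translate(-boundingBox.x, -boundingBox.y); // drop the bounding-box offset
    aff.preConcatenate(toOrigin);
    final double K = (1.0 / resample) * scale;           // resampling plus universe scaling
    final AffineTransform scaleK = new AffineTransform();
    scaleK.scale(K, K);
    aff.preConcatenate(scaleK);
    return aff;
}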