use of ini.trakem2.Project in project TrakEM2 by trakem2.
the class Loader method makePrescaledTiles.
/**
* Generate 256x256 tiles, as many as necessary, to cover the given srcRect, starting at max_scale. Designed to be slow but memory-capable.
*
* filename = z + "/" + row + "_" + column + "_" + s + ".jpg";
*
* row and column run from 0 to n with a step size of 1
* that is, row = y / ( 256 * 2^s ) and column = x / ( 256 * 2^s )
*
* z : z-level (slice)
* x,y: the pixel coordinates from which the row and column are derived, as above
* s: scale, which is 1 / (2^s), in integers: 0, 1, 2 ...
*
* var MAX_S = Math.floor( Math.log( MAX_Y + 1 ) / Math.LN2 ) - Math.floor( Math.log( Y_TILE_SIZE ) / Math.LN2 ) - 1;
*
* The module should not be more than 5
* At all levels, there should be an even number of rows and columns, except for the coarsest level.
* The coarsest level should be at least 5x5 tiles.
*
* Best results obtained when the srcRect approaches or is a square. Black space will pad the right and bottom edges when the srcRect is not exactly a square.
* Only the area within the srcRect is ever included, even if actual data exists beyond.
*
* @return The watcher thread, for joining purposes, or null if the dialog is canceled or preconditions are not met.
* @throws IllegalArgumentException if the type is not ImagePlus.GRAY8 or ImagePlus.COLOR_RGB.
*/
public Bureaucrat makePrescaledTiles(final Layer[] layers, final Class<?> clazz, final Rectangle srcRect, double max_scale_, final int c_alphas, final int type, String target_dir, final boolean from_original_images, final Saver saver, final int tileSide) {
if (null == layers || 0 == layers.length)
return null;
switch(type) {
case ImagePlus.GRAY8:
case ImagePlus.COLOR_RGB:
break;
default:
throw new IllegalArgumentException("Can only export for web with 8-bit or RGB");
}
// choose target directory
if (null == target_dir) {
final DirectoryChooser dc = new DirectoryChooser("Choose target directory");
target_dir = dc.getDirectory();
if (null == target_dir)
return null;
}
if (IJ.isWindows())
target_dir = target_dir.replace('\\', '/');
if (!target_dir.endsWith("/"))
target_dir += "/";
if (max_scale_ > 1) {
Utils.log("Prescaled Tiles: using max scale of 1.0");
// no point
max_scale_ = 1;
}
final String dir = target_dir;
final double max_scale = max_scale_;
final Worker worker = new Worker("Creating prescaled tiles") {
private void cleanUp() {
finishedWorking();
}
@Override
public void run() {
startedWorking();
try {
// project name
// String pname = layer[0].getProject().getTitle();
// create 'z' directories if they don't exist: check and ask!
// start with the highest scale level
final int[] best = determineClosestPowerOfTwo(srcRect.width > srcRect.height ? srcRect.width : srcRect.height);
final int edge_length = best[0];
final int n_edge_tiles = edge_length / tileSide;
Utils.log2("srcRect: " + srcRect);
Utils.log2("edge_length, n_edge_tiles, best[1] " + best[0] + ", " + n_edge_tiles + ", " + best[1]);
// thumbnail dimensions
// LayerSet ls = layer[0].getParent();
final double ratio = srcRect.width / (double) srcRect.height;
double thumb_scale = 1.0;
if (ratio >= 1) {
// width is larger or equal than height
thumb_scale = 192.0 / srcRect.width;
} else {
thumb_scale = 192.0 / srcRect.height;
}
// Figure out layer indices, given that layers are not necessarily evenly spaced
final TreeMap<Integer, Layer> indices = new TreeMap<Integer, Layer>();
final ArrayList<Integer> missingIndices = new ArrayList<Integer>();
final double resolution_z_px;
final int smallestIndex, largestIndex;
if (1 == layers.length) {
indices.put(0, layers[0]);
resolution_z_px = layers[0].getZ();
smallestIndex = 0;
largestIndex = 0;
} else {
// Ensure layers are sorted by Z index and are unique pointers and unique in Z coordinate:
final TreeMap<Double, Layer> t = new TreeMap<Double, Layer>();
for (final Layer l1 : new HashSet<Layer>(Arrays.asList(layers))) {
final Layer l2 = t.get(l1.getZ());
if (null == l2) {
t.put(l1.getZ(), l1);
} else {
// Ignore the layer with fewer objects
if (l1.getDisplayables().size() > l2.getDisplayables().size()) {
t.put(l1.getZ(), l1);
Utils.log("Ignoring duplicate layer: " + l2);
}
}
}
// What is the mode thickness, measured as Z(i) - Z(i-1)?
// (Distance between the Z of two consecutive layers)
final HashMap<Double, Integer> counts = new HashMap<Double, Integer>();
Layer prev = t.get(t.firstKey());
double modeThickness = 0;
int modeThicknessCount = 0;
for (final Layer la : t.tailMap(prev.getZ(), false).values()) {
// Thickness with 3-decimal precision only
final double d = ((int) ((la.getZ() - prev.getZ()) * 1000 + 0.5)) / 1000.0;
Integer c = counts.get(d);
//
if (null == c)
c = 0;
++c;
counts.put(d, c);
//
if (c > modeThicknessCount) {
modeThicknessCount = c;
modeThickness = d;
}
// advance so that d measures the distance between consecutive layers
prev = la;
}
// Not pixelDepth
resolution_z_px = modeThickness * prev.getParent().getCalibration().pixelWidth;
// Assign an index to each layer, approximating each layer at modeThickness intervals
for (final Layer la : t.values()) {
indices.put((int) (la.getZ() / modeThickness + 0.5), la);
}
// First and last
smallestIndex = indices.firstKey();
largestIndex = indices.lastKey();
Utils.logAll("indices: " + smallestIndex + ", " + largestIndex);
// Which indices are missing?
for (int i = smallestIndex + 1; i < largestIndex; ++i) {
if (!indices.containsKey(i)) {
missingIndices.add(i);
}
}
}
// JSON metadata for CATMAID
{
final StringBuilder sb = new StringBuilder("{");
final LayerSet ls = layers[0].getParent();
final Calibration cal = ls.getCalibration();
sb.append("\"volume_width_px\": ").append(srcRect.width).append(',').append('\n').append("\"volume_height_px\": ").append(srcRect.height).append(',').append('\n').append("\"volume_sections\": ").append(largestIndex - smallestIndex + 1).append(',').append('\n').append("\"extension\": \"").append(saver.getExtension()).append('\"').append(',').append('\n').append("\"resolution_x\": ").append(cal.pixelWidth).append(',').append('\n').append("\"resolution_y\": ").append(cal.pixelHeight).append(',').append('\n').append("\"resolution_z\": ").append(resolution_z_px).append(',').append('\n').append("\"units\": \"").append(cal.getUnit()).append('"').append(',').append('\n').append("\"offset_x_px\": 0,\n").append("\"offset_y_px\": 0,\n").append("\"offset_z_px\": ").append(indices.get(indices.firstKey()).getZ() * cal.pixelWidth / cal.pixelDepth).append(',').append('\n').append("\"missing_layers\": [");
for (final Integer i : missingIndices) sb.append(i - smallestIndex).append(',');
// remove last comma
sb.setLength(sb.length() - 1);
sb.append("]}");
if (!Utils.saveToFile(new File(dir + "metadata.json"), sb.toString())) {
Utils.logAll("WARNING: could not save " + dir + "metadata.json\nThe contents was:\n" + sb.toString());
}
}
for (final Map.Entry<Integer, Layer> entry : indices.entrySet()) {
if (this.quit) {
cleanUp();
return;
}
final int index = entry.getKey() - smallestIndex;
final Layer layer = entry.getValue();
// 1 - create a directory 'z' named as the layer's index
String tile_dir = dir + index;
File fdir = new File(tile_dir);
int tag = 1;
// Ensure there is a usable directory:
while (fdir.exists() && !fdir.isDirectory()) {
fdir = new File(tile_dir + "_" + tag);
tag++;
}
if (!fdir.exists()) {
fdir.mkdir();
Utils.log("Created directory " + fdir);
}
// if the directory exists already, just reuse it, overwriting its files as needed.
final String tmp = fdir.getAbsolutePath().replace('\\', '/');
if (!tile_dir.equals(tmp))
Utils.log("\tWARNING: directory will not be in the standard location.");
// debug:
Utils.log2("tile_dir: " + tile_dir + "\ntmp: " + tmp);
tile_dir = tmp;
if (!tile_dir.endsWith("/"))
tile_dir += "/";
// 2 - create layer thumbnail, max 192x192
ImagePlus thumb = getFlatImage(layer, srcRect, thumb_scale, c_alphas, type, clazz, true);
saver.save(thumb, tile_dir + "small");
// ImageSaver.saveAsJpeg(thumb.getProcessor(), tile_dir + "small.jpg", jpeg_quality, ImagePlus.COLOR_RGB != type);
flush(thumb);
thumb = null;
// 3 - fill directory with tiles
if (edge_length < tileSide) {
// edge_length is the side length of the square map of tileSide x tileSide tiles that covers an area equal to or larger than the desired srcRect (because all tiles have to be tileSide x tileSide in size)
// create single tile per layer
makeTile(layer, srcRect, max_scale, c_alphas, type, clazz, tile_dir + "0_0_0", saver);
} else {
// create pyramid of tiles
if (from_original_images) {
Utils.log("Exporting from web using original images");
// Create a giant 8-bit image of the whole layer from original images
double scale = 1;
Utils.log("Export srcRect: " + srcRect);
// WARNING: the snapshot will most likely be smaller than the virtual square image being chopped into tiles
ImageProcessor snapshot = null;
if (ImagePlus.COLOR_RGB == type) {
Utils.log("WARNING: ignoring alpha masks for 'use original images' and 'RGB color' options");
snapshot = Patch.makeFlatImage(type, layer, srcRect, scale, (ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), Color.black, true);
} else if (ImagePlus.GRAY8 == type) {
// Respect alpha masks and display range:
Utils.log("WARNING: ignoring scale for 'use original images' and '8-bit' options");
snapshot = ExportUnsignedShort.makeFlatImage((ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), srcRect, 0).convertToByte(true);
} else {
Utils.log("ERROR: don't know how to generate mipmaps for type '" + type + "'");
cleanUp();
return;
}
int scale_pow = 0;
int n_et = n_edge_tiles;
final ExecutorService exe = Utils.newFixedThreadPool("export-for-web");
final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
try {
while (n_et >= best[1]) {
final int snapWidth = snapshot.getWidth();
final int snapHeight = snapshot.getHeight();
final ImageProcessor source = snapshot;
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final String path = new StringBuilder(tile_dir).append(row).append('_').append(col).append('_').append(scale_pow).toString();
final int tileXStart = col * tileSide;
final int tileYStart = row * tileSide;
final int pixelOffset = tileYStart * snapWidth + tileXStart;
fus.add(exe.submit(new Callable<Boolean>() {
@Override
public Boolean call() {
if (ImagePlus.GRAY8 == type) {
final byte[] pixels = (byte[]) source.getPixels();
final byte[] p = new byte[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; y++, sourceIndex = pixelOffset + y * snapWidth) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ByteProcessor(tileSide, tileSide, p, GRAY_LUT)), path);
} else {
final int[] pixels = (int[]) source.getPixels();
final int[] p = new int[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; y++, sourceIndex = pixelOffset + y * snapWidth) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ColorProcessor(tileSide, tileSide, p)), path);
}
}
}));
}
}
//
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
//
Utils.wait(fus);
fus.clear();
// Scale snapshot in half with area averaging
final ImageProcessor nextSnapshot;
if (ImagePlus.GRAY8 == type) {
nextSnapshot = new ByteProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final byte[] p1 = (byte[]) snapshot.getPixels();
final byte[] p2 = (byte[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
p2[i++] = (byte) (((p1[offset1a + x1] & 0xff) + (p1[offset1a + x1 + 1] & 0xff) + (p1[offset1b + x1] & 0xff) + (p1[offset1b + x1 + 1] & 0xff)) / 4);
}
}
} else {
nextSnapshot = new ColorProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final int[] p1 = (int[]) snapshot.getPixels();
final int[] p2 = (int[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
final int ka = p1[offset1a + x1], kb = p1[offset1a + x1 + 1], kc = p1[offset1b + x1], kd = p1[offset1b + x1 + 1];
// Average each channel independently
final int red = (((ka >> 16) & 0xff) + ((kb >> 16) & 0xff) + ((kc >> 16) & 0xff) + ((kd >> 16) & 0xff)) / 4;
final int green = (((ka >> 8) & 0xff) + ((kb >> 8) & 0xff) + ((kc >> 8) & 0xff) + ((kd >> 8) & 0xff)) / 4;
final int blue = ((ka & 0xff) + (kb & 0xff) + (kc & 0xff) + (kd & 0xff)) / 4;
p2[i++] = (red << 16) + (green << 8) + blue;
}
}
}
// Assign for next iteration
snapshot = nextSnapshot;
// Scale snapshot with a TransformMesh
/*
AffineModel2D aff = new AffineModel2D();
aff.set(0.5f, 0, 0, 0.5f, 0, 0);
ImageProcessor scaledSnapshot = new ByteProcessor((int)(snapshot.getWidth() * scale), (int)(snapshot.getHeight() * scale));
final CoordinateTransformMesh mesh = new CoordinateTransformMesh( aff, 32, snapshot.getWidth(), snapshot.getHeight() );
final mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh> mapping = new mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh>( mesh );
mapping.mapInterpolated(snapshot, scaledSnapshot, Runtime.getRuntime().availableProcessors());
// Assign for next iteration
snapshot = scaledSnapshot;
snapshotPixels = (byte[]) scaledSnapshot.getPixels();
*/
}
} catch (final Throwable t) {
IJError.print(t);
} finally {
exe.shutdown();
}
} else {
// max_scale; // WARNING if scale is different than 1, it will FAIL to set the next scale properly.
double scale = 1;
int scale_pow = 0;
// cached for local modifications in the loop, works as the loop controller
int n_et = n_edge_tiles;
while (n_et >= best[1]) {
// best[1] is the minimal root found, i.e. one of 1, 2, 3, 4 or 5, from which powers of two are then taken to build up edge_length
// 0 < scale <= 1, so no precision lost
final int tile_side = (int) (256 / scale);
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final int i_tile = row * n_et + col;
Utils.showProgress(i_tile / (double) (n_et * n_et));
if (0 == i_tile % 100) {
// RGB int[] images
releaseToFit(tile_side * tile_side * 4 * 2);
}
if (this.quit) {
cleanUp();
return;
}
// TODO row and col are inverted
// in absolute coords; magnification is applied later
final Rectangle tile_src = new Rectangle(srcRect.x + tile_side * row, srcRect.y + tile_side * col, tile_side, tile_side);
// crop bounds
if (tile_src.x + tile_src.width > srcRect.x + srcRect.width)
tile_src.width = srcRect.x + srcRect.width - tile_src.x;
if (tile_src.y + tile_src.height > srcRect.y + srcRect.height)
tile_src.height = srcRect.y + srcRect.height - tile_src.y;
// negative tile sizes will be made into black tiles
// (negative dimensions occur for tiles beyond the edges of srcRect, since the grid of tiles has to be of equal number of rows and cols)
// should be row_col_scale, but results in transposed tiles in googlebrains, so I reversed the order.
makeTile(layer, tile_src, scale, c_alphas, type, clazz, new StringBuilder(tile_dir).append(col).append('_').append(row).append('_').append(scale_pow).toString(), saver);
}
}
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
}
}
}
}
} catch (final Exception e) {
IJError.print(e);
} finally {
Utils.showProgress(1);
}
cleanUp();
finishedWorking();
}
};
// watcher thread
return Bureaucrat.createAndStart(worker, layers[0].getProject());
}
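For reference, the tile naming and the maximum scale level described in the javadoc of makePrescaledTiles can be written out as a small standalone helper. This is only an illustrative sketch, not TrakEM2 code; the class and method names (TileNaming, maxScaleLevel, tilePath) and the parameterized tile side are assumptions.
// Illustrative sketch (not part of TrakEM2): the MAX_S and tile path formulas
// from the javadoc above, with the tile side passed in as a parameter.
public final class TileNaming {

    // MAX_S = floor(log2(MAX_Y + 1)) - floor(log2(Y_TILE_SIZE)) - 1
    public static int maxScaleLevel(final int maxY, final int tileSide) {
        return (int) (Math.floor(Math.log(maxY + 1) / Math.log(2)) - Math.floor(Math.log(tileSide) / Math.log(2)) - 1);
    }

    // filename = z + "/" + row + "_" + column + "_" + s + ".jpg",
    // where row = y / (tileSide * 2^s) and column = x / (tileSide * 2^s)
    public static String tilePath(final int z, final int x, final int y, final int s, final int tileSide) {
        final int span = tileSide * (1 << s); // pixels covered by one tile edge at scale level s
        return z + "/" + (y / span) + "_" + (x / span) + "_" + s + ".jpg";
    }
}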
use of ini.trakem2.Project in project TrakEM2 by trakem2.
the class Loader method importNextImage.
public Patch importNextImage(final Project project, final double x, final double y) {
if (null == last_opened_path) {
return importImage(project, x, y);
}
final int i_slash = last_opened_path.lastIndexOf("/");
final String dir_name = last_opened_path.substring(0, i_slash + 1);
final File dir = new File(dir_name);
final String last_file = last_opened_path.substring(i_slash + 1);
final String[] file_names = dir.list();
String next_file = null;
final String exts = "tiftiffjpgjpegpnggifzipdicombmppgm";
for (int i = 0; i < file_names.length; i++) {
if (last_file.equals(file_names[i]) && i < file_names.length - 1) {
// loop until finding a suitable next
for (int j = i + 1; j < file_names.length; j++) {
final String ext = file_names[j].substring(file_names[j].lastIndexOf('.') + 1).toLowerCase();
if (-1 != exts.indexOf(ext)) {
next_file = file_names[j];
break;
}
}
break;
}
}
if (null == next_file) {
Utils.showMessage("No more files after " + last_file);
return null;
}
releaseToFit(new File(dir_name + next_file).length() * 3);
IJ.redirectErrorMessages();
final ImagePlus imp = openImagePlus(dir_name + next_file);
if (null == imp)
return null;
if (0 == imp.getWidth() || 0 == imp.getHeight()) {
Utils.showMessage("Can't import image of zero width or height.");
flush(imp);
return null;
}
final String path = dir + "/" + next_file;
final Patch p = new Patch(project, imp.getTitle(), x, y, imp);
addedPatchFrom(path, p);
// WARNING may be altered concurrently
last_opened_path = path;
if (isMipMapsRegenerationEnabled())
regenerateMipMaps(p);
return p;
}
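A minimal usage sketch, assuming a Project named project is already open and layer is the Layer that should receive the imported image; as the code above shows, importNextImage creates and registers the Patch but does not place it in any layer, so the caller does that.
// Hypothetical usage sketch; project and layer are assumed to exist already.
final Loader loader = project.getLoader();
final Patch next = loader.importNextImage(project, 0, 0); // place at world coordinates (0, 0)
if (null != next) {
    layer.add(next); // importNextImage does not insert the Patch into a layer by itself
}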
use of ini.trakem2.Project in project TrakEM2 by trakem2.
the class Loader method importLabelsAsAreaLists.
/**
* If base_x or base_y is Double.MAX_VALUE, or alpha_ is outside [0, 1], the values are asked for in a GenericDialog.
*/
public Bureaucrat importLabelsAsAreaLists(final Layer first_layer, final String path_, final double base_x_, final double base_y_, final float alpha_, final boolean add_background_) {
final Worker worker = new Worker("Import labels as arealists") {
@Override
public void run() {
startedWorking();
try {
String path = path_;
if (null == path) {
final OpenDialog od = new OpenDialog("Select stack", "");
final String name = od.getFileName();
if (null == name || 0 == name.length()) {
return;
}
String dir = od.getDirectory().replace('\\', '/');
if (!dir.endsWith("/"))
dir += "/";
path = dir + name;
}
if (path.toLowerCase().endsWith(".xml")) {
Utils.log("Avoided opening a TrakEM2 project.");
return;
}
double base_x = base_x_;
double base_y = base_y_;
float alpha = alpha_;
boolean add_background = add_background_;
Layer layer = first_layer;
if (Double.MAX_VALUE == base_x || Double.MAX_VALUE == base_y || alpha < 0 || alpha > 1) {
final GenericDialog gd = new GenericDialog("Base x, y");
Utils.addLayerChoice("First layer:", first_layer, gd);
gd.addNumericField("Base_X:", 0, 0);
gd.addNumericField("Base_Y:", 0, 0);
gd.addSlider("Alpha:", 0, 100, 40);
gd.addCheckbox("Add background (zero)", false);
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
layer = first_layer.getParent().getLayer(gd.getNextChoiceIndex());
base_x = gd.getNextNumber();
base_y = gd.getNextNumber();
if (Double.isNaN(base_x) || Double.isNaN(base_y)) {
Utils.log("Base x or y is NaN!");
return;
}
alpha = (float) (gd.getNextNumber() / 100);
add_background = gd.getNextBoolean();
}
releaseToFit(new File(path).length() * 3);
final ImagePlus imp;
if (path.toLowerCase().endsWith(".am")) {
final AmiraMeshDecoder decoder = new AmiraMeshDecoder();
if (decoder.open(path))
imp = new ImagePlus(path, decoder.getStack());
else
imp = null;
} else {
imp = openImagePlus(path);
}
if (null == imp) {
Utils.log("Could not open image at " + path);
return;
}
final Map<Float, AreaList> alis = AmiraImporter.extractAreaLists(imp, layer, base_x, base_y, alpha, add_background);
if (!hasQuitted() && alis.size() > 0) {
layer.getProject().getProjectTree().insertSegmentations(alis.values());
}
} catch (final Exception e) {
IJError.print(e);
} finally {
finishedWorking();
}
}
};
return Bureaucrat.createAndStart(worker, first_layer.getProject());
}
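A usage sketch for importLabelsAsAreaLists, assuming an open Project named project; the stack path is a placeholder, and passing null for it triggers the file dialog shown in the code above.
// Hypothetical usage sketch; project is assumed to be an open Project.
final Layer first = project.getRootLayerSet().getLayer(0);
final Bureaucrat task = project.getLoader().importLabelsAsAreaLists(
        first,
        "/path/to/labels.tif", // placeholder path; pass null to be asked with a file dialog
        0, 0,                  // base_x, base_y offsets
        0.4f,                  // alpha in [0, 1]; out-of-range values trigger the setup dialog
        false);                // do not import the zero (background) label
// the returned Bureaucrat runs the import on a background worker thread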
use of ini.trakem2.Project in project TrakEM2 by trakem2.
the class TMLHandler method getProjectData.
/**
* returns 4 objects packed in an array:
* <pre>
* [0] = root TemplateThing
* [1] = root ProjectThing (contains Project instance)
* [2] = root LayerThing (contains the top-level LayerSet)
* [3] = expanded states of all ProjectThing objects
* </pre>
* <p>
* Also, triggers the reconstruction of links and assignment of Displayable objects to their layer.
* </p>
*/
public Object[] getProjectData(final boolean open_displays) {
if (null == project)
return null;
this.open_displays = open_displays;
// Links exist between Displayable objects.
for (final Displayable d : ht_displayables.values()) {
String olinks = ht_links.get(d);
// not linked
if (null == olinks)
continue;
String[] links = olinks.split(",");
Long lid = null;
for (int i = 0; i < links.length; i++) {
try {
lid = new Long(links[i]);
} catch (NumberFormatException nfe) {
Utils.log2("Ignoring incorrectly formated link '" + links[i] + "' for ob " + d);
continue;
}
Displayable partner = ht_displayables.get(lid);
if (null != partner)
d.link(partner, false);
else
Utils.log("TMLHandler: can't find partner with id=" + links[i] + " for Displayable with id=" + d.getId());
}
}
// 1.2 - Reconstruct linked properties
for (final Map.Entry<Displayable, Map<Long, Map<String, String>>> lpe : all_linked_props.entrySet()) {
final Displayable origin = lpe.getKey();
for (final Map.Entry<Long, Map<String, String>> e : lpe.getValue().entrySet()) {
final Displayable target = ht_displayables.get(e.getKey());
if (null == target) {
Utils.log("Setting linked properties for origin " + origin.getId() + ":\n\t* Could not find target displayable #" + e.getKey());
continue;
}
origin.setLinkedProperties(target, e.getValue());
}
}
// 2 - Add Displayable objects to ProjectThing that can contain them
for (final Map.Entry<Long, ProjectThing> entry : ht_oid_pt.entrySet()) {
ProjectThing pt = entry.getValue();
Object od = ht_displayables.remove(entry.getKey());
// Utils.log("==== processing: Displayable [" + od + "] vs. ProjectThing [" + pt + "]");
if (null != od) {
pt.setObject(od);
} else {
Utils.log("#### Failed to find a Displayable for ProjectThing " + pt + " #####");
}
}
// 3 - Assign a layer pointer to ZDisplayable objects
for (final ZDisplayable zd : ht_zdispl.values()) {
// zd.setLayer((Layer)zd.getLayerSet().getLayers().get(0));
zd.setLayer(zd.getLayerSet().getLayer(0));
}
// 4 - Assign layers to Treeline nodes
for (final Layer la : al_layers) {
final List<Node<?>> list = node_layer_table.remove(la.getId());
if (null == list)
continue;
for (final Node<?> nd : list) nd.setLayer(la);
}
if (!node_layer_table.isEmpty()) {
Utils.log("ERROR: node_layer_table is not empty!");
}
// 5 - Assign root nodes to Treelines, now that all nodes have a layer
for (final Map.Entry<Tree<?>, Node<?>> e : tree_root_nodes.entrySet()) {
if (null == e.getValue()) {
// Utils.log2("Ignoring, applies to new Treeline format only.");
continue;
}
// Can't compile with <?>
// will generate node caches of each Treeline
e.getKey().setRoot((Node) e.getValue());
}
tree_root_nodes.clear();
// Assign colors to nodes
for (final Map.Entry<Color, Collection<Node<?>>> e : node_colors.entrySet()) {
for (final Node<?> nd : e.getValue()) {
nd.setColor(e.getKey());
}
}
node_colors.clear();
// 6 - Run legacy operations
for (final Runnable r : legacy) {
r.run();
}
try {
// Create a table with all layer ids vs layer instances:
final HashMap<Long, Layer> ht_lids = new HashMap<Long, Layer>();
for (final Layer layer : al_layers) {
ht_lids.put(new Long(layer.getId()), layer);
}
// Spawn threads to recreate buckets, starting from the subset of displays to open
int n = Runtime.getRuntime().availableProcessors();
switch(n) {
case 1:
break;
case 2:
case 3:
case 4:
n--;
break;
default:
n -= 2;
break;
}
final ExecutorService exec = Utils.newFixedThreadPool(n, "TMLHandler-recreateBuckets");
final Set<Long> dlids = new HashSet<Long>();
final LayerSet layer_set = (LayerSet) root_lt.getObject();
final List<Future<?>> fus = new ArrayList<Future<?>>();
final List<Future<?>> fus2 = new ArrayList<Future<?>>();
for (final HashMap<String, String> ht_attributes : al_displays) {
String ob = ht_attributes.get("layer_id");
if (null == ob)
continue;
final Long lid = new Long(ob);
dlids.add(lid);
final Layer la = ht_lids.get(lid);
if (null == la) {
ht_lids.remove(lid);
continue;
}
// to open later:
new Display(project, Long.parseLong(ht_attributes.get("id")), la, ht_attributes);
fus.add(exec.submit(new Runnable() {
public void run() {
la.recreateBuckets();
}
}));
}
fus.add(exec.submit(new Runnable() {
public void run() {
// only for ZDisplayable
layer_set.recreateBuckets(false);
}
}));
// Ensure launching:
if (dlids.isEmpty() && layer_set.size() > 0) {
dlids.add(layer_set.getLayer(0).getId());
}
final List<Layer> layers = layer_set.getLayers();
for (final Long lid : new HashSet<Long>(dlids)) {
fus.add(exec.submit(new Runnable() {
public void run() {
int start = layer_set.indexOf(layer_set.getLayer(lid.longValue()));
int next = start + 1;
int prev = start - 1;
while (next < layer_set.size() || prev > -1) {
if (prev > -1) {
final Layer lprev = layers.get(prev);
synchronized (dlids) {
if (dlids.add(lprev.getId())) {
// returns true if not there already
fus2.add(exec.submit(new Runnable() {
public void run() {
lprev.recreateBuckets();
}
}));
}
}
prev--;
}
if (next < layers.size()) {
final Layer lnext = layers.get(next);
synchronized (dlids) {
if (dlids.add(lnext.getId())) {
// returns true if not there already
fus2.add(exec.submit(new Runnable() {
public void run() {
lnext.recreateBuckets();
}
}));
}
}
next++;
}
}
Utils.log2("done recreateBuckets chunk");
}
}));
}
Utils.wait(fus);
exec.submit(new Runnable() {
public void run() {
Utils.log2("waiting for TMLHandler fus...");
Utils.wait(fus2);
Utils.log2("done waiting TMLHandler fus.");
exec.shutdown();
}
});
} catch (Throwable t) {
IJError.print(t);
}
return new Object[] { root_tt, root_pt, root_lt, ht_pt_expanded };
}
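A sketch of how a caller might consume the array returned by getProjectData, following the order documented in its javadoc; handler is an assumed TMLHandler instance obtained after parsing a project XML file.
// Hypothetical sketch: unpack the result of getProjectData in the documented order.
final Object[] data = handler.getProjectData(true); // true: open the stored Displays
if (null != data) {
    final TemplateThing root_tt = (TemplateThing) data[0]; // root of the template tree
    final ProjectThing root_pt = (ProjectThing) data[1];   // root of the project tree (holds the Project)
    final LayerThing root_lt = (LayerThing) data[2];       // holds the top-level LayerSet
    final Object expandedStates = data[3];                  // expanded states of all ProjectThing objects
    // hand these to whatever builds the project trees and opens the displays
}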
use of ini.trakem2.Project in project TrakEM2 by trakem2.
the class DefaultTreeTransferHandler method canPerformAction.
public boolean canPerformAction(DNDTree target, DefaultMutableTreeNode dragged_node, int action, Point location) {
// prevent drags from non-tree components
if (null == dragged_node)
return false;
// Can't drop onto a TemplateTree
if (target instanceof TemplateTree) {
return false;
}
// Can't drag a node that contains a Project!
if (dragged_node.getUserObject() instanceof ProjectThing && ((ProjectThing) dragged_node.getUserObject()).getObject() instanceof Project) {
return false;
}
// Can't drag basic object nodes from a template tree. RECONSIDERED: I like it even if it looks inconsistent (but types are types!)
/*
if (dragged_node.getUserObject() instanceof TemplateThing && project.isBasicType(((Thing)dragged_node.getUserObject()).getType())) {
return false;
}
*/
// else, the target has to be not null
TreePath pathTarget = target.getPathForLocation(location.x, location.y);
if (pathTarget == null) {
target.setSelectionPath(null);
return false;
}
/* // debug
if (action == DnDConstants.ACTION_COPY) {
Utils.log("can drop: Action copy");
} else if (action == DnDConstants.ACTION_MOVE) {
Utils.log("can drop: Action move");
} else {
Utils.log("can drop: Unexpected action: " + action);
}
*/
target.setSelectionPath(pathTarget);
DefaultMutableTreeNode parent_node = (DefaultMutableTreeNode) pathTarget.getLastPathComponent();
// can be a Thing or an Attribute
Object parent_ob = parent_node.getUserObject();
Thing child_thing = (Thing) dragged_node.getUserObject();
if (DnDConstants.ACTION_MOVE == action || DnDConstants.ACTION_COPY == action) {
if (parent_ob instanceof ProjectThing) {
ProjectThing parent_thing = (ProjectThing) parent_ob;
// check if it's allowed to give to this parent such a child:
if (!parent_thing.uniquePathExists(child_thing.getType()) && !parent_thing.canHaveAsChild(child_thing)) {
// Utils.log("Not possible.");
return false;
}
// Reject a drop onto the dragged node's current parent, onto itself, or into any of its descendants.
if (parent_node == dragged_node.getParent() || dragged_node.isNodeDescendant(parent_node)) {
// Utils.log("preventing dragging onto itself or any of the self children.");
return false;
} else {
return true;
}
}
}
// default:
return false;
}
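The two checks near the end of canPerformAction implement the usual drag-and-drop guards: a node may not be dropped onto its current parent, onto itself, or into its own subtree. The standalone snippet below, unrelated to TrakEM2 classes, shows how DefaultMutableTreeNode.isNodeDescendant expresses that rule.
// Standalone illustration of the self-drop guard used above (not TrakEM2 code).
import javax.swing.tree.DefaultMutableTreeNode;

public class DropGuardDemo {
    public static void main(final String[] args) {
        final DefaultMutableTreeNode root = new DefaultMutableTreeNode("root");
        final DefaultMutableTreeNode a = new DefaultMutableTreeNode("a");
        final DefaultMutableTreeNode b = new DefaultMutableTreeNode("b");
        root.add(a);
        a.add(b);
        // Dropping a onto b must be rejected: b is a descendant of a.
        System.out.println(a.isNodeDescendant(b)); // true -> reject
        // Dropping b onto root is allowed: root is neither a descendant of b nor b's current parent.
        System.out.println(b.isNodeDescendant(root)); // false
        System.out.println(root == b.getParent()); // false -> allowed
    }
}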