use of ini.trakem2.display.LayerSet in project TrakEM2 by trakem2.
the class Loader method makePrescaledTiles.
/**
* Generate 256x256 tiles, as many as necessary, to cover the given srcRect, starting at max_scale. Designed to be slow but memory-capable.
*
* filename = z + "/" + row + "_" + column + "_" + s + ".jpg";
*
* row and column run from 0 to n in steps of 1
* that is, row = y / ( 256 * 2^s ) and column = x / ( 256 * 2^s )
*
* z : z-level (slice)
* x,y: the row and column
* s: scale, which is 1 / (2^s), in integers: 0, 1, 2 ...
*
* var MAX_S = Math.floor( Math.log( MAX_Y + 1 ) / Math.LN2 ) - Math.floor( Math.log( Y_TILE_SIZE ) / Math.LN2 ) - 1;
*
* The grid's root factor should not be more than 5
* At all levels, there should be an even number of rows and columns, except for the coarsest level.
* The coarsest level should be at least 5x5 tiles.
*
* Best results are obtained when the srcRect approaches or is a square. Black space will pad the right and bottom edges when the srcRect is not exactly a square.
* Only the area within the srcRect is ever included, even if actual data exists beyond.
*
* @return The watcher thread, for joining purposes, or null if the dialog is canceled or preconditions are not passed.
* @throws IllegalArgumentException if the type is not ImagePlus.GRAY8 or ImagePlus.COLOR_RGB.
*/
public Bureaucrat makePrescaledTiles(final Layer[] layers, final Class<?> clazz, final Rectangle srcRect, double max_scale_, final int c_alphas, final int type, String target_dir, final boolean from_original_images, final Saver saver, final int tileSide) {
if (null == layers || 0 == layers.length)
return null;
switch(type) {
case ImagePlus.GRAY8:
case ImagePlus.COLOR_RGB:
break;
default:
throw new IllegalArgumentException("Can only export for web with 8-bit or RGB");
}
// choose target directory
if (null == target_dir) {
final DirectoryChooser dc = new DirectoryChooser("Choose target directory");
target_dir = dc.getDirectory();
if (null == target_dir)
return null;
}
if (IJ.isWindows())
target_dir = target_dir.replace('\\', '/');
if (!target_dir.endsWith("/"))
target_dir += "/";
if (max_scale_ > 1) {
Utils.log("Prescaled Tiles: using max scale of 1.0");
// no point
max_scale_ = 1;
}
final String dir = target_dir;
final double max_scale = max_scale_;
final Worker worker = new Worker("Creating prescaled tiles") {
private void cleanUp() {
finishedWorking();
}
@Override
public void run() {
startedWorking();
try {
// project name
// String pname = layer[0].getProject().getTitle();
// create 'z' directories if they don't exist: check and ask!
// start with the highest scale level
final int[] best = determineClosestPowerOfTwo(srcRect.width > srcRect.height ? srcRect.width : srcRect.height);
final int edge_length = best[0];
final int n_edge_tiles = edge_length / tileSide;
Utils.log2("srcRect: " + srcRect);
Utils.log2("edge_length, n_edge_tiles, best[1] " + best[0] + ", " + n_edge_tiles + ", " + best[1]);
// thumbnail dimensions
// LayerSet ls = layer[0].getParent();
final double ratio = srcRect.width / (double) srcRect.height;
double thumb_scale = 1.0;
if (ratio >= 1) {
// width is larger than or equal to height
thumb_scale = 192.0 / srcRect.width;
} else {
thumb_scale = 192.0 / srcRect.height;
}
// Figure out layer indices, given that layers are not necessarily evenly spaced
final TreeMap<Integer, Layer> indices = new TreeMap<Integer, Layer>();
final ArrayList<Integer> missingIndices = new ArrayList<Integer>();
final double resolution_z_px;
final int smallestIndex, largestIndex;
if (1 == layers.length) {
indices.put(0, layers[0]);
resolution_z_px = layers[0].getZ();
smallestIndex = 0;
largestIndex = 0;
} else {
// Ensure layers are sorted by Z index and are unique pointers and unique in Z coordinate:
final TreeMap<Double, Layer> t = new TreeMap<Double, Layer>();
for (final Layer l1 : new HashSet<Layer>(Arrays.asList(layers))) {
final Layer l2 = t.get(l1.getZ());
if (null == l2) {
t.put(l1.getZ(), l1);
} else {
// Ignore the layer with fewer objects
if (l1.getDisplayables().size() > l2.getDisplayables().size()) {
t.put(l1.getZ(), l1);
Utils.log("Ignoring duplicate layer: " + l2);
}
}
}
// What is the mode thickness, measured by Z(i-1) - Z(i)?
// (Distance between the Z of two consecutive layers)
final HashMap<Double, Integer> counts = new HashMap<Double, Integer>();
final Layer prev = t.get(t.firstKey());
double modeThickness = 0;
int modeThicknessCount = 0;
for (final Layer la : t.tailMap(prev.getZ(), false).values()) {
// Thickness with 3-decimal precision only
final double d = ((int) ((la.getZ() - prev.getZ()) * 1000 + 0.5)) / 1000.0;
Integer c = counts.get(d);
//
if (null == c)
c = 0;
++c;
counts.put(d, c);
//
if (c > modeThicknessCount) {
modeThicknessCount = c;
modeThickness = d;
}
}
// Not pixelDepth
resolution_z_px = modeThickness * prev.getParent().getCalibration().pixelWidth;
// Assign an index to each layer, approximating each layer at modeThickness intervals
for (final Layer la : t.values()) {
indices.put((int) (la.getZ() / modeThickness + 0.5), la);
}
// First and last
smallestIndex = indices.firstKey();
largestIndex = indices.lastKey();
Utils.logAll("indices: " + smallestIndex + ", " + largestIndex);
// Which indices are missing?
for (int i = smallestIndex + 1; i < largestIndex; ++i) {
if (!indices.containsKey(i)) {
missingIndices.add(i);
}
}
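// Worked example (hypothetical values): for layers at Z = 0, 60, 120 and 240, the mode thickness is 60,
// so the assigned indices are 0, 1, 2 and 4, and index 3 ends up in missingIndices.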
}
// JSON metadata for CATMAID
{
final StringBuilder sb = new StringBuilder("{");
final LayerSet ls = layers[0].getParent();
final Calibration cal = ls.getCalibration();
sb.append("\"volume_width_px\": ").append(srcRect.width).append(',').append('\n').append("\"volume_height_px\": ").append(srcRect.height).append(',').append('\n').append("\"volume_sections\": ").append(largestIndex - smallestIndex + 1).append(',').append('\n').append("\"extension\": \"").append(saver.getExtension()).append('\"').append(',').append('\n').append("\"resolution_x\": ").append(cal.pixelWidth).append(',').append('\n').append("\"resolution_y\": ").append(cal.pixelHeight).append(',').append('\n').append("\"resolution_z\": ").append(resolution_z_px).append(',').append('\n').append("\"units\": \"").append(cal.getUnit()).append('"').append(',').append('\n').append("\"offset_x_px\": 0,\n").append("\"offset_y_px\": 0,\n").append("\"offset_z_px\": ").append(indices.get(indices.firstKey()).getZ() * cal.pixelWidth / cal.pixelDepth).append(',').append('\n').append("\"missing_layers\": [");
for (final Integer i : missingIndices) sb.append(i - smallestIndex).append(',');
// remove the last comma, if any missing indices were appended
if (!missingIndices.isEmpty()) sb.setLength(sb.length() - 1);
sb.append("]}");
if (!Utils.saveToFile(new File(dir + "metadata.json"), sb.toString())) {
Utils.logAll("WARNING: could not save " + dir + "metadata.json\nThe contents was:\n" + sb.toString());
}
}
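/* For illustration only (values are hypothetical), the resulting metadata.json looks like:
{"volume_width_px": 4096,
"volume_height_px": 4096,
"volume_sections": 100,
"extension": ".jpg",
"resolution_x": 4.0,
"resolution_y": 4.0,
"resolution_z": 40.0,
"units": "nm",
"offset_x_px": 0,
"offset_y_px": 0,
"offset_z_px": 0.0,
"missing_layers": [3,57]}
*/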
for (final Map.Entry<Integer, Layer> entry : indices.entrySet()) {
if (this.quit) {
cleanUp();
return;
}
final int index = entry.getKey() - smallestIndex;
final Layer layer = entry.getValue();
// 1 - create a directory 'z' named as the layer's index
String tile_dir = dir + index;
File fdir = new File(tile_dir);
int tag = 1;
// Ensure there is a usable directory:
while (fdir.exists() && !fdir.isDirectory()) {
fdir = new File(tile_dir + "_" + (tag++));
}
if (!fdir.exists()) {
fdir.mkdir();
Utils.log("Created directory " + fdir);
}
// if the directory already exists, just reuse it, overwriting its files as needed.
final String tmp = fdir.getAbsolutePath().replace('\\', '/');
if (!tile_dir.equals(tmp))
Utils.log("\tWARNING: directory will not be in the standard location.");
// debug:
Utils.log2("tile_dir: " + tile_dir + "\ntmp: " + tmp);
tile_dir = tmp;
if (!tile_dir.endsWith("/"))
tile_dir += "/";
// 2 - create layer thumbnail, max 192x192
ImagePlus thumb = getFlatImage(layer, srcRect, thumb_scale, c_alphas, type, clazz, true);
saver.save(thumb, tile_dir + "small");
// ImageSaver.saveAsJpeg(thumb.getProcessor(), tile_dir + "small.jpg", jpeg_quality, ImagePlus.COLOR_RGB != type);
flush(thumb);
thumb = null;
// 3 - fill directory with tiles
if (edge_length < tileSide) {
// edge_length is the side length of the virtual square tile map that covers an area equal to or larger than the desired srcRect (because all tiles have to be tileSide x tileSide in size)
// create single tile per layer
makeTile(layer, srcRect, max_scale, c_alphas, type, clazz, tile_dir + "0_0_0", saver);
} else {
// create pyramid of tiles
if (from_original_images) {
Utils.log("Exporting from web using original images");
// Create a giant 8-bit image of the whole layer from original images
double scale = 1;
Utils.log("Export srcRect: " + srcRect);
// WARNING: the snapshot will most likely be smaller than the virtual square image being chopped into tiles
ImageProcessor snapshot = null;
if (ImagePlus.COLOR_RGB == type) {
Utils.log("WARNING: ignoring alpha masks for 'use original images' and 'RGB color' options");
snapshot = Patch.makeFlatImage(type, layer, srcRect, scale, (ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), Color.black, true);
} else if (ImagePlus.GRAY8 == type) {
// Respect alpha masks and display range:
Utils.log("WARNING: ignoring scale for 'use original images' and '8-bit' options");
snapshot = ExportUnsignedShort.makeFlatImage((ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), srcRect, 0).convertToByte(true);
} else {
Utils.log("ERROR: don't know how to generate mipmaps for type '" + type + "'");
cleanUp();
return;
}
int scale_pow = 0;
int n_et = n_edge_tiles;
final ExecutorService exe = Utils.newFixedThreadPool("export-for-web");
final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
try {
while (n_et >= best[1]) {
final int snapWidth = snapshot.getWidth();
final int snapHeight = snapshot.getHeight();
final ImageProcessor source = snapshot;
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final String path = new StringBuilder(tile_dir).append(row).append('_').append(col).append('_').append(scale_pow).toString();
final int tileXStart = col * tileSide;
final int tileYStart = row * tileSide;
final int pixelOffset = tileYStart * snapWidth + tileXStart;
fus.add(exe.submit(new Callable<Boolean>() {
@Override
public Boolean call() {
if (ImagePlus.GRAY8 == type) {
final byte[] pixels = (byte[]) source.getPixels();
final byte[] p = new byte[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; sourceIndex = pixelOffset + y * snapWidth, y++) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ByteProcessor(tileSide, tileSide, p, GRAY_LUT)), path);
} else {
final int[] pixels = (int[]) source.getPixels();
final int[] p = new int[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; sourceIndex = pixelOffset + y * snapWidth, y++) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ColorProcessor(tileSide, tileSide, p)), path);
}
}
}));
}
}
//
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
//
Utils.wait(fus);
fus.clear();
// Scale snapshot in half with area averaging
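// (each 2x2 block of source pixels collapses into a single pixel of the half-sized snapshot)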
final ImageProcessor nextSnapshot;
if (ImagePlus.GRAY8 == type) {
nextSnapshot = new ByteProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final byte[] p1 = (byte[]) snapshot.getPixels();
final byte[] p2 = (byte[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
p2[i++] = (byte) (((p1[offset1a + x1] & 0xff) + (p1[offset1a + x1 + 1] & 0xff) + (p1[offset1b + x1] & 0xff) + (p1[offset1b + x1 + 1] & 0xff)) / 4);
}
}
} else {
nextSnapshot = new ColorProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final int[] p1 = (int[]) snapshot.getPixels();
final int[] p2 = (int[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
final int ka = p1[offset1a + x1], kb = p1[offset1a + x1 + 1], kc = p1[offset1b + x1], kd = p1[offset1b + x1 + 1];
// Average each channel independently
final int red = (((ka >> 16) & 0xff) + ((kb >> 16) & 0xff) + ((kc >> 16) & 0xff) + ((kd >> 16) & 0xff)) / 4;
final int green = (((ka >> 8) & 0xff) + ((kb >> 8) & 0xff) + ((kc >> 8) & 0xff) + ((kd >> 8) & 0xff)) / 4;
final int blue = ((ka & 0xff) + (kb & 0xff) + (kc & 0xff) + (kd & 0xff)) / 4;
p2[i++] = (red << 16) + (green << 8) + blue;
}
}
}
// Assign for next iteration
snapshot = nextSnapshot;
// Scale snapshot with a TransformMesh
/*
AffineModel2D aff = new AffineModel2D();
aff.set(0.5f, 0, 0, 0.5f, 0, 0);
ImageProcessor scaledSnapshot = new ByteProcessor((int)(snapshot.getWidth() * scale), (int)(snapshot.getHeight() * scale));
final CoordinateTransformMesh mesh = new CoordinateTransformMesh( aff, 32, snapshot.getWidth(), snapshot.getHeight() );
final mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh> mapping = new mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh>( mesh );
mapping.mapInterpolated(snapshot, scaledSnapshot, Runtime.getRuntime().availableProcessors());
// Assign for next iteration
snapshot = scaledSnapshot;
snapshotPixels = (byte[]) scaledSnapshot.getPixels();
*/
}
} catch (final Throwable t) {
IJError.print(t);
} finally {
exe.shutdown();
}
} else {
// max_scale; // WARNING if scale is different than 1, it will FAIL to set the next scale properly.
double scale = 1;
int scale_pow = 0;
// cached for local modifications in the loop, works as loop controller
int n_et = n_edge_tiles;
while (n_et >= best[1]) {
// best[1] is the minimal root found, i.e. 1,2,3,4,5, which is then multiplied by powers of two to make up the edge_length
// 0 < scale <= 1, so no precision lost
final int tile_side = (int) (256 / scale);
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final int i_tile = row * n_et + col;
Utils.showProgress(i_tile / (double) (n_et * n_et));
if (0 == i_tile % 100) {
// RGB int[] images
releaseToFit(tile_side * tile_side * 4 * 2);
}
if (this.quit) {
cleanUp();
return;
}
// TODO row and col are inverted
// in absolute coords, magnification applied later
final Rectangle tile_src = new Rectangle(srcRect.x + tile_side * row, srcRect.y + tile_side * col, tile_side, tile_side);
// crop bounds
if (tile_src.x + tile_src.width > srcRect.x + srcRect.width)
tile_src.width = srcRect.x + srcRect.width - tile_src.x;
if (tile_src.y + tile_src.height > srcRect.y + srcRect.height)
tile_src.height = srcRect.y + srcRect.height - tile_src.y;
// negative tile sizes will be made into black tiles
// (negative dimensions occur for tiles beyond the edges of srcRect, since the grid of tiles has to be of equal number of rows and cols)
// should be row_col_scale, but results in transposed tiles in googlebrains, so I reversed the order.
makeTile(layer, tile_src, scale, c_alphas, type, clazz, new StringBuilder(tile_dir).append(col).append('_').append(row).append('_').append(scale_pow).toString(), saver);
}
}
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
}
}
}
}
} catch (final Exception e) {
IJError.print(e);
} finally {
Utils.showProgress(1);
}
cleanUp();
finishedWorking();
}
};
// watcher thread
return Bureaucrat.createAndStart(worker, layers[0].getProject());
}
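The tile naming scheme documented above can be illustrated with a small, self-contained sketch. It is not part of the Loader: the real code obtains the grid edge via determineClosestPowerOfTwo, which also allows a non-power-of-two root of up to 5, whereas this sketch assumes a plain power of two; the srcRect dimensions and ".jpg" extension are hypothetical.

import java.awt.Rectangle;

public class TilePyramidSketch {
    public static void main(final String[] args) {
        final Rectangle srcRect = new Rectangle(0, 0, 4096, 3072); // hypothetical export area
        final int tileSide = 256;
        // side of the virtual square covering srcRect, rounded up to a power-of-two multiple of tileSide
        final int longest = Math.max(srcRect.width, srcRect.height);
        int edge = tileSide;
        while (edge < longest) edge *= 2;
        // at scale power s each tile covers tileSide * 2^s source pixels; file names follow z/row_col_s + extension
        for (int s = 0, nEdgeTiles = edge / tileSide; nEdgeTiles >= 1; s++, nEdgeTiles /= 2) {
            System.out.println("scale 1/" + (1 << s) + ": " + nEdgeTiles + "x" + nEdgeTiles + " tiles, e.g. 0/0_0_" + s + ".jpg");
        }
    }
}

For the 4096x3072 rectangle above this prints levels of 16x16 down to 1x1 tiles, matching the pyramid that the worker thread writes per layer directory.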
use of ini.trakem2.display.LayerSet in project TrakEM2 by trakem2.
the class Loader method importImages.
/**
* <p>Import images from the given text file, which is expected to contain 4 columns or optionally 9 columns:</p>
* <ul>
* <li>column 1: image file path (if base_dir is not null, it will be prepended)</li>
* <li>column 2: x coord [px]</li>
* <li>column 3: y coord [px]</li>
* <li>column 4: z coord [px] (layer_thickness will be multiplied to it if not zero)</li>
* </ul>
* <p>Optional columns: if a property is not known, it can be set to "-", which makes TrakEM2 open the file and find out by itself.</p>
* <ul>
* <li>column 5: width [px]</li>
* <li>column 6: height [px]</li>
* <li>column 7: min intensity [double] (for screen display)</li>
* <li>column 8: max intensity [double] (for screen display)</li>
* <li>column 9: type [integer] (pixel types according to ImagePlus types: 0=8-bit int gray, 1=16-bit int gray, 2=32-bit float gray, 3=8-bit indexed color, 4=32-bit RGB color)</li>
* </ul>
*
* <p>This function implements the "Import from text file" command.</p>
*
* <p>Layers will be automatically created as needed inside the LayerSet to which the given ref_layer belongs.</p>
* <p>
* The text file can contain comments that start with the # sign.
* </p>
* <p>
* Images will be imported in parallel, using as many cores as your machine has.
* </p>
* @param calibration_ transforms the read coordinates into pixel coordinates, including x,y,z, and layer thickness.
* @param scale_ Between 0 and 1. When lower than 1, a preprocessor script is created for the imported images, to scale them down.
*/
public Bureaucrat importImages(Layer ref_layer, String abs_text_file_path_, String column_separator_, double layer_thickness_, double calibration_, boolean homogenize_contrast_, float scale_, int border_width_) {
// check parameters: ask for good ones if necessary
if (null == abs_text_file_path_) {
final String[] file = Utils.selectFile("Select text file");
// user canceled dialog
if (null == file)
return null;
abs_text_file_path_ = file[0] + file[1];
}
if (null == column_separator_ || 0 == column_separator_.length() || Double.isNaN(layer_thickness_) || layer_thickness_ <= 0 || Double.isNaN(calibration_) || calibration_ <= 0) {
final Calibration cal = ref_layer.getParent().getCalibrationCopy();
final GenericDialog gdd = new GenericDialog("Options");
final String[] separators = new String[] { "tab", "space", "comma (,)" };
gdd.addMessage("Choose a layer to act as the zero for the Z coordinates:");
Utils.addLayerChoice("Base layer", ref_layer, gdd);
gdd.addChoice("Column separator: ", separators, separators[0]);
// default: 60 nm
gdd.addNumericField("Layer thickness: ", cal.pixelDepth, 2);
gdd.addNumericField("Calibration (data to pixels): ", 1, 2);
gdd.addCheckbox("Homogenize contrast layer-wise", homogenize_contrast_);
gdd.addSlider("Scale:", 0, 100, 100);
gdd.addNumericField("Hide border with alpha mask", 0, 0, 6, "pixels");
gdd.showDialog();
if (gdd.wasCanceled())
return null;
layer_thickness_ = gdd.getNextNumber();
if (layer_thickness_ < 0 || Double.isNaN(layer_thickness_)) {
Utils.log("Improper layer thickness value.");
return null;
}
calibration_ = gdd.getNextNumber();
if (0 == calibration_ || Double.isNaN(calibration_)) {
Utils.log("Improper calibration value.");
return null;
}
// not pixelDepth!
layer_thickness_ /= cal.pixelWidth;
ref_layer = ref_layer.getParent().getLayer(gdd.getNextChoiceIndex());
column_separator_ = "\t";
switch(gdd.getNextChoiceIndex()) {
case 1:
column_separator_ = " ";
break;
case 2:
column_separator_ = ",";
break;
default:
break;
}
homogenize_contrast_ = gdd.getNextBoolean();
final double sc = gdd.getNextNumber();
if (Double.isNaN(sc))
scale_ = 1.0f;
else
scale_ = ((float) sc) / 100.0f;
final int border = (int) gdd.getNextNumber();
if (border < 0) {
Utils.log("Nonsensical border value: " + border);
return null;
}
border_width_ = border;
}
if (Float.isNaN(scale_) || scale_ < 0 || scale_ > 1) {
Utils.log("Non-sensical scale: " + scale_ + "\nUsing scale of 1 instead.");
scale_ = 1;
}
// make vars accessible from inner threads:
final Layer base_layer = ref_layer;
final String abs_text_file_path = abs_text_file_path_;
final String column_separator = column_separator_;
final double layer_thickness = layer_thickness_;
final double calibration = calibration_;
final boolean homogenize_contrast = homogenize_contrast_;
final float scale = (float) scale_;
final int border_width = border_width_;
return Bureaucrat.createAndStart(new Worker.Task("Importing images", true) {
@Override
public void exec() {
try {
// 1 - read text file
final String[] lines = Utils.openTextFileLines(abs_text_file_path);
if (null == lines || 0 == lines.length) {
Utils.log2("No images to import from " + abs_text_file_path);
return;
}
ContrastEnhancerWrapper cew = null;
if (homogenize_contrast) {
cew = new ContrastEnhancerWrapper();
cew.showDialog();
}
final String sep2 = column_separator + column_separator;
// 2 - set a base dir path if necessary
String base_dir = null;
// to wait on mipmap regeneration
final Vector<Future<?>> fus = new Vector<Future<?>>();
final LayerSet layer_set = base_layer.getParent();
final double z_zero = base_layer.getZ();
final AtomicInteger n_imported = new AtomicInteger(0);
final Set<Layer> touched_layers = new HashSet<Layer>();
final int NP = Runtime.getRuntime().availableProcessors();
int np = NP;
switch(np) {
case 1:
case 2:
break;
default:
np = np / 2;
break;
}
final ExecutorService ex = Utils.newFixedThreadPool(np, "import-images");
final List<Future<?>> imported = new ArrayList<Future<?>>();
final Worker wo = this;
final String script_path;
// If scale is at least 1/100 lower than 1, then:
if (Math.abs(scale - (int) scale) > 0.01) {
// Assume source and target sigma of 0.5
final double sigma = Math.sqrt(Math.pow(1 / scale, 2) - 0.25);
final String script = new StringBuilder()
.append("import ij.ImagePlus;\n")
.append("import ij.process.ImageProcessor;\n")
.append("import ij.plugin.filter.GaussianBlur;\n")
.append("GaussianBlur blur = new GaussianBlur();\n")
// accuracy as in ij.plugin.filter.GaussianBlur
.append("double accuracy = (imp.getType() == ImagePlus.GRAY8 || imp.getType() == ImagePlus.COLOR_RGB) ? 0.002 : 0.0002;\n")
.append("imp.getProcessor().setInterpolationMethod(ImageProcessor.NONE);\n")
.append("blur.blurGaussian(imp.getProcessor(),").append(sigma).append(',').append(sigma).append(",accuracy);\n")
.append("imp.setProcessor(imp.getTitle(), imp.getProcessor().resize((int)(imp.getWidth() * ").append(scale).append("), (int)(imp.getHeight() * ").append(scale).append(")));")
.toString();
File f = new File(getStorageFolder() + "resize-" + scale + ".bsh");
int v = 1;
while (f.exists()) {
f = new File(getStorageFolder() + "resize-" + scale + "." + v + ".bsh");
v++;
}
script_path = Utils.saveToFile(f, script) ? f.getAbsolutePath() : null;
if (null == script_path) {
Utils.log("Could NOT save a preprocessor script for image scaling\nat path " + f.getAbsolutePath());
}
} else {
script_path = null;
}
Utils.log("Scaling script path is " + script_path);
final AtomicReference<Triple<Integer, Integer, ByteProcessor>> last_mask = new AtomicReference<Triple<Integer, Integer, ByteProcessor>>();
// 3 - parse each line
for (int i = 0; i < lines.length; i++) {
if (Thread.currentThread().isInterrupted() || hasQuitted()) {
this.quit();
return;
}
// process line
// first thing is the backslash removal, before they get processed at all
String line = lines[i].replace('\\', '/').trim();
final int ic = line.indexOf('#');
// remove comment at end of line if any
if (-1 != ic)
line = line.substring(0, ic);
if (0 == line.length() || '#' == line.charAt(0))
continue;
// reduce line, so that separators are really unique
while (-1 != line.indexOf(sep2)) {
line = line.replaceAll(sep2, column_separator);
}
final String[] column = line.split(column_separator);
if (column.length < 4) {
Utils.log("Less than 4 columns: can't import from line " + i + " : " + line);
continue;
}
// obtain coordinates
double x = 0, y = 0, z = 0;
try {
x = Double.parseDouble(column[1].trim());
y = Double.parseDouble(column[2].trim());
z = Double.parseDouble(column[3].trim());
} catch (final NumberFormatException nfe) {
Utils.log("Non-numeric value in a numeric column at line " + i + " : " + line);
continue;
}
x *= calibration;
y *= calibration;
z = z * calibration + z_zero;
// obtain path
String path = column[0].trim();
if (0 == path.length())
continue;
// check if path is relative
if ((!IJ.isWindows() && '/' != path.charAt(0)) || (IJ.isWindows() && 1 != path.indexOf(":/"))) {
// path is relative.
if (null == base_dir) {
// may not be null if another thread that got the lock first set it to non-null
// Ask for source directory
final DirectoryChooser dc = new DirectoryChooser("Choose source directory");
final String dir = dc.getDirectory();
if (null == dir) {
// quit all threads
return;
}
base_dir = Utils.fixDir(dir);
}
}
if (null != base_dir)
path = base_dir + path;
final File f = new File(path);
if (!f.exists()) {
Utils.log("No file found for path " + path);
continue;
}
// will create a new Layer if necessary
final Layer layer = layer_set.getLayer(z, layer_thickness, true);
touched_layers.add(layer);
final String imagefilepath = path;
final double xx = x * scale;
final double yy = y * scale;
final Callable<Patch> creator;
if (column.length >= 9) {
creator = new Callable<Patch>() {
private final int parseInt(final String t) {
if (t.equals("-"))
return -1;
return Integer.parseInt(t);
}
private final double parseDouble(final String t) {
if (t.equals("-"))
return Double.NaN;
return Double.parseDouble(t);
}
@Override
public Patch call() throws Exception {
int o_width = parseInt(column[4].trim());
int o_height = parseInt(column[5].trim());
double min = parseDouble(column[6].trim());
double max = parseDouble(column[7].trim());
int type = parseInt(column[8].trim());
if (-1 == type || -1 == o_width || -1 == o_height) {
// Read them from the file header
final ImageFileHeader ifh = new ImageFileHeader(imagefilepath);
o_width = ifh.width;
o_height = ifh.height;
type = ifh.type;
if (!ifh.isSupportedType()) {
Utils.log("Incompatible image type: " + imagefilepath);
return null;
}
}
ImagePlus imp = null;
if (Double.isNaN(min) || Double.isNaN(max)) {
imp = openImagePlus(imagefilepath);
min = imp.getProcessor().getMin();
max = imp.getProcessor().getMax();
}
final Patch patch = new Patch(layer.getProject(), new File(imagefilepath).getName(), o_width, o_height, o_width, o_height, type, 1.0f, Color.yellow, false, min, max, new AffineTransform(1, 0, 0, 1, xx, yy), imagefilepath);
if (null != script_path && null != imp) {
// For use in setting the preprocessor script
cacheImagePlus(patch.getId(), imp);
}
return patch;
}
};
} else {
creator = new Callable<Patch>() {
@Override
public Patch call() throws Exception {
IJ.redirectErrorMessages();
final ImageFileHeader ifh = new ImageFileHeader(imagefilepath);
final int o_width = ifh.width;
final int o_height = ifh.height;
final int type = ifh.type;
if (!ifh.isSupportedType()) {
Utils.log("Incompatible image type: " + imagefilepath);
return null;
}
double min = 0;
double max = 255;
switch(type) {
case ImagePlus.GRAY16:
case ImagePlus.GRAY32:
// Determine suitable min and max
// TODO Stream through the image, do not load it!
final ImagePlus imp = openImagePlus(imagefilepath);
if (null == imp) {
Utils.log("Ignoring unopenable image from " + imagefilepath);
return null;
}
min = imp.getProcessor().getMin();
max = imp.getProcessor().getMax();
break;
}
// add Patch
final Patch patch = new Patch(layer.getProject(), new File(imagefilepath).getName(), o_width, o_height, o_width, o_height, type, 1.0f, Color.yellow, false, min, max, new AffineTransform(1, 0, 0, 1, xx, yy), imagefilepath);
return patch;
}
};
}
// Otherwise, images would end up loaded twice for no reason
if (0 == (i % (NP + NP))) {
final ArrayList<Future<?>> a = new ArrayList<Future<?>>(NP + NP);
synchronized (fus) {
// .add is also synchronized, fus is a Vector
int k = 0;
while (!fus.isEmpty() && k < NP) {
a.add(fus.remove(0));
k++;
}
}
for (final Future<?> fu : a) {
try {
if (wo.hasQuitted())
return;
fu.get();
} catch (final Throwable t) {
t.printStackTrace();
}
}
}
imported.add(ex.submit(new Runnable() {
@Override
public void run() {
if (wo.hasQuitted())
return;
/* */
IJ.redirectErrorMessages();
Patch patch;
try {
patch = creator.call();
} catch (final Exception e) {
e.printStackTrace();
Utils.log("Could not load patch from " + imagefilepath);
return;
}
// Set the script if any
if (null != script_path) {
try {
patch.setPreprocessorScriptPath(script_path);
} catch (final Throwable t) {
Utils.log("FAILED to set a scaling preprocessor script to patch " + patch);
IJError.print(t);
}
}
// Set an alpha mask to crop away the borders
if (border_width > 0) {
final Triple<Integer, Integer, ByteProcessor> m = last_mask.get();
if (null != m && m.a == patch.getOWidth() && m.b == patch.getOHeight()) {
// Reuse
patch.setAlphaMask(m.c);
} else {
// Create new mask
final ByteProcessor mask = new ByteProcessor(patch.getOWidth(), patch.getOHeight());
mask.setValue(255);
mask.setRoi(new Roi(border_width, border_width, mask.getWidth() - 2 * border_width, mask.getHeight() - 2 * border_width));
mask.fill();
patch.setAlphaMask(mask);
// Store as last
last_mask.set(new Triple<Integer, Integer, ByteProcessor>(mask.getWidth(), mask.getHeight(), mask));
}
}
if (!homogenize_contrast) {
fus.add(regenerateMipMaps(patch));
}
synchronized (layer) {
layer.add(patch, true);
}
wo.setTaskName("Imported " + (n_imported.incrementAndGet() + 1) + "/" + lines.length);
}
}));
}
Utils.wait(imported);
ex.shutdown();
if (0 == n_imported.get()) {
Utils.log("No images imported.");
return;
}
base_layer.getParent().setMinimumDimensions();
Display.repaint(base_layer.getParent());
recreateBuckets(touched_layers);
if (homogenize_contrast) {
setTaskName("Enhance contrast");
// layer-wise (layer order is irrelevant):
cew.applyLayerWise(touched_layers);
cew.shutdown();
}
Utils.wait(fus);
} catch (final Exception e) {
IJError.print(e);
}
}
}, base_layer.getProject());
}
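For reference, a hypothetical import text file for this method, tab-separated and using "-" for unknown optional values, might look like the following (all paths and numbers are made up):

# path	x	y	z	[width	height	min	max	type]
/data/sections/sec_0001.tif	0	0	0
/data/sections/sec_0002.tif	2048	0	0	2048	2048	-	-	0
/data/sections/sec_0003.tif	0	2048	1	2048	2048	0	255	0

A call with explicit arguments could then be written as a sketch, assuming loader is the project's Loader and base_layer is the layer that should act as Z zero (the numeric values are just one plausible choice):

loader.importImages(base_layer, "/data/import.txt", "\t", 60.0, 1.0, false, 1.0f, 0);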
use of ini.trakem2.display.LayerSet in project TrakEM2 by trakem2.
the class TMLHandler method getProjectData.
/**
* returns 4 objects packed in an array:
* <pre>
* [0] = root TemplateThing
* [1] = root ProjectThing (contains Project instance)
* [2] = root LayerThing (contains the top-level LayerSet)
* [3] = expanded states of all ProjectThing objects
* </pre>
* <p>
* Also, triggers the reconstruction of links and assignment of Displayable objects to their layer.
* </p>
*/
public Object[] getProjectData(final boolean open_displays) {
if (null == project)
return null;
this.open_displays = open_displays;
// Links exist between Displayable objects.
for (final Displayable d : ht_displayables.values()) {
String olinks = ht_links.get(d);
// not linked
if (null == olinks)
continue;
String[] links = olinks.split(",");
Long lid = null;
for (int i = 0; i < links.length; i++) {
try {
lid = new Long(links[i]);
} catch (NumberFormatException nfe) {
Utils.log2("Ignoring incorrectly formated link '" + links[i] + "' for ob " + d);
continue;
}
Displayable partner = ht_displayables.get(lid);
if (null != partner)
d.link(partner, false);
else
Utils.log("TMLHandler: can't find partner with id=" + links[i] + " for Displayable with id=" + d.getId());
}
}
// 1.2 - Reconstruct linked properties
for (final Map.Entry<Displayable, Map<Long, Map<String, String>>> lpe : all_linked_props.entrySet()) {
final Displayable origin = lpe.getKey();
for (final Map.Entry<Long, Map<String, String>> e : lpe.getValue().entrySet()) {
final Displayable target = ht_displayables.get(e.getKey());
if (null == target) {
Utils.log("Setting linked properties for origin " + origin.getId() + ":\n\t* Could not find target displayable #" + e.getKey());
continue;
}
origin.setLinkedProperties(target, e.getValue());
}
}
// 2 - Add Displayable objects to ProjectThing that can contain them
for (final Map.Entry<Long, ProjectThing> entry : ht_oid_pt.entrySet()) {
ProjectThing pt = entry.getValue();
Object od = ht_displayables.remove(entry.getKey());
// Utils.log("==== processing: Displayable [" + od + "] vs. ProjectThing [" + pt + "]");
if (null != od) {
pt.setObject(od);
} else {
Utils.log("#### Failed to find a Displayable for ProjectThing " + pt + " #####");
}
}
// 3 - Assign a layer pointer to ZDisplayable objects
for (final ZDisplayable zd : ht_zdispl.values()) {
// zd.setLayer((Layer)zd.getLayerSet().getLayers().get(0));
zd.setLayer(zd.getLayerSet().getLayer(0));
}
// 4 - Assign layers to Treeline nodes
for (final Layer la : al_layers) {
final List<Node<?>> list = node_layer_table.remove(la.getId());
if (null == list)
continue;
for (final Node<?> nd : list) nd.setLayer(la);
}
if (!node_layer_table.isEmpty()) {
Utils.log("ERROR: node_layer_table is not empty!");
}
// 5 - Assign root nodes to Treelines, now that all nodes have a layer
for (final Map.Entry<Tree<?>, Node<?>> e : tree_root_nodes.entrySet()) {
if (null == e.getValue()) {
// Utils.log2("Ignoring, applies to new Treeline format only.");
continue;
}
// Can't compile with <?>
// will generate node caches of each Treeline
e.getKey().setRoot((Node) e.getValue());
}
tree_root_nodes.clear();
// Assign colors to nodes
for (final Map.Entry<Color, Collection<Node<?>>> e : node_colors.entrySet()) {
for (final Node<?> nd : e.getValue()) {
nd.setColor(e.getKey());
}
}
node_colors.clear();
// 6 - Run legacy operations
for (final Runnable r : legacy) {
r.run();
}
try {
// Create a table with all layer ids vs layer instances:
final HashMap<Long, Layer> ht_lids = new HashMap<Long, Layer>();
for (final Layer layer : al_layers) {
ht_lids.put(new Long(layer.getId()), layer);
}
// Spawn threads to recreate buckets, starting from the subset of displays to open
int n = Runtime.getRuntime().availableProcessors();
switch(n) {
case 1:
break;
case 2:
case 3:
case 4:
n--;
break;
default:
n -= 2;
break;
}
final ExecutorService exec = Utils.newFixedThreadPool(n, "TMLHandler-recreateBuckets");
final Set<Long> dlids = new HashSet<Long>();
final LayerSet layer_set = (LayerSet) root_lt.getObject();
final List<Future<?>> fus = new ArrayList<Future<?>>();
final List<Future<?>> fus2 = new ArrayList<Future<?>>();
for (final HashMap<String, String> ht_attributes : al_displays) {
String ob = ht_attributes.get("layer_id");
if (null == ob)
continue;
final Long lid = new Long(ob);
dlids.add(lid);
final Layer la = ht_lids.get(lid);
if (null == la) {
ht_lids.remove(lid);
continue;
}
// to open later:
new Display(project, Long.parseLong(ht_attributes.get("id")), la, ht_attributes);
fus.add(exec.submit(new Runnable() {
public void run() {
la.recreateBuckets();
}
}));
}
fus.add(exec.submit(new Runnable() {
public void run() {
// only for ZDisplayable
layer_set.recreateBuckets(false);
}
}));
// Ensure launching:
if (dlids.isEmpty() && layer_set.size() > 0) {
dlids.add(layer_set.getLayer(0).getId());
}
final List<Layer> layers = layer_set.getLayers();
for (final Long lid : new HashSet<Long>(dlids)) {
fus.add(exec.submit(new Runnable() {
public void run() {
int start = layer_set.indexOf(layer_set.getLayer(lid.longValue()));
int next = start + 1;
int prev = start - 1;
while (next < layer_set.size() || prev > -1) {
if (prev > -1) {
final Layer lprev = layers.get(prev);
synchronized (dlids) {
if (dlids.add(lprev.getId())) {
// returns true if not there already
fus2.add(exec.submit(new Runnable() {
public void run() {
lprev.recreateBuckets();
}
}));
}
}
prev--;
}
if (next < layers.size()) {
final Layer lnext = layers.get(next);
synchronized (dlids) {
if (dlids.add(lnext.getId())) {
// returns true if not there already
fus2.add(exec.submit(new Runnable() {
public void run() {
lnext.recreateBuckets();
}
}));
}
}
next++;
}
}
Utils.log2("done recreateBuckets chunk");
}
}));
}
Utils.wait(fus);
exec.submit(new Runnable() {
public void run() {
Utils.log2("waiting for TMLHandler fus...");
Utils.wait(fus2);
Utils.log2("done waiting TMLHandler fus.");
exec.shutdown();
}
});
} catch (Throwable t) {
IJError.print(t);
}
return new Object[] { root_tt, root_pt, root_lt, ht_pt_expanded };
}
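A caller consuming the returned array would unpack it along the lines of this sketch; the casts follow the javadoc above, handler is assumed to be the TMLHandler used to parse the project XML, and the variable names are illustrative:

final Object[] data = handler.getProjectData(true);
if (null != data) {
    final TemplateThing root_template = (TemplateThing) data[0];
    final ProjectThing root_project_thing = (ProjectThing) data[1]; // contains the Project instance
    final LayerThing root_layer_thing = (LayerThing) data[2]; // contains the top-level LayerSet
    final Object expanded_states = data[3]; // expanded states of all ProjectThing objects
}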
use of ini.trakem2.display.LayerSet in project TrakEM2 by trakem2.
the class Merger method createTable.
private static JTable createTable(final HashSet<ZDisplayable> hs, final String column_title, final Project p1, final Project p2) {
final TwoColumnModel tcm = new TwoColumnModel(hs, column_title);
final JTable table = new JTable(tcm);
table.setDefaultRenderer(table.getColumnClass(0), new DefaultTableCellRenderer() {
private static final long serialVersionUID = 1L;
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
final Component c = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
if (1 == column && tcm.sent[row]) {
c.setBackground(Color.green);
c.setForeground(Color.white);
} else if (isSelected) {
c.setForeground(table.getSelectionForeground());
c.setBackground(table.getSelectionBackground());
} else {
c.setBackground(Color.white);
c.setForeground(Color.black);
}
return c;
}
});
table.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent me) {
final JTable src = (JTable) me.getSource();
final TwoColumnModel model = (TwoColumnModel) src.getModel();
final int row = src.rowAtPoint(me.getPoint()), col = src.columnAtPoint(me.getPoint());
if (2 == me.getClickCount()) {
Object ob = model.getValueAt(row, col);
if (ob instanceof ZDisplayable) {
ZDisplayable zd = (ZDisplayable) ob;
Display df = Display.getOrCreateFront(zd.getProject());
// also select
df.show(zd.getFirstLayer(), zd, true, false);
}
} else if (me.isPopupTrigger()) {
JPopupMenu popup = new JPopupMenu();
final JMenuItem send = new JMenuItem("Send selection");
popup.add(send);
send.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
ArrayList<ZDisplayable> col = new ArrayList<ZDisplayable>();
for (final int i : src.getSelectedRows()) {
col.add((ZDisplayable) model.getValueAt(i, 0));
}
if (col.isEmpty())
return;
// the other
Project target = col.get(0).getProject() == p1 ? p2 : p1;
LayerSet ls = target.getRootLayerSet();
ArrayList<ZDisplayable> copies = new ArrayList<ZDisplayable>();
for (ZDisplayable zd : col) {
copies.add((ZDisplayable) zd.clone(target, false));
model.sent[row] = true;
}
// 1. To the LayerSet:
ls.addAll(copies);
// 2. To the ProjectTree:
target.getProjectTree().insertSegmentations(copies);
// Update:
model.fireTableDataChanged();
}
});
popup.show(table, me.getX(), me.getY());
}
}
});
return table;
}
use of ini.trakem2.display.LayerSet in project TrakEM2 by trakem2.
the class Merger method compare.
/**
* Take two projects and find out what is different among them,
* independent of id.
*/
public static final void compare(final Project p1, final Project p2) {
Utils.log("Be warned: only Treeline, AreaTree and Connector are considered at the moment.");
final LayerSet ls1 = p1.getRootLayerSet(), ls2 = p2.getRootLayerSet();
final Collection<ZDisplayable> zds1 = ls1.getZDisplayables(), zds2 = ls2.getZDisplayables();
final HashSet<Class<?>> accepted = new HashSet<Class<?>>();
accepted.add(Treeline.class);
accepted.add(AreaTree.class);
accepted.add(Connector.class);
final HashMap<Displayable, List<Change>> matched = new HashMap<Displayable, List<Change>>();
final HashSet<ZDisplayable> empty1 = new HashSet<ZDisplayable>(), empty2 = new HashSet<ZDisplayable>();
final HashSet<ZDisplayable> unmatched1 = new HashSet<ZDisplayable>(), unmatched2 = new HashSet<ZDisplayable>(zds2);
// Remove instances of classes not accepted
for (final Iterator<ZDisplayable> it = unmatched2.iterator(); it.hasNext(); ) {
ZDisplayable zd = it.next();
if (!accepted.contains(zd.getClass())) {
it.remove();
continue;
}
if (zd.isDeletable()) {
it.remove();
empty2.add(zd);
}
}
zds2.removeAll(empty2);
final AtomicInteger counter = new AtomicInteger(0);
// Find, for each ZDisplayable of the first project, a matching one in the second, or at least one or more that are similar in that they have some nodes in common.
try {
ini.trakem2.parallel.Process.unbound(zds1, new TaskFactory<ZDisplayable, Object>() {
@Override
public Object process(final ZDisplayable zd1) {
Utils.showProgress(counter.getAndIncrement() / (float) zds1.size());
if (!accepted.contains(zd1.getClass())) {
Utils.log("Ignoring: [A] " + zd1);
return null;
}
if (zd1.isDeletable()) {
synchronized (empty1) {
empty1.add(zd1);
}
return null;
}
final List<Change> cs = new ArrayList<Change>();
for (final ZDisplayable zd2 : zds2) {
// Same class?
if (zd1.getClass() != zd2.getClass())
continue;
if (zd1 instanceof Tree<?> && zd2 instanceof Tree<?>) {
Change c = compareTrees(zd1, zd2);
if (c.hasSimilarNodes()) {
cs.add(c);
if (1 == cs.size()) {
synchronized (matched) {
matched.put(zd1, cs);
}
}
synchronized (unmatched2) {
unmatched2.remove(zd2);
}
}
// debug
if (zd1.getId() == zd2.getId()) {
Utils.log("zd1 #" + zd1.getId() + " is similar to #" + zd2.getId() + ": " + c.hasSimilarNodes());
}
}
}
if (cs.isEmpty()) {
synchronized (unmatched1) {
unmatched1.add(zd1);
}
}
return null;
}
});
} catch (Exception e) {
IJError.print(e);
}
// reset
Utils.showProgress(1);
Utils.log("matched.size(): " + matched.size());
makeGUI(p1, p2, empty1, empty2, matched, unmatched1, unmatched2);
}
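As a usage note, the comparison is launched with the two already-open projects; a minimal sketch, assuming p1 and p2 reference those Project instances:

Merger.compare(p1, p2); // logs progress and opens the merge GUI listing matched, unmatched and empty ZDisplayables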