use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class Display method insertStack.
/**
* @param stack_patch one Patch out of the series of Patches that together form a stack.
*/
private boolean insertStack(final ProjectThing target_landmarks, final Project source, final ProjectThing source_landmarks, final Patch stack_patch) {
final List<Ball> l1 = new ArrayList<Ball>();
final List<Ball> l2 = new ArrayList<Ball>();
// source is the one that has the stack_patch
final Collection<ProjectThing> b1s = source_landmarks.findChildrenOfType("ball");
// target is this
final Collection<ProjectThing> b2s = target_landmarks.findChildrenOfType("ball");
final HashSet<String> seen = new HashSet<String>();
for (final ProjectThing b1 : b1s) {
final Ball ball1 = (Ball) b1.getObject();
if (null == ball1) {
Utils.log("ERROR: there's an empty 'ball' node in target project" + project.toString());
return false;
}
final String title1 = ball1.getTitle();
for (final ProjectThing b2 : b2s) {
final Ball ball2 = (Ball) b2.getObject();
if (null == ball2) {
Utils.log("ERROR: there's an empty 'ball' node in source project" + source.toString());
return false;
}
if (title1.equals(ball2.getTitle())) {
if (seen.contains(title1))
continue;
seen.add(title1);
l1.add(ball1);
l2.add(ball2);
}
}
}
if (l1.size() < 4) {
Utils.log("ERROR: found only " + l1.size() + " common landmarks: needs at least 4!");
return false;
}
// Extract coordinates of source project landmarks, in patch stack coordinate space
final List<double[]> c1 = new ArrayList<double[]>();
for (final Ball ball1 : l1) {
final Map<Layer, double[]> m = ball1.getRawBalls();
if (1 != m.size()) {
Utils.log("ERROR: ball object " + ball1 + " from target project " + project + " has " + m.size() + " balls instead of just 1.");
return false;
}
final Map.Entry<Layer, double[]> e = m.entrySet().iterator().next();
final Layer layer = e.getKey();
final double[] xyr = e.getValue();
final double[] fin = new double[] { xyr[0], xyr[1] };
final AffineTransform affine = ball1.getAffineTransformCopy();
try {
affine.preConcatenate(stack_patch.getAffineTransform().createInverse());
} catch (final Exception nite) {
IJError.print(nite);
return false;
}
final double[] fout = new double[2];
affine.transform(fin, 0, fout, 0, 1);
c1.add(new double[] { fout[0], fout[1], layer.getParent().indexOf(layer) });
}
// Extract coordinates of target (this) project landmarks, in calibrated world space
final List<double[]> c2 = new ArrayList<double[]>();
for (final Ball ball2 : l2) {
final double[][] b = ball2.getBalls();
if (1 != b.length) {
Utils.log("ERROR: ball object " + ball2 + " from source project " + source + " has " + b.length + " balls instead of just 1.");
return false;
}
final double[] fin = new double[] { b[0][0], b[0][1] };
final AffineTransform affine = ball2.getAffineTransformCopy();
final double[] fout = new double[2];
affine.transform(fin, 0, fout, 0, 1);
c2.add(new double[] { fout[0], fout[1], b[0][2] });
}
// Print landmarks:
Utils.log("Landmarks:");
for (Iterator<double[]> it1 = c1.iterator(), it2 = c2.iterator(); it1.hasNext(); ) {
Utils.log(Utils.toString(it1.next()) + " <--> " + Utils.toString(it2.next()));
}
// Create point matches
final List<PointMatch> pm = new ArrayList<PointMatch>();
for (Iterator<double[]> it1 = c1.iterator(), it2 = c2.iterator(); it1.hasNext(); ) {
pm.add(new mpicbg.models.PointMatch(new mpicbg.models.Point(it1.next()), new mpicbg.models.Point(it2.next())));
}
// Estimate AffineModel3D
final AffineModel3D aff3d = new AffineModel3D();
try {
aff3d.fit(pm);
} catch (final Exception e) {
IJError.print(e);
return false;
}
// Create and add the Stack
final String path = stack_patch.getImageFilePath();
final Stack st = new Stack(project, new File(path).getName(), 0, 0, getLayerSet().getLayers().get(0), path);
st.setInvertibleCoordinateTransform(aff3d);
getLayerSet().add(st);
return true;
}
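The fitting step above can be exercised in isolation. What follows is a minimal, self-contained sketch, not TrakEM2 code: it assumes only the mpicbg.models classes already used in insertStack (AffineModel3D, Point, PointMatch), and the landmark coordinates and the class name LandmarkFitSketch are hypothetical. As in the method, at least four non-coplanar correspondences are required.
import java.util.ArrayList;
import java.util.List;
import mpicbg.models.AffineModel3D;
import mpicbg.models.Point;
import mpicbg.models.PointMatch;
public class LandmarkFitSketch {
    public static void main(final String[] args) throws Exception {
        // Hypothetical landmark pairs: {x, y, layer index} in stack space vs. calibrated world space
        final double[][] stackSide = { { 0, 0, 0 }, { 100, 0, 0 }, { 0, 100, 1 }, { 100, 100, 2 } };
        final double[][] worldSide = { { 10, 20, 0 }, { 110, 20, 0 }, { 10, 120, 50 }, { 110, 120, 100 } };
        final List<PointMatch> matches = new ArrayList<PointMatch>();
        for (int i = 0; i < stackSide.length; i++) {
            matches.add(new PointMatch(new Point(stackSide[i]), new Point(worldSide[i])));
        }
        final AffineModel3D model = new AffineModel3D();
        // needs at least 4 non-coplanar correspondences, hence the size check in insertStack
        model.fit(matches);
        // map an arbitrary stack-space coordinate into world space
        final double[] world = model.apply(new double[] { 50, 50, 1 });
        System.out.println(world[0] + ", " + world[1] + ", " + world[2]);
    }
}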
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class Loader method insertGrid.
/**
* Insert grid in layer (with optional stitching)
*
* @param layer The Layer to insert the grid into
* @param dir The base dir of the images to open
* @param first_image_name name of the first image in the list
* @param cols The list of columns, each an array of the String file names in that column.
* @param bx The top-left X coordinate of the grid to insert
* @param by The top-left Y coordinate of the grid to insert
* @param bt_overlap bottom-top overlap of the images
* @param lr_overlap left-right overlap of the images
* @param link_images Link images to their neighbors
* @param stitch_tiles montage option
* @param cc_percent_overlap tiles overlap
* @param cc_scale tiles scaling previous to stitching (1 = no scaling)
* @param min_R regression threshold (minimum acceptable R)
* @param homogenize_contrast contrast homogenization option
* @param stitching_rule stitching rule (upper left corner or free)
*/
private void insertGrid(final Layer layer, final String dir_, final String first_image_name, final int n_images, final ArrayList<String[]> cols, final double bx, final double by, final double bt_overlap, final double lr_overlap, final boolean link_images, final boolean stitch_tiles, final boolean homogenize_contrast, final StitchingTEM.PhaseCorrelationParam pc_param, final Worker worker) {
// create a Worker, then give it to the Bureaucrat
try {
String dir = dir_;
final ArrayList<Patch> al = new ArrayList<Patch>();
Utils.showProgress(0.0D);
// less repaints on IJ status bar
opener.setSilentMode(true);
int x = 0;
int y = 0;
int largest_y = 0;
ImagePlus img = null;
// open the selected image, to use as reference for width and height
// Windows-safe: normalize path separators
dir = dir.replace('\\', '/');
if (!dir.endsWith("/"))
dir += "/";
String path = dir + first_image_name;
// TODO arbitrary x3 factor
releaseToFit(new File(path).length() * 3);
IJ.redirectErrorMessages();
ImagePlus first_img = openImagePlus(path);
if (null == first_img) {
Utils.log("Selected image to open first is null.");
return;
}
final int first_image_width = first_img.getWidth();
final int first_image_height = first_img.getHeight();
final int first_image_type = first_img.getType();
// start
final Patch[][] pall = new Patch[cols.size()][((String[]) cols.get(0)).length];
int width, height;
// counter
int k = 0;
boolean auto_fix_all = false;
boolean ignore_all = false;
boolean resize = false;
if (!ControlWindow.isGUIEnabled()) {
// headless mode: autofix all
auto_fix_all = true;
resize = true;
}
// Accumulate mipmap generation tasks
final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
startLargeUpdate();
for (int i = 0; i < cols.size(); i++) {
final String[] rows = (String[]) cols.get(i);
if (i > 0) {
x -= lr_overlap;
}
for (int j = 0; j < rows.length; j++) {
if (Thread.currentThread().isInterrupted()) {
Display.repaint(layer);
rollback();
return;
}
if (j > 0) {
y -= bt_overlap;
}
// get file name
final String file_name = (String) rows[j];
path = dir + file_name;
if (null != first_img && file_name.equals(first_image_name)) {
img = first_img;
// release pointer
first_img = null;
} else {
// open image
releaseToFit(first_image_width, first_image_height, first_image_type, 1.5f);
try {
IJ.redirectErrorMessages();
img = openImagePlus(path);
} catch (final OutOfMemoryError oome) {
printMemState();
throw oome;
}
}
if (null == img) {
Utils.log("null image! skipping.");
pall[i][j] = null;
continue;
}
width = img.getWidth();
height = img.getHeight();
int rw = width;
int rh = height;
if (width != first_image_width || height != first_image_height) {
int new_width = first_image_width;
int new_height = first_image_height;
if (!auto_fix_all && !ignore_all) {
final GenericDialog gdr = new GenericDialog("Size mismatch!");
gdr.addMessage("The size of " + file_name + " is " + width + " x " + height);
gdr.addMessage("but the selected image was " + first_image_width + " x " + first_image_height);
gdr.addMessage("Adjust to selected image dimensions?");
gdr.addNumericField("width: ", (double) first_image_width, 0);
// should not be editable ... or at least, explain in some way that the dimensions can be edited just for this image --> done below
gdr.addNumericField("height: ", (double) first_image_height, 0);
gdr.addMessage("[If dimensions are changed they will apply only to this image]");
gdr.addMessage("");
final String[] au = new String[] { "fix all", "ignore all" };
gdr.addChoice("Automate:", au, au[1]);
gdr.addMessage("Cancel == NO OK = YES");
gdr.showDialog();
if (gdr.wasCanceled()) {
resize = false;
// do nothing: don't fix/resize
}
resize = true;
// catch values
new_width = (int) gdr.getNextNumber();
new_height = (int) gdr.getNextNumber();
final int iau = gdr.getNextChoiceIndex();
if (new_width != first_image_width || new_height != first_image_height) {
auto_fix_all = false;
} else {
auto_fix_all = (0 == iau);
}
ignore_all = (1 == iau);
if (ignore_all)
resize = false;
}
if (resize) {
// resize Patch dimensions
rw = first_image_width;
rh = first_image_height;
}
}
// add new Patch at base bx,by plus the x,y of the grid
// will call back and cache the image
final Patch patch = new Patch(layer.getProject(), img.getTitle(), bx + x, by + y, img);
if (width != rw || height != rh)
patch.setDimensions(rw, rh, false);
addedPatchFrom(path, patch);
if (homogenize_contrast)
// prevent mipmap regeneration until contrast has been homogenized (see below)
setMipMapsRegeneration(false);
else
fus.add(regenerateMipMaps(patch));
// after the above two lines! Otherwise it will paint fine, but throw exceptions on the way
layer.add(patch, true);
// otherwise when reopening it has to fetch all ImagePlus and scale and zip them all! This method though creates the awt and the snap, thus filling up memory and slowing down, but it's worth it.
patch.updateInDatabase("tiff_snapshot");
pall[i][j] = patch;
al.add(patch);
if (ControlWindow.isGUIEnabled()) {
// northwest to prevent screwing up Patch coordinates.
layer.getParent().enlargeToFit(patch, LayerSet.NORTHWEST);
}
y += img.getHeight();
Utils.showProgress((double) k / n_images);
k++;
}
x += img.getWidth();
if (largest_y < y) {
largest_y = y;
}
// resetting!
y = 0;
}
// build list
final Patch[] pa = new Patch[al.size()];
int f = 0;
// list in row-first order
for (int j = 0; j < pall[0].length; j++) {
// 'j' is row
for (int i = 0; i < pall.length; i++) {
// 'i' is column
pa[f++] = pall[i][j];
}
}
// optimize repaints: all to background image
Display.clearSelection(layer);
// make the first one be top, and the rest under it in left-right and top-bottom order
for (int j = 0; j < pa.length; j++) {
layer.moveBottom(pa[j]);
}
// make picture
// getFlatImage(layer, layer.getMinimalBoundingBox(Patch.class), 0.25, 1, ImagePlus.GRAY8, Patch.class, null, false).show();
// optimize repaints: all to background image
Display.clearSelection(layer);
if (homogenize_contrast) {
if (null != worker)
worker.setTaskName("Enhancing contrast");
// 0 - check that all images are of the same type
int tmp_type = pa[0].getType();
for (int e = 1; e < pa.length; e++) {
if (pa[e].getType() != tmp_type) {
// can't continue
tmp_type = Integer.MAX_VALUE;
Utils.log("Can't homogenize histograms: images are not all of the same type.\nFirst offending image is: " + al.get(e));
break;
}
}
if (Integer.MAX_VALUE != tmp_type) {
// checking on error flag
// Set min and max for all images
// 1 - fetch statistics for each image
final ArrayList<ImageStatistics> al_st = new ArrayList<ImageStatistics>();
// list of Patch ordered by stdDev ASC
final ArrayList<Patch> al_p = new ArrayList<Patch>();
int type = -1;
for (int i = 0; i < pa.length; i++) {
if (Thread.currentThread().isInterrupted()) {
Display.repaint(layer);
rollback();
return;
}
ImagePlus imp = fetchImagePlus(pa[i]);
// speed-up trick: extract data from smaller image
if (imp.getWidth() > 1024) {
releaseToFit(1024, (int) ((imp.getHeight() * 1024) / imp.getWidth()), imp.getType(), 1.1f);
// cheap and fast nearest-point resizing
imp = new ImagePlus(imp.getTitle(), imp.getProcessor().resize(1024));
}
if (-1 == type)
type = imp.getType();
final ImageStatistics i_st = imp.getStatistics();
// order by stdDev, from small to big
int q = 0;
for (final ImageStatistics st : al_st) {
q++;
if (st.stdDev > i_st.stdDev)
break;
}
if (q == al.size()) {
// append at the end. WARNING if importing thousands of images, this is a potential source of out of memory errors. I could just recompute it when I needed it again below
al_st.add(i_st);
al_p.add(pa[i]);
} else {
al_st.add(q, i_st);
al_p.add(q, pa[i]);
}
}
// shallow copy of the ordered list
final ArrayList<Patch> al_p2 = new ArrayList<Patch>(al_p);
// 2 - discard the first and last 25% (TODO: a proper histogram clustering analysis and histogram examination should apply here)
if (pa.length > 3) {
// under 4 images, use them all
int i = 0;
while (i <= pa.length * 0.25) {
al_p.remove(i);
i++;
}
final int count = i;
i = pa.length - 1 - count;
while (i > (pa.length * 0.75) - count) {
al_p.remove(i);
i--;
}
}
// 3 - compute common histogram for the middle 50% images
final Patch[] p50 = new Patch[al_p.size()];
al_p.toArray(p50);
final StackStatistics stats = new StackStatistics(new PatchStack(p50, 1));
// 4 - compute autoAdjust min and max values
// extracting code from ij.plugin.frame.ContrastAdjuster, method autoAdjust
int autoThreshold = 0;
double min = 0;
double max = 0;
final int limit = stats.pixelCount / 10;
final int[] histogram = stats.histogram;
// In ImageJ's ContrastAdjuster the auto threshold is halved on each press: effectively applied
// once for 8-bit and color, twice for 16- and 32-bit (hence the 2500 used here).
if (ImagePlus.GRAY16 == type || ImagePlus.GRAY32 == type)
autoThreshold = 2500;
else
autoThreshold = 5000;
final int threshold = stats.pixelCount / autoThreshold;
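// Scan the histogram inward from both ends: bins whose count exceeds 'limit' are treated as background and skipped,
// and the first bin on each side whose count exceeds 'threshold' becomes hmin / hmax, the bounds of the display range.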
int i = -1;
boolean found = false;
int count;
do {
i++;
count = histogram[i];
if (count > limit)
count = 0;
found = count > threshold;
} while (!found && i < 255);
final int hmin = i;
i = 256;
do {
i--;
count = histogram[i];
if (count > limit)
count = 0;
found = count > threshold;
} while (!found && i > 0);
final int hmax = i;
if (hmax >= hmin) {
min = stats.histMin + hmin * stats.binSize;
max = stats.histMin + hmax * stats.binSize;
if (min == max) {
min = stats.min;
max = stats.max;
}
}
// 5 - compute common mean within min,max range
final double target_mean = getMeanOfRange(stats, min, max);
Utils.log2("Loader min,max: " + min + ", " + max + ", target mean: " + target_mean);
// 6 - apply to all
for (i = al_p2.size() - 1; i > -1; i--) {
// the order is different, thus getting it from the proper list
final Patch p = (Patch) al_p2.get(i);
final double dm = target_mean - getMeanOfRange((ImageStatistics) al_st.get(i), min, max);
// displacing in the opposite direction makes sense: the range drifts so that the 0-255 range of the resulting awt.Image ends up closer to the ideal target_mean
p.setMinAndMax(min - dm, max - dm);
// OBSOLETE and wrong //p.putMinAndMax(fetchImagePlus(p));
}
setMipMapsRegeneration(true);
if (isMipMapsRegenerationEnabled()) {
// recreate files
for (final Patch p : al) fus.add(regenerateMipMaps(p));
}
Display.repaint(layer, new Rectangle(0, 0, (int) layer.getParent().getLayerWidth(), (int) layer.getParent().getLayerHeight()), 0);
// make picture
// getFlatImage(layer, layer.getMinimalBoundingBox(Patch.class), 0.25, 1, ImagePlus.GRAY8, Patch.class, null, false).show();
}
}
if (stitch_tiles) {
// Wait until all mipmaps for the new images have been generated before attempting to register
Utils.wait(fus);
// create undo
layer.getParent().addTransformStep(new HashSet<Displayable>(layer.getDisplayables(Patch.class)));
// wait until repainting operations have finished (otherwise, calling crop on an ImageProcessor fails with out of bounds exception sometimes)
if (null != Display.getFront())
Display.getFront().getCanvas().waitForRepaint();
if (null != worker)
worker.setTaskName("Stitching");
StitchingTEM.stitch(pa, cols.size(), bt_overlap, lr_overlap, true, pc_param).run();
}
// link with images on top, bottom, left and right.
if (link_images) {
if (null != worker)
worker.setTaskName("Linking");
for (int i = 0; i < pall.length; i++) {
// 'i' is column
for (int j = 0; j < pall[0].length; j++) {
// 'j' is row
final Patch p = pall[i][j];
// can happen if a slot is empty
if (null == p)
continue;
if (i > 0 && null != pall[i - 1][j])
p.link(pall[i - 1][j]);
if (i < pall.length - 1 && null != pall[i + 1][j])
p.link(pall[i + 1][j]);
if (j > 0 && null != pall[i][j - 1])
p.link(pall[i][j - 1]);
if (j < pall[0].length - 1 && null != pall[i][j + 1])
p.link(pall[i][j + 1]);
}
}
}
commitLargeUpdate();
// resize LayerSet
// int new_width = x;
// int new_height = largest_y;
// Math.abs(bx) + new_width, Math.abs(by) + new_height);
layer.getParent().setMinimumDimensions();
// update indexes
// so its done once only
layer.updateInDatabase("stack_index");
// create panels in all Displays showing this layer
/* // not needed anymore
Iterator it = al.iterator();
while (it.hasNext()) {
Display.add(layer, (Displayable)it.next(), false); // don't set it active, don't want to reload the ImagePlus!
}
*/
// update Displays
Display.update(layer);
layer.recreateBuckets();
// debug:
} catch (final Throwable t) {
IJError.print(t);
rollback();
setMipMapsRegeneration(true);
}
}
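The tile-placement arithmetic in the loops above can be summarized on its own: each new column starts lr_overlap pixels to the left of where the previous column ended, each new row starts bt_overlap pixels above where the previous row ended, and x/y then advance by the tile dimensions. The following is a minimal sketch, not TrakEM2 code; the tile size, overlaps and the class name GridPlacementSketch are hypothetical.
public class GridPlacementSketch {
    public static void main(final String[] args) {
        final int cols = 3, rows = 2;
        final int tileWidth = 512, tileHeight = 512; // hypothetical tile size in pixels
        final double lrOverlap = 32, btOverlap = 24; // hypothetical overlaps in pixels
        final double bx = 0, by = 0; // top-left corner of the grid
        double x = 0;
        for (int i = 0; i < cols; i++) {
            if (i > 0) x -= lrOverlap; // shift left into the previous column's overlap
            double y = 0;
            for (int j = 0; j < rows; j++) {
                if (j > 0) y -= btOverlap; // shift up into the previous row's overlap
                System.out.printf("tile[%d][%d] at (%.1f, %.1f)%n", i, j, bx + x, by + y);
                y += tileHeight;
            }
            x += tileWidth;
        }
    }
}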
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class Loader method makePrescaledTiles.
/**
* Generate 256x256 tiles, as many as necessary, to cover the given srcRect, starting at max_scale. Designed to be slow but memory-capable.
*
* filename = z + "/" + row + "_" + column + "_" + s + ".jpg";
*
* row and column run from 0 to n stepsize 1
* that is, row = y / ( 256 * 2^s ) and column = x / ( 256 * 2^s )
*
* z : z-level (slice)
* x,y: the row and column
* s: scale, which is 1 / (2^s), in integers: 0, 1, 2 ...
*
* var MAX_S = Math.floor( Math.log( MAX_Y + 1 ) / Math.LN2 ) - Math.floor( Math.log( Y_TILE_SIZE ) / Math.LN2 ) - 1;
*
* The module should not be more than 5
* At all levels, there should be an even number of rows and columns, except for the coarsest level.
* The coarsest level should be at least 5x5 tiles.
*
* Best results are obtained when the srcRect approaches or is a square. Black space will pad the right and bottom edges when the srcRect is not exactly square.
* Only the area within the srcRect is ever included, even if actual data exists beyond.
*
* @return The watcher thread, for joining purposes, or null if the dialog is canceled or preconditions are not met.
* @throws IllegalArgumentException if the type is not ImagePlus.GRAY8 or ImagePlus.COLOR_RGB.
*/
public Bureaucrat makePrescaledTiles(final Layer[] layers, final Class<?> clazz, final Rectangle srcRect, double max_scale_, final int c_alphas, final int type, String target_dir, final boolean from_original_images, final Saver saver, final int tileSide) {
if (null == layers || 0 == layers.length)
return null;
switch(type) {
case ImagePlus.GRAY8:
case ImagePlus.COLOR_RGB:
break;
default:
throw new IllegalArgumentException("Can only export for web with 8-bit or RGB");
}
// choose target directory
if (null == target_dir) {
final DirectoryChooser dc = new DirectoryChooser("Choose target directory");
target_dir = dc.getDirectory();
if (null == target_dir)
return null;
}
if (IJ.isWindows())
target_dir = target_dir.replace('\\', '/');
if (!target_dir.endsWith("/"))
target_dir += "/";
if (max_scale_ > 1) {
Utils.log("Prescaled Tiles: using max scale of 1.0");
// no point
max_scale_ = 1;
}
final String dir = target_dir;
final double max_scale = max_scale_;
final Worker worker = new Worker("Creating prescaled tiles") {
private void cleanUp() {
finishedWorking();
}
@Override
public void run() {
startedWorking();
try {
// project name
// String pname = layer[0].getProject().getTitle();
// create 'z' directories if they don't exist: check and ask!
// start with the highest scale level
final int[] best = determineClosestPowerOfTwo(srcRect.width > srcRect.height ? srcRect.width : srcRect.height);
final int edge_length = best[0];
final int n_edge_tiles = edge_length / tileSide;
Utils.log2("srcRect: " + srcRect);
Utils.log2("edge_length, n_edge_tiles, best[1] " + best[0] + ", " + n_edge_tiles + ", " + best[1]);
// thumbnail dimensions
// LayerSet ls = layer[0].getParent();
final double ratio = srcRect.width / (double) srcRect.height;
double thumb_scale = 1.0;
if (ratio >= 1) {
// width is larger or equal than height
thumb_scale = 192.0 / srcRect.width;
} else {
thumb_scale = 192.0 / srcRect.height;
}
// Figure out layer indices, given that layers are not necessarily evenly spaced
final TreeMap<Integer, Layer> indices = new TreeMap<Integer, Layer>();
final ArrayList<Integer> missingIndices = new ArrayList<Integer>();
final double resolution_z_px;
final int smallestIndex, largestIndex;
if (1 == layers.length) {
indices.put(0, layers[0]);
resolution_z_px = layers[0].getZ();
smallestIndex = 0;
largestIndex = 0;
} else {
// Ensure layers are sorted by Z index and are unique pointers and unique in Z coordinate:
final TreeMap<Double, Layer> t = new TreeMap<Double, Layer>();
for (final Layer l1 : new HashSet<Layer>(Arrays.asList(layers))) {
final Layer l2 = t.get(l1.getZ());
if (null == l2) {
t.put(l1.getZ(), l1);
} else {
// Ignore the layer with fewer objects
if (l1.getDisplayables().size() > l2.getDisplayables().size()) {
t.put(l1.getZ(), l1);
Utils.log("Ignoring duplicate layer: " + l2);
}
}
}
// What is the mode thickness, measured by Z(i-1) - Z(i)?
// (Distance between the Z of two consecutive layers)
final HashMap<Double, Integer> counts = new HashMap<Double, Integer>();
final Layer prev = t.get(t.firstKey());
double modeThickness = 0;
int modeThicknessCount = 0;
for (final Layer la : t.tailMap(prev.getZ(), false).values()) {
// Thickness with 3-decimal precision only
final double d = ((int) ((la.getZ() - prev.getZ()) * 1000 + 0.5)) / 1000.0;
Integer c = counts.get(d);
//
if (null == c)
c = 0;
++c;
counts.put(d, c);
//
if (c > modeThicknessCount) {
modeThicknessCount = c;
modeThickness = d;
}
}
// deliberately pixelWidth, not pixelDepth
resolution_z_px = modeThickness * prev.getParent().getCalibration().pixelWidth;
// Assign an index to each layer, approximating each layer at modeThickness intervals
for (final Layer la : t.values()) {
indices.put((int) (la.getZ() / modeThickness + 0.5), la);
}
// First and last
smallestIndex = indices.firstKey();
largestIndex = indices.lastKey();
Utils.logAll("indices: " + smallestIndex + ", " + largestIndex);
// Which indices are missing?
for (int i = smallestIndex + 1; i < largestIndex; ++i) {
if (!indices.containsKey(i)) {
missingIndices.add(i);
}
}
}
// JSON metadata for CATMAID
{
final StringBuilder sb = new StringBuilder("{");
final LayerSet ls = layers[0].getParent();
final Calibration cal = ls.getCalibration();
sb.append("\"volume_width_px\": ").append(srcRect.width).append(',').append('\n').append("\"volume_height_px\": ").append(srcRect.height).append(',').append('\n').append("\"volume_sections\": ").append(largestIndex - smallestIndex + 1).append(',').append('\n').append("\"extension\": \"").append(saver.getExtension()).append('\"').append(',').append('\n').append("\"resolution_x\": ").append(cal.pixelWidth).append(',').append('\n').append("\"resolution_y\": ").append(cal.pixelHeight).append(',').append('\n').append("\"resolution_z\": ").append(resolution_z_px).append(',').append('\n').append("\"units\": \"").append(cal.getUnit()).append('"').append(',').append('\n').append("\"offset_x_px\": 0,\n").append("\"offset_y_px\": 0,\n").append("\"offset_z_px\": ").append(indices.get(indices.firstKey()).getZ() * cal.pixelWidth / cal.pixelDepth).append(',').append('\n').append("\"missing_layers\": [");
for (final Integer i : missingIndices) sb.append(i - smallestIndex).append(',');
// remove the trailing comma, if any missing index was appended
if (!missingIndices.isEmpty())
sb.setLength(sb.length() - 1);
sb.append("]}");
if (!Utils.saveToFile(new File(dir + "metadata.json"), sb.toString())) {
Utils.logAll("WARNING: could not save " + dir + "metadata.json\nThe contents was:\n" + sb.toString());
}
}
for (final Map.Entry<Integer, Layer> entry : indices.entrySet()) {
if (this.quit) {
cleanUp();
return;
}
final int index = entry.getKey() - smallestIndex;
final Layer layer = entry.getValue();
// 1 - create a directory 'z' named as the layer's index
String tile_dir = dir + index;
File fdir = new File(tile_dir);
int tag = 1;
// Ensure there is a usable directory:
while (fdir.exists() && !fdir.isDirectory()) {
fdir = new File(tile_dir + "_" + tag);
tag++;
}
if (!fdir.exists()) {
fdir.mkdir();
Utils.log("Created directory " + fdir);
}
// if the directory exists already, just reuse it, overwriting its files if needed.
final String tmp = fdir.getAbsolutePath().replace('\\', '/');
if (!tile_dir.equals(tmp))
Utils.log("\tWARNING: directory will not be in the standard location.");
// debug:
Utils.log2("tile_dir: " + tile_dir + "\ntmp: " + tmp);
tile_dir = tmp;
if (!tile_dir.endsWith("/"))
tile_dir += "/";
// 2 - create layer thumbnail, max 192x192
ImagePlus thumb = getFlatImage(layer, srcRect, thumb_scale, c_alphas, type, clazz, true);
saver.save(thumb, tile_dir + "small");
// ImageSaver.saveAsJpeg(thumb.getProcessor(), tile_dir + "small.jpg", jpeg_quality, ImagePlus.COLOR_RGB != type);
flush(thumb);
thumb = null;
// 3 - fill directory with tiles
if (edge_length < tileSide) {
// edge_length is the largest length of the tileSide x tileSide tile map that covers an area equal or larger than the desired srcRect (because all tiles have to be tileSide x tileSide in size)
// create single tile per layer
makeTile(layer, srcRect, max_scale, c_alphas, type, clazz, tile_dir + "0_0_0", saver);
} else {
// create pyramid of tiles
if (from_original_images) {
Utils.log("Exporting from web using original images");
// Create a giant 8-bit image of the whole layer from original images
double scale = 1;
Utils.log("Export srcRect: " + srcRect);
// WARNING: the snapshot will most likely be smaller than the virtual square image being chopped into tiles
ImageProcessor snapshot = null;
if (ImagePlus.COLOR_RGB == type) {
Utils.log("WARNING: ignoring alpha masks for 'use original images' and 'RGB color' options");
snapshot = Patch.makeFlatImage(type, layer, srcRect, scale, (ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), Color.black, true);
} else if (ImagePlus.GRAY8 == type) {
// Respect alpha masks and display range:
Utils.log("WARNING: ignoring scale for 'use original images' and '8-bit' options");
snapshot = ExportUnsignedShort.makeFlatImage((ArrayList<Patch>) (List) layer.getDisplayables(Patch.class, true), srcRect, 0).convertToByte(true);
} else {
Utils.log("ERROR: don't know how to generate mipmaps for type '" + type + "'");
cleanUp();
return;
}
int scale_pow = 0;
int n_et = n_edge_tiles;
final ExecutorService exe = Utils.newFixedThreadPool("export-for-web");
final ArrayList<Future<?>> fus = new ArrayList<Future<?>>();
try {
while (n_et >= best[1]) {
final int snapWidth = snapshot.getWidth();
final int snapHeight = snapshot.getHeight();
final ImageProcessor source = snapshot;
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final String path = new StringBuilder(tile_dir).append(row).append('_').append(col).append('_').append(scale_pow).toString();
final int tileXStart = col * tileSide;
final int tileYStart = row * tileSide;
final int pixelOffset = tileYStart * snapWidth + tileXStart;
fus.add(exe.submit(new Callable<Boolean>() {
@Override
public Boolean call() {
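// Copy a tileSide x tileSide window starting at (tileXStart, tileYStart) out of the snapshot;
// pixels beyond the snapshot's right or bottom edge are left at their default value (black padding).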
if (ImagePlus.GRAY8 == type) {
final byte[] pixels = (byte[]) source.getPixels();
final byte[] p = new byte[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; sourceIndex = pixelOffset + y * snapWidth, y++) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ByteProcessor(tileSide, tileSide, p, GRAY_LUT)), path);
} else {
final int[] pixels = (int[]) source.getPixels();
final int[] p = new int[tileSide * tileSide];
for (int y = 0, sourceIndex = pixelOffset; y < tileSide && tileYStart + y < snapHeight; sourceIndex = pixelOffset + y * snapWidth, y++) {
final int offsetL = y * tileSide;
for (int x = 0; x < tileSide && tileXStart + x < snapWidth; sourceIndex++, x++) {
p[offsetL + x] = pixels[sourceIndex];
}
}
return saver.save(new ImagePlus(path, new ColorProcessor(tileSide, tileSide, p)), path);
}
}
}));
}
}
//
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
//
Utils.wait(fus);
fus.clear();
// Scale snapshot in half with area averaging
final ImageProcessor nextSnapshot;
if (ImagePlus.GRAY8 == type) {
nextSnapshot = new ByteProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final byte[] p1 = (byte[]) snapshot.getPixels();
final byte[] p2 = (byte[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
p2[i++] = (byte) (((p1[offset1a + x1] & 0xff) + (p1[offset1a + x1 + 1] & 0xff) + (p1[offset1b + x1] & 0xff) + (p1[offset1b + x1 + 1] & 0xff)) / 4);
}
}
} else {
nextSnapshot = new ColorProcessor((int) (srcRect.width * scale), (int) (srcRect.height * scale));
final int[] p1 = (int[]) snapshot.getPixels();
final int[] p2 = (int[]) nextSnapshot.getPixels();
final int width1 = snapshot.getWidth();
final int width2 = nextSnapshot.getWidth();
final int height2 = nextSnapshot.getHeight();
int i = 0;
for (int y1 = 0, y2 = 0; y2 < height2; y1 += 2, y2++) {
final int offset1a = y1 * width1;
final int offset1b = (y1 + 1) * width1;
for (int x1 = 0, x2 = 0; x2 < width2; x1 += 2, x2++) {
final int ka = p1[offset1a + x1], kb = p1[offset1a + x1 + 1], kc = p1[offset1b + x1], kd = p1[offset1b + x1 + 1];
// Average each channel independently
final int red = (((ka >> 16) & 0xff) + ((kb >> 16) & 0xff) + ((kc >> 16) & 0xff) + ((kd >> 16) & 0xff)) / 4;
final int green = (((ka >> 8) & 0xff) + ((kb >> 8) & 0xff) + ((kc >> 8) & 0xff) + ((kd >> 8) & 0xff)) / 4;
final int blue = ((ka & 0xff) + (kb & 0xff) + (kc & 0xff) + (kd & 0xff)) / 4;
p2[i++] = (red << 16) + (green << 8) + blue;
}
}
}
// Assign for next iteration
snapshot = nextSnapshot;
// Scale snapshot with a TransformMesh
/*
AffineModel2D aff = new AffineModel2D();
aff.set(0.5f, 0, 0, 0.5f, 0, 0);
ImageProcessor scaledSnapshot = new ByteProcessor((int)(snapshot.getWidth() * scale), (int)(snapshot.getHeight() * scale));
final CoordinateTransformMesh mesh = new CoordinateTransformMesh( aff, 32, snapshot.getWidth(), snapshot.getHeight() );
final mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh> mapping = new mpicbg.ij.TransformMeshMapping<CoordinateTransformMesh>( mesh );
mapping.mapInterpolated(snapshot, scaledSnapshot, Runtime.getRuntime().availableProcessors());
// Assign for next iteration
snapshot = scaledSnapshot;
snapshotPixels = (byte[]) scaledSnapshot.getPixels();
*/
}
} catch (final Throwable t) {
IJError.print(t);
} finally {
exe.shutdown();
}
} else {
// should be max_scale; WARNING: if scale differs from 1, it will FAIL to set the next scale properly.
double scale = 1;
int scale_pow = 0;
// cached for local modifications in the loop; works as the loop controller
int n_et = n_edge_tiles;
while (n_et >= best[1]) {
// best[1] is the minimal root found, i.e. one of 1,2,3,4,5, from which powers of two build up the edge_length
// 0 < scale <= 1, so no precision lost
final int tile_side = (int) (256 / scale);
for (int row = 0; row < n_et; row++) {
for (int col = 0; col < n_et; col++) {
final int i_tile = row * n_et + col;
Utils.showProgress(i_tile / (double) (n_et * n_et));
if (0 == i_tile % 100) {
// RGB int[] images
releaseToFit(tile_side * tile_side * 4 * 2);
}
if (this.quit) {
cleanUp();
return;
}
// TODO row and col are inverted
final Rectangle tile_src = new Rectangle(
srcRect.x + tile_side * row,
srcRect.y + tile_side * col,
tile_side,
tile_side); // in absolute coords; magnification applied later
// crop bounds
if (tile_src.x + tile_src.width > srcRect.x + srcRect.width)
tile_src.width = srcRect.x + srcRect.width - tile_src.x;
if (tile_src.y + tile_src.height > srcRect.y + srcRect.height)
tile_src.height = srcRect.y + srcRect.height - tile_src.y;
// negative tile sizes will be made into black tiles
// (negative dimensions occur for tiles beyond the edges of srcRect, since the grid of tiles has to be of equal number of rows and cols)
// should be row_col_scale, but results in transposed tiles in googlebrains, so I reversed the order.
makeTile(layer, tile_src, scale, c_alphas, type, clazz, new StringBuilder(tile_dir).append(col).append('_').append(row).append('_').append(scale_pow).toString(), saver);
}
}
scale_pow++;
// works as magnification
scale = 1 / Math.pow(2, scale_pow);
n_et /= 2;
}
}
}
}
} catch (final Exception e) {
IJError.print(e);
} finally {
Utils.showProgress(1);
}
cleanUp();
finishedWorking();
}
};
// watcher thread
return Bureaucrat.createAndStart(worker, layers[0].getProject());
}
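The tile-naming convention documented in the javadoc above (filename = z + "/" + row + "_" + column + "_" + s, with row = y / (tileSide * 2^s) and column = x / (tileSide * 2^s)) can be sketched on its own. This is not TrakEM2 code; the class name TileNameSketch and the sample coordinates are hypothetical.
public class TileNameSketch {
    static String tileName(final int z, final int x, final int y, final int s, final int tileSide) {
        final int span = tileSide << s; // source pixels covered by one tile at scale level s
        final int row = y / span;
        final int col = x / span;
        return z + "/" + row + "_" + col + "_" + s;
    }
    public static void main(final String[] args) {
        // world pixel (5000, 1200) in section 0, at scale level 2, with 256-pixel tiles
        System.out.println(tileName(0, 5000, 1200, 2, 256)); // prints "0/1_4_2"
    }
}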
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class Loader method importSequenceAsGrid.
/**
* Open one of the images to find out its dimensions, and derive a good guess at the desirable scale for doing phase- and cross-correlations with roughly 512x512 images.
*/
/*
private int getCCScaleGuess(final File images_dir, final String[] all_images) {
try {
if (null != all_images && all_images.length > 0) {
Utils.showStatus("Opening one image ... ", false);
String sdir = images_dir.getAbsolutePath().replace('\\', '/');
if (!sdir.endsWith("/")) sdir += "/";
IJ.redirectErrorMessages();
ImagePlus imp = openImagePlus(sdir + all_images[0]);
if (null != imp) {
int w = imp.getWidth();
int h = imp.getHeight();
flush(imp);
imp = null;
int cc_scale = (int)((512.0 / (w > h ? w : h)) * 100);
if (cc_scale > 100) return 100;
return cc_scale;
}
}
} catch (Exception e) {
Utils.log2("Could not get an estimate for the optimal scale.");
}
return 25;
}
*/
/**
* Import a sequence of images as a grid, and put them in the layer. If the directory ({@code dir}) is null, the user will be asked to select one. The image_file_names may be null; in any case it holds only file names, not paths.
*/
public Bureaucrat importSequenceAsGrid(final Layer first_layer, String dir, final String[] image_file_names) {
String[] all_images = null;
// first file
String file = null;
File images_dir = null;
if (null != dir && null != image_file_names) {
all_images = image_file_names;
images_dir = new File(dir);
} else if (null == dir) {
final String[] dn = Utils.selectFile("Select first image");
if (null == dn)
return null;
dir = dn[0];
file = dn[1];
images_dir = new File(dir);
} else {
images_dir = new File(dir);
if (!(images_dir.exists() && images_dir.isDirectory())) {
Utils.showMessage("Something went wrong:\n\tCan't find directory " + dir);
return null;
}
}
if (null == image_file_names)
all_images = images_dir.list(new ini.trakem2.io.ImageFileFilter("", null));
if (null == file && all_images.length > 0) {
file = all_images[0];
}
final int n_max = all_images.length;
// reasonable estimate
final int side = (int) Math.floor(Math.sqrt(n_max));
final GenericDialog gd = new GenericDialog("Conventions");
gd.addStringField("file_name_matches: ", "");
gd.addNumericField("first_image: ", 1, 0);
gd.addNumericField("last_image: ", n_max, 0);
gd.addCheckbox("Reverse list order", false);
gd.addNumericField("number_of_rows: ", side, 0);
gd.addNumericField("number_of_columns: ", side, 0);
gd.addNumericField("number_of_slices: ", 1, 0);
gd.addMessage("The top left coordinate for the imported grid:");
gd.addNumericField("base_x: ", 0, 3);
gd.addNumericField("base_y: ", 0, 3);
gd.addMessage("Amount of image overlap, in pixels");
// as asked by Joachim Walter
gd.addNumericField("bottom-top overlap: ", 0, 2);
gd.addNumericField("left-right overlap: ", 0, 2);
gd.addCheckbox("link images", false);
gd.addCheckbox("montage with phase correlation", true);
gd.addCheckbox("homogenize_contrast", false);
gd.showDialog();
if (gd.wasCanceled())
return null;
final String regex = gd.getNextString();
// avoid destroying backslashes
Utils.log2(new StringBuilder("using regex: ").append(regex).toString());
int first = (int) gd.getNextNumber();
if (first < 1)
first = 1;
int last = (int) gd.getNextNumber();
if (last < 1)
last = 1;
if (last < first) {
Utils.showMessage("Last is smaller that first!");
return null;
}
final boolean reverse_order = gd.getNextBoolean();
final int n_rows = (int) gd.getNextNumber();
final int n_cols = (int) gd.getNextNumber();
final int n_slices = (int) gd.getNextNumber();
final double bx = gd.getNextNumber();
final double by = gd.getNextNumber();
final double bt_overlap = gd.getNextNumber();
final double lr_overlap = gd.getNextNumber();
final boolean link_images = gd.getNextBoolean();
final boolean stitch_tiles = gd.getNextBoolean();
final boolean homogenize_contrast = gd.getNextBoolean();
String[] file_names = null;
if (null == image_file_names) {
file_names = images_dir.list(new ini.trakem2.io.ImageFileFilter(regex, null));
// assumes zero-padded names like 001, 002, 003 ..., since this is a plain lexicographic sort of strings
Arrays.sort(file_names);
if (reverse_order) {
// flip in place: swap each entry below the midpoint with its mirror
for (int i = file_names.length / 2 - 1; i > -1; i--) {
final String tmp = file_names[i];
final int j = file_names.length - 1 - i;
file_names[i] = file_names[j];
file_names[j] = tmp;
}
}
} else {
file_names = all_images;
}
if (0 == file_names.length) {
Utils.showMessage("No images found.");
return null;
}
// check if the selected image is in the list. Otherwise, shift selected image to the first of the included ones.
boolean found_first = false;
for (int i = 0; i < file_names.length; i++) {
if (file.equals(file_names[i])) {
found_first = true;
break;
}
}
if (!found_first) {
file = file_names[0];
Utils.log("Using " + file + " as the reference image for size.");
}
// crop list
if (last > file_names.length)
last = file_names.length - 1;
if (first < 1)
first = 1;
if (1 != first || last != file_names.length) {
Utils.log("Cropping list.");
final String[] file_names2 = new String[last - first + 1];
System.arraycopy(file_names, first - 1, file_names2, 0, file_names2.length);
file_names = file_names2;
}
// should be multiple of rows and cols and slices
if (file_names.length != n_rows * n_cols * n_slices) {
Utils.log("ERROR: rows * cols * slices does not match with the number of selected images.");
Utils.log("n_images:" + file_names.length + " rows,cols,slices : " + n_rows + "," + n_cols + "," + n_slices + " total=" + n_rows * n_cols * n_slices);
return null;
}
// final copies, required for use inside the anonymous Worker.Task below
final String[] file_names_ = file_names;
final String dir_ = dir;
// the first file
final String file_ = file;
final double bt_overlap_ = bt_overlap;
final double lr_overlap_ = lr_overlap;
return Bureaucrat.createAndStart(new Worker.Task("Importing", true) {
@Override
public void exec() {
StitchingTEM.PhaseCorrelationParam pc_param = null;
// Slice up list:
for (int sl = 0; sl < n_slices; sl++) {
if (Thread.currentThread().isInterrupted() || hasQuitted())
return;
Utils.log("Importing " + (sl + 1) + "/" + n_slices);
final int start = sl * n_rows * n_cols;
final ArrayList<String[]> cols = new ArrayList<String[]>();
for (int i = 0; i < n_cols; i++) {
final String[] col = new String[n_rows];
for (int j = 0; j < n_rows; j++) {
col[j] = file_names_[start + j * n_cols + i];
}
cols.add(col);
}
final Layer layer = 0 == sl ? first_layer : first_layer.getParent().getLayer(first_layer.getZ() + first_layer.getThickness() * sl, first_layer.getThickness(), true);
if (stitch_tiles && null == pc_param) {
pc_param = new StitchingTEM.PhaseCorrelationParam();
pc_param.setup(layer);
}
insertGrid(layer, dir_, file_, n_rows * n_cols, cols, bx, by, bt_overlap_, lr_overlap_, link_images, stitch_tiles, homogenize_contrast, pc_param, this);
}
}
}, first_layer.getProject());
}
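The core of the exec() task above is the slicing of the flat, sorted file list into grid columns: entry start + j * n_cols + i is row j of column i, assuming the names are ordered row by row within each slice. A minimal sketch, not TrakEM2 code, with hypothetical file names and the hypothetical class name GridSliceSketch:
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class GridSliceSketch {
    public static void main(final String[] args) {
        final int nRows = 2, nCols = 3, slice = 0;
        final String[] fileNames = { "a.tif", "b.tif", "c.tif", "d.tif", "e.tif", "f.tif" }; // hypothetical
        final int start = slice * nRows * nCols; // offset of this slice within the flat list
        final List<String[]> cols = new ArrayList<String[]>();
        for (int i = 0; i < nCols; i++) {
            final String[] col = new String[nRows];
            for (int j = 0; j < nRows; j++) {
                col[j] = fileNames[start + j * nCols + i];
            }
            cols.add(col);
        }
        for (int i = 0; i < nCols; i++) {
            System.out.println("column " + i + ": " + Arrays.toString(cols.get(i)));
        }
    }
}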
use of ini.trakem2.display.Coordinate in project TrakEM2 by trakem2.
the class TMLHandler method endElement.
public void endElement(String namespace_URI, String local_name, String qualified_name) {
if (null == loader)
return;
if (skip) {
// reset
skip = false;
return;
}
String orig_qualified_name = qualified_name;
// Utils.log2("endElement: " + qualified_name);
// iterate over all open things and find the one that matches the qualified_name, and set it closed (pop it out of the list):
qualified_name = qualified_name.toLowerCase().trim();
if (0 == qualified_name.indexOf("t2_")) {
qualified_name = qualified_name.substring(3);
}
for (int i = al_open.size() - 1; i > -1; i--) {
Thing thing = al_open.get(i);
if (thing.getType().toLowerCase().equals(qualified_name)) {
al_open.remove(i);
break;
}
}
if (null != last_annotation && null != last_displayable) {
last_displayable.setAnnotation(last_annotation.toString().trim().replaceAll("&lt;", "<"));
last_annotation = null;
}
// terminate non-single clause objects
if (orig_qualified_name.equals("t2_node")) {
// Remove one node from the stack
nodes.removeLast();
taggables.removeLast();
} else if (orig_qualified_name.equals("t2_connector")) {
if (null != last_connector) {
tree_root_nodes.put(last_connector, last_root_node);
last_root_node = null;
last_connector = null;
last_tree = null;
nodes.clear();
}
last_displayable = null;
} else if (orig_qualified_name.equals("t2_area_list")) {
last_area_list = null;
last_displayable = null;
} else if (orig_qualified_name.equals("t2_area")) {
if (null != reca) {
if (null != last_area_list) {
// it's local
last_area_list.addArea(last_area_list_layer_id, reca.getArea());
} else {
((AreaTree.AreaNode) nodes.getLast()).setData(reca.getArea());
}
reca = null;
}
} else if (orig_qualified_name.equals("ict_transform_list")) {
ct_list_stack.remove(ct_list_stack.size() - 1);
} else if (orig_qualified_name.equals("t2_patch")) {
if (last_patch_filters.size() > 0) {
last_patch.setFilters(last_patch_filters.toArray(new IFilter[last_patch_filters.size()]));
}
if (null != last_ct) {
last_patch.setCoordinateTransformSilently(last_ct);
last_ct = null;
} else if (!last_patch.checkCoordinateTransformFile()) {
Utils.log("ERROR: could not find a file for the coordinate transform #" + last_patch.getCoordinateTransformId() + " of Patch #" + last_patch.getId());
}
if (!last_patch.checkAlphaMaskFile()) {
Utils.log("ERROR: could not find a file for the alpha mask #" + last_patch.getAlphaMaskId() + " of Patch #" + last_patch.getId());
}
last_patch = null;
last_patch_filters.clear();
last_displayable = null;
} else if (orig_qualified_name.equals("t2_ball")) {
last_ball = null;
last_displayable = null;
} else if (orig_qualified_name.equals("t2_dissector")) {
last_dissector = null;
last_displayable = null;
} else if (orig_qualified_name.equals("t2_treeline")) {
if (null != last_treeline) {
// old format:
if (null == last_root_node && null != last_treeline_data && last_treeline_data.length() > 0) {
last_root_node = parseBranch(Utils.trim(last_treeline_data));
last_treeline_data = null;
}
// new
tree_root_nodes.put(last_treeline, last_root_node);
last_root_node = null;
// always:
last_treeline = null;
last_tree = null;
nodes.clear();
}
last_displayable = null;
} else if (orig_qualified_name.equals("t2_areatree")) {
if (null != last_areatree) {
tree_root_nodes.put(last_areatree, last_root_node);
last_root_node = null;
last_areatree = null;
last_tree = null;
// the absence of this line would have made the nodes list grow with all nodes of all areatrees, which is ok but consumes memory
nodes.clear();
}
last_displayable = null;
} else if (orig_qualified_name.equals("t2_stack")) {
if (null != last_ict) {
last_stack.setInvertibleCoordinateTransformSilently(last_ict);
last_ict = null;
}
last_stack = null;
last_displayable = null;
} else if (in(orig_qualified_name, all_displayables)) {
last_displayable = null;
}
}
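The element-name normalization at the top of endElement (lower-case, trim, strip a leading "t2_") is what lets the open-Thing list be matched regardless of how the element was written in the XML. A minimal sketch, not TrakEM2 code, with the hypothetical class name ElementNameSketch:
public class ElementNameSketch {
    static String normalize(String qualifiedName) {
        qualifiedName = qualifiedName.toLowerCase().trim();
        if (qualifiedName.startsWith("t2_")) {
            qualifiedName = qualifiedName.substring(3);
        }
        return qualifiedName;
    }
    public static void main(final String[] args) {
        System.out.println(normalize("T2_Area_List ")); // prints "area_list"
    }
}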