use of net.imglib2.realtransform.Scale in project vcell by virtualcell.
the class DeconstructGeometryCommand method run.
@Override
public void run() {
    // Crop to get a z-stack over time (remove channel dimension)
    long maxX = fluorData.max(fluorData.dimensionIndex(Axes.X));
    long maxY = fluorData.max(fluorData.dimensionIndex(Axes.Y));
    long maxZ = fluorData.max(fluorData.dimensionIndex(Axes.Z));
    long maxTime = fluorData.max(fluorData.dimensionIndex(Axes.TIME));
    Img fluorImg = fluorData.getImgPlus().getImg();
    FinalInterval intervals = Intervals.createMinMax(0, 0, 0, 0, 0, maxX, maxY, maxZ, 0, maxTime);
    RandomAccessibleInterval fluorImgCropped = ops.transform().crop(fluorImg, intervals, true);
    // Calculate scale factors
    double[] scaleFactors = { 1, 1, 1, 1 };
    for (int i = 0; i < geomData.numDimensions(); i++) {
        scaleFactors[i] = geomData.dimension(i) / (double) fluorImgCropped.dimension(i);
    }
    // Scale the fluorescence dataset to match the geometry
    NLinearInterpolatorFactory interpolatorFactory = new NLinearInterpolatorFactory();
    RandomAccessibleInterval fluorScaled = ops.transform().scale(fluorImgCropped, scaleFactors, interpolatorFactory);
    // Crop out the first slice of each z-stack in the time series
    intervals = Intervals.createMinMax(0, 0, 0, 0, fluorScaled.dimension(0) - 1, fluorScaled.dimension(1) - 1, 0, fluorScaled.dimension(3) - 1);
    IntervalView fluorXYT = (IntervalView) ops.transform().crop(fluorScaled, intervals, true);
    // Create a blank image of the same X-Y-Time dimensions
    long[] dimensions = { fluorXYT.dimension(0), fluorXYT.dimension(1), fluorXYT.dimension(2) };
    Img<DoubleType> result = ops.create().img(dimensions);
    // Calculate constant d in the TIRF exponential decay function
    theta = theta * 2 * Math.PI / 360;
    double n1 = 1.52;
    double n2 = 1.38;
    double d = lambda * Math.pow((Math.pow(n1, 2) * Math.pow(Math.sin(theta), 2) - Math.pow(n2, 2)), -0.5) / (4 * Math.PI);
    // Iterate through each time point, using the 3D geometry to generate 2D intensities
    Cursor<DoubleType> cursor = fluorXYT.localizingCursor();
    RandomAccess fluorRA = fluorScaled.randomAccess();
    RandomAccess<RealType<?>> geomRA = geomData.randomAccess();
    RandomAccess<DoubleType> resultRA = result.randomAccess();
    maxZ = geomData.dimension(2) - 1;
    while (cursor.hasNext()) {
        cursor.fwd();
        int[] positionXYZ = { cursor.getIntPosition(0), cursor.getIntPosition(1), (int) maxZ - 1 };
        int[] positionXYZT = { cursor.getIntPosition(0), cursor.getIntPosition(1), (int) maxZ - 1, cursor.getIntPosition(2) };
        resultRA.setPosition(cursor);
        geomRA.setPosition(positionXYZ);
        double sum = 0.0;
        // Walk down the z-column while the geometry is nonzero, accumulating
        // geometry values weighted by the exponential (evanescent) decay
        while (positionXYZ[2] >= 0 && geomRA.get().getRealDouble() != 0.0) {
            fluorRA.setPosition(positionXYZT);
            geomRA.setPosition(positionXYZ);
            sum += geomRA.get().getRealDouble() * Math.exp(-zSpacing * positionXYZ[2] / d);
            positionXYZ[2]--;
        }
        resultRA.get().set(sum);
    }
    System.out.println("done");
    displayService.createDisplay(result);
}
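For reference, the constant d computed above rearranges to d = lambda / (4 * pi * sqrt(n1^2 * sin^2(theta) - n2^2)), which is the standard expression for the penetration depth of a TIRF evanescent field at incidence angle theta; the hard-coded n1 = 1.52 and n2 = 1.38 are presumably the refractive indices on either side of the interface (e.g. coverslip and specimen). Each geometry voxel at z-index z is then weighted by exp(-zSpacing * z / d) in the summation loop.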
use of net.imglib2.realtransform.Scale in project TrakEM2 by trakem2.
the class MatchIntensities method run.
/**
 * @param layers the layers whose patches will be intensity-matched
 * @param radius number of adjacent layers (in each direction) to match against
 * @param scale scale at which patch pairs are sampled for matching
 * @param numCoefficients number of coefficient tiles per patch dimension (the grid is numCoefficients x numCoefficients)
 * @param lambda1 regularization weight toward the translation model inside the interpolated affine model
 * @param lambda2 regularization weight toward the identity model
 * @param neighborWeight weight of the identity connections between neighboring coefficient tiles
 * @param roi restrict processing to patches intersecting this region of interest
 */
public <M extends Model<M> & Affine1D<M>> void run(final List<Layer> layers, final int radius, final double scale, final int numCoefficients, final double lambda1, final double lambda2, final double neighborWeight, final Rectangle roi) throws InterruptedException, ExecutionException {
    final int firstLayerIndex = layerset.getLayerIndex(layers.get(0).getId());
    final int lastLayerIndex = layerset.getLayerIndex(layers.get(layers.size() - 1).getId());
    // final PointMatchFilter filter = new RansacRegressionFilter();
    final PointMatchFilter filter = new RansacRegressionReduceFilter();
    /* collect patches */
    Utils.log("Collecting patches ... ");
    final ArrayList<Patch> patches = new ArrayList<Patch>();
    for (final Layer layer : layers)
        patches.addAll((Collection) layer.getDisplayables(Patch.class, roi));
    /* delete existing intensity coefficients */
    Utils.log("Clearing existing intensity maps ... ");
    for (final Patch p : patches)
        p.clearIntensityMap();
    /* generate coefficient tiles for all patches
     * TODO consider offering alternative models */
    final HashMap<Patch, ArrayList<Tile<? extends M>>> coefficientsTiles = (HashMap) generateCoefficientsTiles(patches, new InterpolatedAffineModel1D<InterpolatedAffineModel1D<AffineModel1D, TranslationModel1D>, IdentityModel>(new InterpolatedAffineModel1D<AffineModel1D, TranslationModel1D>(new AffineModel1D(), new TranslationModel1D(), lambda1), new IdentityModel(), lambda2), numCoefficients * numCoefficients);
    /* completed patches */
    final HashSet<Patch> completedPatches = new HashSet<Patch>();
    /* collect patch pairs */
    Utils.log("Collecting patch pairs ... ");
    final ArrayList<ValuePair<Patch, Patch>> patchPairs = new ArrayList<ValuePair<Patch, Patch>>();
    for (final Patch p1 : patches) {
        completedPatches.add(p1);
        final Rectangle box1 = p1.getBoundingBox().intersection(roi);
        final ArrayList<Patch> p2s = new ArrayList<Patch>();
        /* across adjacent layers */
        final int layerIndex = layerset.getLayerIndex(p1.getLayer().getId());
        for (int i = Math.max(firstLayerIndex, layerIndex - radius); i <= Math.min(lastLayerIndex, layerIndex + radius); ++i) {
            final Layer layer = layerset.getLayer(i);
            if (layer != null)
                p2s.addAll((Collection) layer.getDisplayables(Patch.class, box1));
        }
        for (final Patch p2 : p2s) {
            /*
             * if this patch had been processed earlier, all matches are
             * already in
             */
            if (completedPatches.contains(p2))
                continue;
            patchPairs.add(new ValuePair<Patch, Patch>(p1, p2));
        }
    }
    final int numThreads = Integer.parseInt(layerset.getProperty("n_mipmap_threads", Integer.toString(Runtime.getRuntime().availableProcessors())));
    Utils.log("Matching intensities using " + numThreads + " threads ... ");
    final ExecutorService exec = Executors.newFixedThreadPool(numThreads);
    final ArrayList<Future<?>> futures = new ArrayList<Future<?>>();
    for (final ValuePair<Patch, Patch> patchPair : patchPairs) {
        futures.add(exec.submit(new Matcher(roi, patchPair, (HashMap) coefficientsTiles, filter, scale, numCoefficients)));
    }
    for (final Future<?> future : futures)
        future.get();
    /* connect tiles within patches */
    Utils.log("Connecting coefficient tiles in the same patch ... ");
    for (final Patch p1 : completedPatches) {
        /* get the coefficient tiles */
        final ArrayList<Tile<? extends M>> p1CoefficientsTiles = coefficientsTiles.get(p1);
        for (int y = 1; y < numCoefficients; ++y) {
            final int yr = numCoefficients * y;
            final int yr1 = yr - numCoefficients;
            for (int x = 0; x < numCoefficients; ++x) {
                identityConnect(p1CoefficientsTiles.get(yr1 + x), p1CoefficientsTiles.get(yr + x), neighborWeight);
            }
        }
        for (int y = 0; y < numCoefficients; ++y) {
            final int yr = numCoefficients * y;
            for (int x = 1; x < numCoefficients; ++x) {
                final int yrx = yr + x;
                identityConnect(p1CoefficientsTiles.get(yrx), p1CoefficientsTiles.get(yrx - 1), neighborWeight);
            }
        }
    }
    /* optimize */
    Utils.log("Optimizing ... ");
    final TileConfiguration tc = new TileConfiguration();
    for (final ArrayList<Tile<? extends M>> coefficients : coefficientsTiles.values()) {
        // for ( final Tile< ? > t : coefficients )
        //     if ( t.getMatches().size() == 0 )
        //         IJ.log( "bang" );
        tc.addTiles(coefficients);
    }
    try {
        tc.optimize(0.01f, iterations, iterations, 0.75f);
    } catch (final NotEnoughDataPointsException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (final IllDefinedDataPointsException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    /* save coefficients */
    final double[] ab = new double[2];
    final FSLoader loader = (FSLoader) layerset.getProject().getLoader();
    final String itsDir = loader.getUNUIdFolder() + "trakem2.its/";
    for (final Entry<Patch, ArrayList<Tile<? extends M>>> entry : coefficientsTiles.entrySet()) {
        final FloatProcessor as = new FloatProcessor(numCoefficients, numCoefficients);
        final FloatProcessor bs = new FloatProcessor(numCoefficients, numCoefficients);
        final Patch p = entry.getKey();
        final double min = p.getMin();
        final double max = p.getMax();
        final ArrayList<Tile<? extends M>> tiles = entry.getValue();
        for (int i = 0; i < numCoefficients * numCoefficients; ++i) {
            final Tile<? extends M> t = tiles.get(i);
            final Affine1D<?> affine = t.getModel();
            affine.toArray(ab);
            /* coefficients mapping into existing [min, max] */
            as.setf(i, (float) ab[0]);
            bs.setf(i, (float) ((max - min) * ab[1] + min - ab[0] * min));
        }
        final ImageStack coefficientsStack = new ImageStack(numCoefficients, numCoefficients);
        coefficientsStack.addSlice(as);
        coefficientsStack.addSlice(bs);
        final String itsPath = itsDir + FSLoader.createIdPath(Long.toString(p.getId()), "it", ".tif");
        new File(itsPath).getParentFile().mkdirs();
        IJ.saveAs(new ImagePlus("", coefficientsStack), "tif", itsPath);
    }
    /* update mipmaps */
    for (final Patch p : patches)
        p.getProject().getLoader().decacheImagePlus(p.getId());
    final ArrayList<Future<Boolean>> mipmapFutures = new ArrayList<Future<Boolean>>();
    for (final Patch p : patches)
        mipmapFutures.add(p.updateMipMaps());
    for (final Future<Boolean> f : mipmapFutures)
        f.get();
    Utils.log("Matching intensities done.");
}
use of net.imglib2.realtransform.Scale in project imagej-ops by imagej.
the class DefaultScaleView method calculate.
@Override
public RandomAccessibleInterval<T> calculate(RandomAccessibleInterval<T> input) {
    final long[] newDims = Intervals.dimensionsAsLongArray(in());
    for (int i = 0; i < Math.min(scaleFactors.length, in().numDimensions()); i++) {
        newDims[i] = Math.round(in().dimension(i) * scaleFactors[i]);
    }
    IntervalView<T> interval = Views.interval(Views.raster(RealViews.affineReal(Views.interpolate(Views.extendMirrorSingle(input), interpolator), new Scale(scaleFactors))), new FinalInterval(newDims));
    return interval;
}
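The DefaultScaleView snippet above shows the common ImgLib2 idiom for Scale: extend the source, interpolate it into real space, apply the scale with RealViews.affineReal, rasterize, and bound the result with an interval on the new dimensions. Below is a minimal, self-contained sketch of that same pattern; it is not taken from any of the projects on this page, and the image size and scale factors are hypothetical.

import net.imglib2.FinalInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.interpolation.randomaccess.NLinearInterpolatorFactory;
import net.imglib2.realtransform.RealViews;
import net.imglib2.realtransform.Scale;
import net.imglib2.type.numeric.real.DoubleType;
import net.imglib2.view.Views;

public class ScaleSketch {
    public static void main(final String[] args) {
        // Hypothetical 100 x 80 source image, upscaled by a factor of 2 in each dimension
        final Img<DoubleType> img = ArrayImgs.doubles(100, 80);
        final double[] factors = { 2.0, 2.0 };
        final long[] newDims = { 200, 160 };
        final RandomAccessibleInterval<DoubleType> scaled = Views.interval(
            Views.raster(RealViews.affineReal(
                // extend the source and interpolate so it can be sampled at real coordinates
                Views.interpolate(Views.extendMirrorSingle(img),
                    new NLinearInterpolatorFactory<DoubleType>()),
                new Scale(factors))),
            new FinalInterval(newDims));
        System.out.println("scaled size: " + scaled.dimension(0) + " x " + scaled.dimension(1));
    }
}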
use of net.imglib2.realtransform.Scale in project imagej-ops by imagej.
the class TubenessTest method testTubeness.
@Test
public void testTubeness() {
    Img<UnsignedByteType> input = openUnsignedByteType(Ops.class, "TubesInput.png");
    Img<DoubleType> expected = openDoubleImg("tube.tif");
    final double scale = 5;
    final double sigma = scale / Math.sqrt(2);
    Img<DoubleType> actual = ops.create().img(input, new DoubleType());
    ops.filter().tubeness(actual, input, sigma);
    assertIterationsEqual(expected, actual);
}
use of net.imglib2.realtransform.Scale in project TrakEM2 by trakem2.
the class LinearIntensityMap method run.
@SuppressWarnings({ "rawtypes", "unchecked" })
public <S extends NumericType<S>> void run(final RandomAccessibleInterval<S> image) {
    assert image.numDimensions() == dimensions.numDimensions() : "Number of dimensions do not match.";
    final double[] s = new double[dimensions.numDimensions()];
    for (int d = 0; d < s.length; ++d)
        s[d] = image.dimension(d) / dimensions.dimension(d);
    final Scale scale = new Scale(s);
    // System.out.println( "translation-n " + translation.numDimensions() );
    final RandomAccessibleInterval<RealComposite<T>> stretchedCoefficients = Views.offsetInterval(Views.raster(RealViews.transform(RealViews.transform(coefficients, translation), scale)), image);
    /* decide on type which mapping to use */
    final S t = image.randomAccess().get();
    if (ARGBType.class.isInstance(t))
        mapARGB(Views.flatIterable((RandomAccessibleInterval<ARGBType>) image), Views.flatIterable(stretchedCoefficients));
    else if (RealComposite.class.isInstance(t))
        mapComposite(Views.flatIterable((RandomAccessibleInterval) image), Views.flatIterable(stretchedCoefficients));
    else if (RealType.class.isInstance(t)) {
        final RealType<?> r = (RealType) t;
        if (r.getMinValue() > -Double.MAX_VALUE || r.getMaxValue() < Double.MAX_VALUE)
            // TODO Bug in javac does not enable cast from RandomAccessibleInterval< S > to RandomAccessibleInterval< RealType >, remove when fixed
            mapCrop(Views.flatIterable((RandomAccessibleInterval<RealType>) (Object) image), Views.flatIterable(stretchedCoefficients));
        else
            // TODO Bug in javac does not enable cast from RandomAccessibleInterval< S > to RandomAccessibleInterval< RealType >, remove when fixed
            map(Views.flatIterable((RandomAccessibleInterval<RealType>) (Object) image), Views.flatIterable(stretchedCoefficients));
    }
}
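Design note on the snippet above: the coefficient field (one linear coefficient pair per grid cell) lives on a much coarser grid than the target image, so it is first shifted by the translation transform and stretched with a Scale built from the per-dimension size ratio, then rasterized and cropped to the image's bounds before the per-pixel mapping is dispatched on the pixel type (ARGB, composite, or real).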