Use of net.imglib2.Interval in project knime-core by knime.
The class CAIMDiscretizationNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    // measure the time
    long startTime = System.currentTimeMillis();
    // empty model
    if (m_includedColumnNames.getIncludeList() == null || m_includedColumnNames.getIncludeList().size() == 0) {
        return new PortObject[] { inData[0], new DiscretizationModel() };
    }
    LOGGER.debug("Start discretizing.");
    // the algorithm is designed for binary class problems only (positive,
    // negative), so it is performed once for each class value, treating that
    // value as the positive class and all other values as negative
    exec.setProgress(0.0, "Preparing...");
    // check input data
    BufferedDataTable data = (BufferedDataTable) inData[0];
    // get class column index
    m_classifyColumnIndex = data.getDataTableSpec().findColumnIndex(m_classColumnName.getStringValue());
    assert m_classifyColumnIndex > -1;
    // create the class - index mapping
    createClassFromToIndexMaps(data.getDataTableSpec());
    // create the array with the resulting discretization schemes for
    // each included column
    DiscretizationScheme[] resultSchemes = new DiscretizationScheme[m_includedColumnNames.getIncludeList().size()];
    // for all included columns do the discretization
    int currentColumn = 0;
    for (String includedColumnName : m_includedColumnNames.getIncludeList()) {
        LOGGER.debug("Process column: " + includedColumnName);
        exec.setProgress("Discretizing column '" + includedColumnName + "'");
        ExecutionContext subExecPerColumn = exec.createSubExecutionContext(1.0D / m_includedColumnNames.getIncludeList().size());
        subExecPerColumn.checkCanceled();
        // never discretize the class column (it should not be in the include list anyway)
        if (m_classColumnName.getStringValue().equals(includedColumnName)) {
            continue;
        }
        // determine the column index of the current column
        int columnIndex = data.getDataTableSpec().findColumnIndex(includedColumnName);
        DataColumnDomain domain = data.getDataTableSpec().getColumnSpec(columnIndex).getDomain();
        double minValue = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
        double maxValue = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
        // find all distinct values of the column and create
        // a table with all possible interval boundaries (midpoint value of
        // adjacent values)
        subExecPerColumn.setProgress("Find possible boundaries.");
        BoundaryScheme boundaryScheme = null;
        // create subExec for sorting
        ExecutionContext subExecSort = subExecPerColumn.createSubExecutionContext(0.1);
        // long t1 = System.currentTimeMillis();
        if (m_classOptimizedVersion) {
            boundaryScheme = createAllIntervalBoundaries(data, columnIndex, subExecSort);
        } else {
            boundaryScheme = createAllIntervalBoundaries2(data, columnIndex, subExecSort);
        }
        subExecSort.setProgress(1.0D);
        // long t2 = System.currentTimeMillis() - t1;
        // LOGGER.error("Create boundaries time: " + (t2 / 1000.0)
        // + " optimized: " + m_classOptimizedVersion);
        // LOGGER.error("Boundaries: " + boundaryScheme.getHead());
        LinkedDouble allIntervalBoundaries = boundaryScheme.getHead();
        // create the initial discretization scheme
        DiscretizationScheme discretizationScheme = new DiscretizationScheme(new Interval(minValue, maxValue, true, true));
        double globalCAIM = 0;
        // perform the iterative search for the best intervals
        int numInsertedBounds = 0;
        double currentCAIM = 0;
        // create subExec for inserted bounds
        ExecutionContext subExecBounds = subExecPerColumn.createSubExecutionContext(0.9);
        while (currentCAIM > globalCAIM || numInsertedBounds < m_classValues.length - 1) {
            subExecPerColumn.checkCanceled();
            // create subExec for counting
            ExecutionContext subExecCount = subExecBounds.createSubExecutionContext(1.0D / m_classValues.length);
            // LOGGER.debug("Inserted bounds: " + numInsertedBounds);
            // LOGGER.debug("interval boundaries: " +
            // allIntervalBoundaries);
            // for all possible interval boundaries
            // insert each one, calculate the caim value and add
            // the one with the biggest caim
            LinkedDouble intervalBoundary = allIntervalBoundaries.m_next;
            currentCAIM = 0;
            LinkedDouble bestBoundary = null;
            long currentCountedBoundaries = 0;
            while (intervalBoundary != null) {
                subExecPerColumn.checkCanceled();
                // set progress
                currentCountedBoundaries++;
                subExecCount.setProgress((double) currentCountedBoundaries / (double) boundaryScheme.getNumBoundaries(),
                        "Count for possible boundary " + currentCountedBoundaries + " of " + boundaryScheme.getNumBoundaries());
                // LOGGER.debug("current caim: " + currentCAIM);
                DiscretizationScheme tentativeDS = new DiscretizationScheme(discretizationScheme);
                tentativeDS.insertBound(intervalBoundary.m_value);
                // create the quanta matrix
                QuantaMatrix2D quantaMatrix = new QuantaMatrix2D(tentativeDS, m_classValueToIndexMap);
                // pass the data for filling the matrix
                quantaMatrix.countData(data, columnIndex, m_classifyColumnIndex);
                // calculate the caim
                double caim = quantaMatrix.calculateCaim();
                if (caim > currentCAIM) {
                    currentCAIM = caim;
                    bestBoundary = intervalBoundary;
                }
                intervalBoundary = intervalBoundary.m_next;
            }
            // if there is no best boundary, break the outer while loop
            if (bestBoundary == null) {
                break;
            }
            // in this case accept the best discretization scheme
            if (currentCAIM > globalCAIM || numInsertedBounds < m_classValues.length) {
                int numIntervals = discretizationScheme.getNumIntervals();
                discretizationScheme.insertBound(bestBoundary.m_value);
                // remove the linked list element from the list
                bestBoundary.remove();
                globalCAIM = currentCAIM;
                if (numIntervals < discretizationScheme.getNumIntervals()) {
                    numInsertedBounds++;
                    subExecPerColumn.setProgress("Inserted bound " + numInsertedBounds);
                    // LOGGER.debug("Inserted boundary: "
                    // + bestBoundary.m_value);
                } else {
                    throw new IllegalStateException("Only useful bounds should be inserted: " + bestBoundary.m_value);
                }
            }
            subExecCount.setProgress(1.0D);
        }
        resultSchemes[currentColumn] = discretizationScheme;
        subExecBounds.setProgress(1.0D);
        // ensure the full progress is set for this iteration
        subExecPerColumn.setProgress(1.0D);
        currentColumn++;
    }
    // set the model
    DataTableSpec modelSpec = createModelSpec(m_includedColumnNames, data.getDataTableSpec());
    m_discretizationModel = new DiscretizationModel(resultSchemes, modelSpec);
    // create an output table that replaces the included columns by
    // interval values
    BufferedDataTable resultTable = createResultTable(exec, data, m_discretizationModel);
    // log the runtime of the execute method
    long runtime = System.currentTimeMillis() - startTime;
    LOGGER.debug("Binning runtime: " + (runtime / 1000.0) + " sec.");
    return new PortObject[] { resultTable, m_discretizationModel };
}
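The expensive part of the loop above is re-scanning every candidate boundary on each pass; the candidates themselves are simply the midpoints between adjacent distinct values of the column, which is what createAllIntervalBoundaries builds as a linked list. Below is a minimal, self-contained sketch of that candidate generation in plain Java. It does not use the KNIME BoundaryScheme/LinkedDouble classes, and the class and method names are invented for illustration.

import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

/** Illustrative only: candidate cut points as midpoints of adjacent distinct values. */
public class BoundaryCandidates {

    static List<Double> candidateBoundaries(double[] columnValues) {
        // Distinct values in ascending order.
        TreeSet<Double> distinct = new TreeSet<>();
        for (double v : columnValues) {
            distinct.add(v);
        }
        // The midpoint between each pair of adjacent distinct values is a candidate boundary.
        List<Double> boundaries = new ArrayList<>();
        Double previous = null;
        for (double v : distinct) {
            if (previous != null) {
                boundaries.add((previous + v) / 2.0);
            }
            previous = v;
        }
        return boundaries;
    }

    public static void main(String[] args) {
        System.out.println(candidateBoundaries(new double[] { 1.0, 2.0, 2.0, 4.0, 7.0 }));
        // -> [1.5, 3.0, 5.5]
    }
}

With N distinct values this yields N - 1 candidates, which is why the snippet tracks boundaryScheme.getNumBoundaries() for progress reporting.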
Use of net.imglib2.Interval in project vcell by virtualcell.
The class PlotROIStats, method crop.
private RandomAccessibleInterval<T> crop(RandomAccessibleInterval<T> data, Overlay overlay) {
    int numDimensions = data.numDimensions();
    long[] minDimensions = new long[numDimensions];
    long[] maxDimensions = new long[numDimensions];
    if (numDimensions > 0) {
        minDimensions[0] = (long) overlay.realMin(0);
        maxDimensions[0] = (long) overlay.realMax(0);
    }
    if (numDimensions > 1) {
        // use the overlay's extent along the second axis (index 1)
        minDimensions[1] = (long) overlay.realMin(1);
        maxDimensions[1] = (long) overlay.realMax(1);
    }
    for (int i = 2; i < numDimensions; i++) {
        minDimensions[i] = 0;
        maxDimensions[i] = data.dimension(i) - 1;
    }
    FinalInterval interval = new FinalInterval(minDimensions, maxDimensions);
    RandomAccessibleInterval<T> cropped = ops.transform().crop(data, interval);
    return cropped;
}
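For readers without an ImageJ OpService at hand, the same kind of crop can be expressed with plain ImgLib2 views. This is a minimal sketch, assuming a 100x100 image and a hypothetical ROI bounding box; Views.interval plus Views.zeroMin approximates what ops.transform().crop(data, interval) does in the method above.

import net.imglib2.FinalInterval;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.numeric.integer.UnsignedByteType;
import net.imglib2.view.Views;

/** Illustrative only: restricting a RandomAccessibleInterval to a bounding box. */
public class CropExample {

    public static void main(String[] args) {
        // A 100x100 image; the ROI bounding box is assumed to be [10,20] x [49,59].
        Img<UnsignedByteType> img = ArrayImgs.unsignedBytes(100, 100);
        FinalInterval roi = new FinalInterval(new long[] { 10, 20 }, new long[] { 49, 59 });

        // Views.interval restricts the image to the ROI; zeroMin shifts the result to the origin.
        RandomAccessibleInterval<UnsignedByteType> cropped = Views.zeroMin(Views.interval(img, roi));

        System.out.println(cropped.dimension(0) + " x " + cropped.dimension(1)); // 40 x 40
    }
}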
Use of net.imglib2.Interval in project vcell by virtualcell.
The class VCellResultService, method importCsv.
public Dataset importCsv(File directory) throws FileNotFoundException {
    File[] files = directory.listFiles();
    // TODO: Better handling
    if (files == null)
        return null;
    ArrayList<ArrayList<Float>> timeSeries = new ArrayList<>(files.length);
    Scanner scanner;
    int dataSize = 0;
    for (File file : files) {
        scanner = new Scanner(file);
        scanner.useDelimiter("[,\n]");
        while (scanner.hasNext() && !scanner.hasNextDouble()) {
            scanner.next();
        }
        if (!scanner.hasNextDouble()) {
            scanner.close();
            return null;
        }
        ArrayList<Float> data = new ArrayList<>();
        while (scanner.hasNextDouble()) {
            data.add(scanner.nextFloat());
        }
        scanner.close();
        timeSeries.add(data);
        dataSize = data.size();
    }
    int[] dimensions = { dataSize, timeSeries.size() };
    Img<FloatType> img = new ArrayImgFactory<FloatType>().create(dimensions, new FloatType());
    Cursor<FloatType> cursor = img.localizingCursor();
    while (cursor.hasNext()) {
        cursor.next();
        int xPos = cursor.getIntPosition(0);
        int tPos = cursor.getIntPosition(1);
        Float val = timeSeries.get(tPos).get(xPos);
        cursor.get().set(val);
    }
    Dataset dataset = datasetService.create(img);
    // Drop single dimensions
    @SuppressWarnings("unchecked")
    ImgPlus<FloatType> imgPlus = (ImgPlus<FloatType>) dataset.getImgPlus();
    FinalInterval interval = Intervals.createMinMax(0, 0, imgPlus.dimension(0) - 1, imgPlus.dimension(1) - 1);
    ImgPlus<FloatType> cropped = ops.transform().crop(imgPlus, interval, true);
    dataset.setImgPlus(cropped);
    System.out.println(dataset.numDimensions());
    dataset.setName(directory.getName());
    return dataset;
}
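The core of importCsv is the localizing cursor that maps each pixel position back to a (time, position) entry of the parsed CSV data, plus the Intervals.createMinMax call, which takes all minima followed by all maxima. A minimal sketch of those two pieces follows, with a small hard-coded table standing in for the parsed files; the class name and sample values are invented for illustration.

import net.imglib2.Cursor;
import net.imglib2.FinalInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.util.Intervals;

/** Illustrative only: filling an Img from tabular data and describing its full extent as an Interval. */
public class CsvToImgExample {

    public static void main(String[] args) {
        // Assume 5 spatial samples and 3 time points, as the CSV import above would produce.
        float[][] timeSeries = {
            { 0f, 1f, 2f, 3f, 4f },
            { 1f, 2f, 3f, 4f, 5f },
            { 2f, 3f, 4f, 5f, 6f }
        };
        Img<FloatType> img = ArrayImgs.floats(5, 3);

        // A localizing cursor exposes its position, so each pixel can be looked up in the table.
        Cursor<FloatType> cursor = img.localizingCursor();
        while (cursor.hasNext()) {
            cursor.fwd();
            int x = cursor.getIntPosition(0);
            int t = cursor.getIntPosition(1);
            cursor.get().set(timeSeries[t][x]);
        }

        // createMinMax lists all minima, then all maxima: (minX, minT, maxX, maxT).
        FinalInterval full = Intervals.createMinMax(0, 0, img.dimension(0) - 1, img.dimension(1) - 1);
        System.out.println(Intervals.numElements(full)); // 15
    }
}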
Use of net.imglib2.Interval in project vcell by virtualcell.
The class ConstructTIRFGeometry, method run.
@Override
public void run() {
    // Calculate constant d in the TIRF exponential decay function
    // Convert angle of incidence from degrees to radians
    theta = theta * 2 * Math.PI / 360;
    // Refractive index of glass
    final double n1 = 1.52;
    // Refractive index of cytosol
    final double n2 = 1.38;
    final double d = lambda * Math.pow((Math.pow(n1, 2) * Math.pow(Math.sin(theta), 2) - Math.pow(n2, 2)), -0.5) / (4 * Math.PI);
System.out.println("d: " + d);
final double fluorPerMolecule = 250;
// Get frame of interest to define geometry
long maxX = data.dimension(0) - 1;
long maxY = data.dimension(1) - 1;
Interval interval = Intervals.createMinMax(0, 0, sliceIndex, maxX, maxY, sliceIndex);
RandomAccessibleInterval<T> croppedRAI = ops.transform().crop(data, interval, true);
// Subtract lowest pixel value
IterableInterval<T> dataII = Views.iterable(croppedRAI);
double min = ops.stats().min(dataII).getRealDouble();
Cursor<T> dataCursor = dataII.cursor();
while (dataCursor.hasNext()) {
double val = dataCursor.next().getRealDouble();
dataCursor.get().setReal(val - min);
}
// Perform Gaussian blur
RandomAccessibleInterval<T> blurredRAI = ops.filter().gauss(croppedRAI, 2);
IterableInterval<T> blurredII = Views.iterable(blurredRAI);
// Segment slice by threshold and fill holes
IterableInterval<BitType> thresholded = ops.threshold().huang(blurredII);
Img<BitType> thresholdedImg = ops.convert().bit(thresholded);
RandomAccessibleInterval<BitType> thresholdedRAI = ops.morphology().fillHoles(thresholdedImg);
// Get the largest region
RandomAccessibleInterval<LabelingType<ByteType>> labeling = ops.labeling().cca(thresholdedRAI, ConnectedComponents.StructuringElement.EIGHT_CONNECTED);
LabelRegions<ByteType> labelRegions = new LabelRegions<>(labeling);
Iterator<LabelRegion<ByteType>> iterator = labelRegions.iterator();
LabelRegion<ByteType> maxRegion = iterator.next();
while (iterator.hasNext()) {
LabelRegion<ByteType> currRegion = iterator.next();
if (currRegion.size() > maxRegion.size()) {
maxRegion = currRegion;
}
}
// Generate z index map
double iMax = ops.stats().max(dataII).getRealDouble();
Img<UnsignedShortType> dataImg = ops.convert().uint16(dataII);
Img<UnsignedShortType> zMap = ops.convert().uint16(ops.create().img(dataII));
LabelRegionCursor cursor = maxRegion.localizingCursor();
RandomAccess<UnsignedShortType> zMapRA = zMap.randomAccess();
RandomAccess<UnsignedShortType> dataRA = dataImg.randomAccess();
while (cursor.hasNext()) {
cursor.fwd();
zMapRA.setPosition(cursor);
dataRA.setPosition(cursor);
double val = dataRA.get().getRealDouble();
// Log of 0 is undefined
if (val < 1) {
val = 1;
}
int z = (int) Math.round(-d * Math.log(val / iMax) / zRes);
zMapRA.get().set(z);
}
System.out.println("6");
// Use map to construct 3D geometry
// Add 5 slices of padding on top
int maxZ = (int) ops.stats().max(zMap).getRealDouble() + 5;
long[] resultDimensions = { maxX + 1, maxY + 1, maxZ };
Img<BitType> result = new ArrayImgFactory<BitType>().create(resultDimensions, new BitType());
RandomAccess<BitType> resultRA = result.randomAccess();
System.out.println(maxZ);
cursor.reset();
while (cursor.hasNext()) {
cursor.fwd();
zMapRA.setPosition(cursor);
int zIndex = zMapRA.get().get();
int[] position = { cursor.getIntPosition(0), cursor.getIntPosition(1), zIndex };
while (position[2] < maxZ) {
resultRA.setPosition(position);
resultRA.get().set(true);
position[2]++;
}
}
output = datasetService.create(result);
CalibratedAxis[] axes = new DefaultLinearAxis[] { new DefaultLinearAxis(Axes.X), new DefaultLinearAxis(Axes.Y), new DefaultLinearAxis(Axes.Z) };
output.setAxes(axes);
System.out.println("Done constructing geometry");
}
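The geometry construction hinges on two formulas: the evanescent-field penetration depth d = lambda / (4 * pi * sqrt(n1^2 * sin^2(theta) - n2^2)) and the inversion of I(z) = Imax * exp(-z / d) into a slice index. A standalone sketch of just that arithmetic follows; the wavelength, angle, and z resolution used in main are assumed example values, not taken from the plugin.

/** Illustrative only: TIRF penetration depth and intensity-to-height mapping used above. */
public class TirfDepthExample {

    /** d = lambda / (4 * pi * sqrt(n1^2 * sin^2(theta) - n2^2)), with theta given in degrees. */
    static double penetrationDepth(double lambdaNm, double thetaDegrees, double n1, double n2) {
        double theta = Math.toRadians(thetaDegrees);
        double root = Math.sqrt(n1 * n1 * Math.pow(Math.sin(theta), 2) - n2 * n2);
        return lambdaNm / (4 * Math.PI * root);
    }

    /** Inverts I(z) = Imax * exp(-z / d) to a slice index, rounded to the z resolution. */
    static int zIndex(double intensity, double iMax, double d, double zResNm) {
        double val = Math.max(intensity, 1.0); // log of 0 is undefined, as in the snippet
        return (int) Math.round(-d * Math.log(val / iMax) / zResNm);
    }

    public static void main(String[] args) {
        // Assumed example values: 488 nm excitation, 70 degree incidence, 10 nm z resolution.
        double d = penetrationDepth(488, 70, 1.52, 1.38);
        System.out.println("d = " + d + " nm");
        System.out.println("z at half intensity: " + zIndex(500, 1000, d, 10) + " slices");
    }
}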
Use of net.imglib2.Interval in project vcell by virtualcell.
The class ProjectService, method load.
public Task<Project, String> load(File root) {
    final Task<Project, String> task = new Task<Project, String>() {

        @Override
        protected Project doInBackground() throws Exception {
            Project project = new Project(root.getName());
            String rootPath = root.getAbsolutePath();
            File[] dataFiles = Paths.get(rootPath, "data").toFile().listFiles();
            File[] geometryFiles = Paths.get(rootPath, "geometry").toFile().listFiles();
            File[] modelDirectories = Paths.get(rootPath, "models").toFile().listFiles();
            File[] resultsFiles = Paths.get(rootPath, "results").toFile().listFiles();
            // listFiles() returns null for a missing directory, so guard the count
            int numFiles = (dataFiles == null ? 0 : dataFiles.length)
                    + (geometryFiles == null ? 0 : geometryFiles.length)
                    + (modelDirectories == null ? 0 : modelDirectories.length)
                    + (resultsFiles == null ? 0 : resultsFiles.length);
            int numLoaded = 0;
            if (dataFiles != null) {
                for (File dataFile : dataFiles) {
                    try {
                        setSubtask(dataFile.getName());
                        Dataset data = datasetIOService.open(dataFile.getAbsolutePath());
                        project.getData().add(data);
                        numLoaded++;
                        setProgress(numLoaded * 100 / numFiles);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            if (geometryFiles != null) {
                for (File geometryFile : geometryFiles) {
                    try {
                        setSubtask(geometryFile.getName());
                        Dataset geometry = datasetIOService.open(geometryFile.getAbsolutePath());
                        // Geometry datasets are saved as 8-bit images so we must convert back to 1-bit
                        if (geometry.firstElement() instanceof UnsignedByteType) {
                            @SuppressWarnings("unchecked")
                            Img<UnsignedByteType> img = (Img<UnsignedByteType>) geometry.getImgPlus().getImg();
                            Img<BitType> converted = opService.convert().bit(img);
                            ImgPlus<BitType> convertedImgPlus = new ImgPlus<>(converted, geometry.getName());
                            geometry.setImgPlus(convertedImgPlus);
                        }
                        project.getGeometry().add(geometry);
                        numLoaded++;
                        setProgress(numLoaded * 100 / numFiles);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            if (modelDirectories != null) {
                for (File modelDirectory : modelDirectories) {
                    setSubtask(modelDirectory.getName());
                    SBMLDocument sbmlDocument = null;
                    BufferedImage image = null;
                    File[] modelFiles = modelDirectory.listFiles();
                    System.out.println(modelFiles.length);
                    // Invalid model directory
                    if (modelFiles.length > 2)
                        continue;
                    for (File modelFile : modelFiles) {
                        System.out.println(modelFile.getName());
                        if (FilenameUtils.getExtension(modelFile.getName()).equals("xml")) {
                            sbmlDocument = new SBMLReader().readSBML(modelFile);
                            System.out.println("Loaded sbml");
                        } else if (FilenameUtils.getExtension(modelFile.getName()).equals("png")) {
                            image = ImageIO.read(modelFile);
                            System.out.println("Loaded image");
                        }
                    }
                    if (sbmlDocument != null) {
                        VCellModel vCellModel = new VCellModel(modelDirectory.getName(), null, sbmlDocument);
                        vCellModel.setImage(image);
                        project.getModels().add(vCellModel);
                        System.out.println("Added model");
                    }
                    numLoaded++;
                    setProgress(numLoaded * 100 / numFiles);
                }
            }
            if (resultsFiles != null) {
                for (File resultsFile : resultsFiles) {
                    try {
                        setSubtask(resultsFile.getName());
                        Dataset results = datasetIOService.open(resultsFile.getAbsolutePath());
                        // Loading 1-dimensional tif images adds a dimension
                        // so must crop out empty dimensions
                        @SuppressWarnings("unchecked")
                        ImgPlus<T> imgPlus = (ImgPlus<T>) results.getImgPlus();
                        int numDimensions = imgPlus.numDimensions();
                        long[] dimensions = new long[2 * imgPlus.numDimensions()];
                        for (int i = 0; i < numDimensions; i++) {
                            dimensions[i] = 0;
                            dimensions[i + numDimensions] = imgPlus.dimension(i) - 1;
                        }
                        FinalInterval interval = Intervals.createMinMax(dimensions);
                        ImgPlus<T> cropped = opService.transform().crop(imgPlus, interval, true);
                        results.setImgPlus(cropped);
                        project.getResults().add(results);
                        numLoaded++;
                        setProgress(numLoaded * 100 / numFiles);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            currentProjectRoot = root;
            return project;
        }
    };
    return task;
}
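The results branch above builds a full-extent interval and crops with dropSingleDimensions = true purely to squeeze out size-1 axes. When Ops is not available, ImgLib2's Views.dropSingletonDimensions achieves the same squeeze directly; this is a minimal sketch, assuming a 200x1 import as an example rather than an actual loaded dataset.

import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.view.Views;

/** Illustrative only: removing the size-1 dimensions that a 1D TIFF import can introduce. */
public class DropSingletonExample {

    public static void main(String[] args) {
        // Assume the loaded results image came back as 200 x 1 (an extra singleton axis).
        Img<FloatType> loaded = ArrayImgs.floats(200, 1);

        // Views.dropSingletonDimensions has the same effect as cropping over the full
        // extent with dropSingleDimensions = true, as done in the load(...) method above.
        RandomAccessibleInterval<FloatType> squeezed = Views.dropSingletonDimensions(loaded);

        System.out.println(loaded.numDimensions() + " -> " + squeezed.numDimensions()); // 2 -> 1
    }
}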