Use of ini.trakem2.Project in project TrakEM2 by trakem2:
the class Compare, method variabilityAnalysis.
/**
* @param reference_project If null, the first project in the Project.getProjects() list is used.
* @param regex A String (may be null) to filter objects by, limiting what gets processed.
* If regex is not null, only ProjectThing nodes matching the regex are analyzed (shallow: their children are not inspected, but pipes are built from all of them).
* @param ignore An array of strings used to build an exclusion regex; chains whose titles match any of them are skipped.
* @param show_cata_dialog Whether to show the CATAParameters setup dialog.
* @param generate_plots Whether to generate the variability plots at all.
* @param show_plots If generate_plots, whether to show the plots in a stack image window or to save them.
* @param plot_dir_ The directory to save plots into when not showing them; if null, a directory chooser is presented.
* @param show_3D Whether to show any 3D data.
* @param show_condensed_3D If show_3D, whether to show the condensed vector strings, i.e. the "average" pipes.
* @param show_sources_3D If show_3D, whether to show the source pipes from which the condensed vector string was generated.
* @param sources_color_table The color to assign to the pipes of each Project.
* @param show_envelope_3D If show_3D, whether to generate the variability envelope.
* @param envelope_alpha If show_envelope_3D, the alpha value of the envelope, between 0 (fully transparent) and 1 (fully opaque).
* @param delta_envelope The delta to resample the envelope to. When smaller than or equal to 1, no envelope resampling occurs.
* @param envelope_type The envelope type, stored into CATAParameters and used by makeEnvelope.
* @param show_axes_3D If show_3D, whether to display the reference axes as well.
* @param heat_map If show_3D, whether to color the variability with a Fire LUT.
* If not show_envelope_3D, the variability is shown as color-coded 3D spheres placed at the entry point to the neuropile.
* @param map_condensed If not null, all condensed VectorString3D are put into this map.
* @param projects The projects to use; if null, all open projects are used.
* A usage sketch with illustrative arguments follows the method.
*/
public static Bureaucrat variabilityAnalysis(final Project reference_project, final String regex, final String[] ignore, final boolean show_cata_dialog, final boolean generate_plots, final boolean show_plots, final String plot_dir_, final boolean show_3D, final boolean show_condensed_3D, final boolean show_sources_3D, final Map<Project, Color> sources_color_table, final boolean show_envelope_3D, final float envelope_alpha, final double delta_envelope, final int envelope_type, final boolean show_axes_3D, final boolean heat_map, final Map<String, VectorString3D> map_condensed, final Project[] projects) {
// gather all open projects
final Project[] p = null == projects ? Project.getProjects().toArray(new Project[0]) : projects;
// make the reference_project be the first in the array
if (null != reference_project && reference_project != p[0]) {
for (int i = 0; i < p.length; i++) {
if (reference_project == p[i]) {
p[i] = p[0];
p[0] = reference_project;
break;
}
}
}
final Worker worker = new Worker("Comparing all to all") {
@Override
public void run() {
startedWorking();
try {
Utils.log2("Asking for CATAParameters...");
final CATAParameters cp = new CATAParameters();
cp.regex = regex;
cp.delta_envelope = delta_envelope;
cp.envelope_type = envelope_type;
if (show_cata_dialog && !cp.setup(false, regex, true, true)) {
finishedWorking();
return;
}
// so source points are stored in VectorString3D for each resampled and interpolated point
cp.with_source = true;
// Store a series of results, depending on options
final HashMap<String, Display3D> results = new HashMap<String, Display3D>();
String plot_dir = plot_dir_;
if (generate_plots && !show_plots) {
// Save plots
if (null == plot_dir) {
final DirectoryChooser dc = new DirectoryChooser("Choose plots directory");
plot_dir = dc.getDirectory();
if (null == plot_dir) {
finishedWorking();
return;
}
}
if (IJ.isWindows())
plot_dir = plot_dir.replace('\\', '/');
if (!plot_dir.endsWith("/"))
plot_dir += "/";
}
Utils.log2("Gathering chains...");
// Gather chains that do not match the ignore regexes
// will transform them as well to the reference found in the first project in the p array
Object[] ob = gatherChains(p, cp, ignore);
ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
// to keep track of each project's chains
final ArrayList[] p_chains = (ArrayList[]) ob[1];
ob = null;
if (null == chains) {
finishedWorking();
return;
}
Utils.log2("Collecting bundles...");
final HashMap<Project, HashMap<String, VectorString3D>> axes = new HashMap<Project, HashMap<String, VectorString3D>>();
// Sort out into groups by unique names of lineage bundles
final HashMap<String, ArrayList<Chain>> bundles = new HashMap<String, ArrayList<Chain>>();
for (final Chain chain : chains) {
String title = chain.getCellTitle();
final String t = title.toLowerCase();
// unnamed
if (0 == t.indexOf('[') || 0 == t.indexOf('#'))
continue;
Utils.log("Accepting " + title);
title = title.substring(0, title.indexOf(' '));
// lineage bundle instance chains
ArrayList<Chain> bc = bundles.get(title);
if (null == bc) {
bc = new ArrayList<Chain>();
bundles.put(title, bc);
}
bc.add(chain);
}
Utils.log2("Found " + bundles.size() + " bundles.");
chains = null;
if (null != cp.regex && show_axes_3D && axes.size() < 3) {
// Must find the Mushroom Body lobes separately
final String cp_regex = cp.regex;
cp.regex = "mb";
final Object[] o = gatherChains(p, cp, ignore);
final ArrayList<Chain> lobes = (ArrayList<Chain>) o[0];
Utils.logAll("Found " + lobes.size() + " chains for lobes");
for (final Chain chain : lobes) {
final String t = chain.getCellTitle().toLowerCase();
if (-1 != t.indexOf("peduncle") || -1 != t.indexOf("medial lobe") || -1 != t.indexOf("dorsal lobe")) {
Utils.logAll("adding " + t);
final Project pr = chain.pipes.get(0).getProject();
HashMap<String, VectorString3D> m = axes.get(pr);
if (null == m) {
m = new HashMap<String, VectorString3D>();
axes.put(pr, m);
}
m.put(t, chain.vs);
continue;
}
}
cp.regex = cp_regex;
} else {
Utils.logAll("Not: cp.regex = " + cp.regex + " show_axes_3D = " + show_axes_3D + " axes.size() = " + axes.size());
}
final HashMap<String, VectorString3D> condensed = new HashMap<String, VectorString3D>();
Utils.log2("Condensing each bundle...");
// Condense each into a single VectorString3D
for (final Map.Entry<String, ArrayList<Chain>> entry : bundles.entrySet()) {
final ArrayList<Chain> bc = entry.getValue();
if (bc.size() < 2) {
Utils.log2("Skipping single: " + entry.getKey());
continue;
}
final VectorString3D[] vs = new VectorString3D[bc.size()];
for (int i = 0; i < vs.length; i++) vs[i] = bc.get(i).vs;
final VectorString3D c = condense(cp, vs, this);
c.setCalibration(p[0].getRootLayerSet().getCalibrationCopy());
condensed.put(entry.getKey(), c);
if (this.hasQuitted())
return;
}
// Store:
if (null != map_condensed) {
map_condensed.putAll(condensed);
}
if (generate_plots) {
Utils.log2("Plotting stdDev for each condensed bundle...");
// Y axis: the stdDev at each point, computed from the group of points that contribute to each
for (final Map.Entry<String, VectorString3D> e : condensed.entrySet()) {
final String name = e.getKey();
final VectorString3D c = e.getValue();
final Plot plot = makePlot(cp, name, c);
// FAILS//plot.addLabel(10, cp.plot_height-5, name); // must be added after setting size
if (show_plots)
plot.show();
else if (null != plot_dir)
new FileSaver(plot.getImagePlus()).saveAsPng(plot_dir + name.replace('/', '-') + ".png");
}
}
if (show_3D) {
final HashMap<String, Color> heat_table = new HashMap<String, Color>();
if (heat_map || show_envelope_3D) {
// Create a Fire LUT
final ImagePlus lutimp = new ImagePlus("lut", new ByteProcessor(4, 4));
IJ.run(lutimp, "Fire", "");
final IndexColorModel icm = (IndexColorModel) lutimp.getProcessor().getColorModel();
final byte[] reds = new byte[256];
final byte[] greens = new byte[256];
final byte[] blues = new byte[256];
icm.getReds(reds);
icm.getGreens(greens);
icm.getBlues(blues);
final List<String> names = new ArrayList<String>(bundles.keySet());
Collections.sort(names);
// find max stdDev
double max = 0;
final HashMap<String, Double> heats = new HashMap<String, Double>();
for (final String name : names) {
final VectorString3D vs_merged = condensed.get(name);
if (null == vs_merged) {
Utils.logAll("WARNING could not find a condensed pipe for " + name);
continue;
}
final double[] stdDev = vs_merged.getStdDevAtEachPoint();
// double avg = 0;
// for (int i=0; i<stdDev.length; i++) avg += stdDev[i];
// avg = avg/stdDev.length;
Arrays.sort(stdDev);
// median is more representative than average
final double median = stdDev[stdDev.length / 2];
if (max < median)
max = median;
heats.put(name, median);
}
for (final Map.Entry<String, Double> e : heats.entrySet()) {
final String name = e.getKey();
final double median = e.getValue();
// scale between 0 and max to get a Fire LUT color:
int index = (int) ((median / max) * 255);
if (index > 255)
index = 255;
final Color color = new Color(0xff & reds[index], 0xff & greens[index], 0xff & blues[index]);
Utils.log2(new StringBuilder(name).append('\t').append(median).append('\t').append(reds[index]).append('\t').append(greens[index]).append('\t').append(blues[index]).toString());
heat_table.put(name, color);
}
}
final LayerSet common_ls = new LayerSet(p[0], -1, "Common", 10, 10, 0, 0, 0, 512, 512, false, 2, new AffineTransform());
final Display3D d3d = Display3D.get(common_ls);
float env_alpha = envelope_alpha;
if (env_alpha < 0) {
Utils.log2("WARNING envelope_alpha is invalid: " + envelope_alpha + "\n Using 0.4f instead");
env_alpha = 0.4f;
} else if (env_alpha > 1)
env_alpha = 1.0f;
for (final String name : bundles.keySet()) {
final ArrayList<Chain> bc = bundles.get(name);
final VectorString3D vs_merged = condensed.get(name);
if (null == vs_merged) {
Utils.logAll("WARNING: could not find a condensed vs for " + name);
continue;
}
if (show_sources_3D) {
if (null != sources_color_table) {
final HashSet<String> titles = new HashSet<String>();
for (final Chain chain : bc) {
final Color c = sources_color_table.get(chain.getRoot().getProject());
final String title = chain.getCellTitle();
String t = title;
int i = 2;
while (titles.contains(t)) {
t = title + "-" + i;
i += 1;
}
titles.add(t);
Display3D.addMesh(common_ls, chain.vs, t, null != c ? c : Color.gray);
}
} else {
for (final Chain chain : bc) Display3D.addMesh(common_ls, chain.vs, chain.getCellTitle(), Color.gray);
}
}
if (show_condensed_3D) {
Display3D.addMesh(common_ls, vs_merged, name + "-condensed", heat_map ? heat_table.get(name) : Color.red);
}
if (show_envelope_3D) {
double[] widths = makeEnvelope(cp, vs_merged);
if (cp.delta_envelope > 1) {
vs_merged.addDependent(widths);
vs_merged.resample(cp.delta_envelope);
widths = vs_merged.getDependent(0);
}
Display3D.addMesh(common_ls, vs_merged, name + "-envelope", heat_map ? heat_table.get(name) : Color.red, widths, env_alpha);
} else if (heat_map) {
// Show spheres in place of envelopes, at the starting tip (neuropile entry point)
final double x = vs_merged.getPoints(0)[0];
final double y = vs_merged.getPoints(1)[0];
final double z = vs_merged.getPoints(2)[0];
final double r = 10;
final Color color = heat_table.get(name);
if (null == color) {
Utils.logAll("WARNING: heat table does not have a color for " + name);
continue;
}
// The returned Content handle is not used; add the heat-colored sphere directly
d3d.getUniverse().addMesh(ij3d.Mesh_Maker.createSphere(x, y, z, r), new Color3f(color), name + "-sphere", 1);
}
}
if (show_axes_3D) {
for (int i = 0; i < p.length; i++) {
final Map<String, VectorString3D> m = axes.get(p[i]);
if (null == m) {
Utils.log2("No axes found for project " + p[i]);
continue;
}
for (final Map.Entry<String, VectorString3D> e : m.entrySet()) {
Display3D.addMesh(common_ls, e.getValue(), e.getKey() + "-" + i, Color.gray);
}
}
}
results.put("d3d", Display3D.get(common_ls));
}
this.result = results;
Utils.log2("Done.");
} catch (final Exception e) {
IJError.print(e);
} finally {
finishedWorking();
}
}
};
return Bureaucrat.createAndStart(worker, p[0]);
}
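A minimal usage sketch for the entry point above, not taken from the TrakEM2 sources: the package names in the comments, the regex, the plot directory and the numeric values are all illustrative assumptions; the fragment is meant to run with TrakEM2 on the classpath and at least one project open.

// Sketch only: assumed imports, adjust package names to your TrakEM2 build
// import java.awt.Color; import java.util.HashMap; import java.util.Map;
// import ini.trakem2.Project; import ini.trakem2.analysis.Compare;
// import ini.trakem2.utils.Bureaucrat; import ini.trakem2.vector.VectorString3D;
final Project[] ps = Project.getProjects().toArray(new Project[0]);
final Map<Project, Color> colors = new HashMap<Project, Color>();
for (final Project pr : ps) colors.put(pr, Color.gray);   // one color per source project
final Map<String, VectorString3D> condensed = new HashMap<String, VectorString3D>();
final Bureaucrat burro = Compare.variabilityAnalysis(
        ps[0],          // reference_project: first open project acts as reference
        "lineage.*",    // regex: illustrative filter
        null,           // ignore: exclude nothing
        false,          // show_cata_dialog: use default CATAParameters
        true,           // generate_plots
        false,          // show_plots: save them instead
        "/tmp/plots/",  // plot_dir_ (illustrative path)
        true,           // show_3D
        true,           // show_condensed_3D
        false,          // show_sources_3D
        colors,         // sources_color_table
        true,           // show_envelope_3D
        0.4f,           // envelope_alpha
        1.0,            // delta_envelope (<= 1: no envelope resampling)
        1,              // envelope_type (illustrative)
        true,           // show_axes_3D
        true,           // heat_map
        condensed,      // map_condensed: receives the condensed VectorString3D per bundle
        ps);            // projects: all open projects
try {
    burro.join();       // wait for the worker thread, as the space-exploration code below does
} catch (final Exception e) {
    e.printStackTrace();
}

Once the worker finishes, its result (a HashMap holding the Display3D under the key "d3d") can be read via burro.getWorker().getResult(), mirroring how reliabilityAnalysisSpaceExploration reads its worker's result below.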
Use of ini.trakem2.Project in project TrakEM2 by trakem2:
the class Compare, method reliabilityAnalysisSpaceExploration.
// Graph data generation (ranges follow the constants below):
// - X axis: resampling distance (delta), from 0.4 to 20 microns, in increments of 0.1 microns.
// - Y axis: weight for deletion and insertion, from 0 to 2, in increments of 0.1.
// - Z1 axis: the percentage of properly scored first lineages (currently 75%).
// - Z2 axis: the percentage of cases where the correct match is within the top 5 (currently 99%).
// A sketch for reading back the generated data file follows the method.
public static final Bureaucrat reliabilityAnalysisSpaceExploration(final String[] ignore) {
final double MIN_DELTA = 0.4;
final double MAX_DELTA = 20;
final double INC_DELTA = 0.1;
final double MIN_WEIGHT = 0;
final double MAX_WEIGHT = 2;
final double INC_WEIGHT = 0.1;
return Bureaucrat.createAndStart(new Worker.Task("Space Exploration") {
@Override
public void exec() {
final File f = new File(System.getProperty("user.dir") + "/lineage_space_exploration.data");
OutputStreamWriter dos = null;
try {
// encoding in Latin1
dos = new OutputStreamWriter(new BufferedOutputStream(new FileOutputStream(f)), "8859_1");
for (double delta = MIN_DELTA; delta <= (MAX_DELTA + INC_DELTA / 2); delta += INC_DELTA) {
for (double weight = MIN_WEIGHT; weight <= (MAX_WEIGHT + INC_WEIGHT / 2); weight += INC_WEIGHT) {
// WM = 1
final Bureaucrat b = Compare.reliabilityAnalysis(ignore, false, false, false, delta, weight, weight, 1);
b.join();
final double[] result = (double[]) b.getWorker().getResult();
final StringBuilder sb = new StringBuilder();
sb.append(delta).append('\t').append(weight).append('\t').append(result[0]).append('\t').append(result[1]).append('\n');
dos.write(sb.toString());
// so I get to see something before the whole giant buffer is full
dos.flush();
Utils.log2("===========================\n\n");
// Utils.log2("delta: " + delta + " weight: " + weight + " top_one: " + result[0] + " top_5: " + result[1]);
Utils.log2(sb.toString());
Utils.log2("===========================\n\n");
}
}
dos.flush();
dos.close();
} catch (final Exception e) {
// Report the original failure instead of silently swallowing it
IJError.print(e);
// dos may still be null if opening the output file failed
if (null != dos) {
try {
dos.close();
} catch (final Exception ee) {
ee.printStackTrace();
}
}
}
}
}, Project.getProjects().toArray(new Project[0]));
}
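Each line that the loop writes is tab-separated: delta, weight, then the two percentages described in the comment above. A small, self-contained sketch for reading the file back (the file name comes from the code above; the class name and everything else here is illustrative):

import java.io.BufferedReader;
import java.io.FileReader;

public class ReadLineageSpaceExploration {
    public static void main(final String[] args) throws Exception {
        final String path = System.getProperty("user.dir") + "/lineage_space_exploration.data";
        final BufferedReader br = new BufferedReader(new FileReader(path));
        try {
            String line;
            while (null != (line = br.readLine())) {
                final String[] cols = line.split("\t");
                final double delta = Double.parseDouble(cols[0]);    // resampling distance
                final double weight = Double.parseDouble(cols[1]);   // deletion/insertion weight
                final double topOne = Double.parseDouble(cols[2]);   // properly scored first lineages
                final double topFive = Double.parseDouble(cols[3]);  // correct match within the top 5
                System.out.println(delta + "\t" + weight + "\t" + topOne + "\t" + topFive);
            }
        } finally {
            br.close();
        }
    }
}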
Use of ini.trakem2.Project in project TrakEM2 by trakem2:
the class Compare, method gatherChains.
/**
* Gathers chains for all projects considering cp.regex, and transforms them all relative to the reference Project p[0].
* Chains whose titles match any entry in the ignore array are skipped.
* A usage sketch follows the method.
*
* @return an Object[] of { the list of all chains, an ArrayList[] of the chains grouped per source project }, or null when the reference axes cannot be found for the chosen transform type.
*/
public static final Object[] gatherChains(final Project[] p, final CATAParameters cp, final String[] ignore) throws Exception {
String regex_exclude = null;
if (null != ignore) {
final StringBuilder sb = new StringBuilder();
for (final String ig : ignore) {
sb.append("(.*").append(ig).append(".*)|");
}
sb.setLength(sb.length() - 1);
regex_exclude = sb.toString();
}
Utils.logAll("Compare/gatherChains: using ignore string: " + regex_exclude);
Utils.logAll("Compare/gatherChains: using regex: " + cp.regex);
// gather all chains
// to keep track of each project's chains
final ArrayList[] p_chains = new ArrayList[p.length];
final ArrayList<Chain> chains = new ArrayList<Chain>();
for (int i = 0; i < p.length; i++) {
// for each project:
if (null == cp.regex) {
p_chains[i] = createPipeChains(p[i].getRootProjectThing(), p[i].getRootLayerSet(), regex_exclude);
} else {
// Search (shallow) for cp.regex matches
for (final ProjectThing pt : p[i].getRootProjectThing().findChildren(cp.regex, regex_exclude, true)) {
final ArrayList<Chain> ac = createPipeChains(pt, p[i].getRootLayerSet(), regex_exclude);
if (null == p_chains[i])
p_chains[i] = ac;
else
p_chains[i].addAll(ac);
}
// empty
if (null == p_chains[i])
p_chains[i] = new ArrayList<Chain>();
}
chains.addAll(p_chains[i]);
// calibrate
final Calibration cal = p[i].getRootLayerSet().getCalibrationCopy();
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) chain.vs.calibrate(cal);
}
final int n_chains = chains.size();
// register all, or relative
if (4 == cp.transform_type) {
// compute global average delta
if (0 == cp.delta) {
for (final Chain chain : chains) {
cp.delta += (chain.vs.getAverageDelta() / n_chains);
}
}
Utils.log2("Using delta: " + cp.delta);
for (final Chain chain : chains) {
// BEFORE making it relative
chain.vs.resample(cp.delta, cp.with_source);
chain.vs.relative();
}
} else {
if (3 == cp.transform_type) {
// '3' means moving least squares computed from 3D landmarks
Utils.log2("Moving Least Squares Registration based on common fiducial points");
// Find fiducial points, if any
final HashMap<Project, Map<String, Tuple3d>> fiducials = new HashMap<Project, Map<String, Tuple3d>>();
for (final Project pr : p) {
final Set<ProjectThing> fids = pr.getRootProjectThing().findChildrenOfTypeR("fiducial_points");
if (null == fids || 0 == fids.size()) {
Utils.log("No fiducial points found in project: " + pr);
} else {
// the first fiducial group
fiducials.put(pr, Compare.extractPoints(fids.iterator().next()));
}
}
if (!fiducials.isEmpty()) {
// Register all VectorString3D relative to the first project:
final List<VectorString3D> lvs = new ArrayList<VectorString3D>();
final Calibration cal2 = p[0].getRootLayerSet().getCalibrationCopy();
for (final Chain chain : chains) {
final Project pr = chain.pipes.get(0).getProject();
// first project is reference, no need to transform.
if (pr == p[0])
continue;
lvs.clear();
lvs.add(chain.vs);
chain.vs = transferVectorStrings(lvs, fiducials.get(pr), fiducials.get(p[0])).get(0);
// Set (but do not apply!) the calibration of the reference project
chain.vs.setCalibration(cal2);
}
}
} else if (cp.transform_type < 3) {
// '0', '1' and '2' involve a 3D affine computed from the 3 axes
// no need //VectorString3D[][] vs_axes = new VectorString3D[p.length][];
Vector3d[][] o = new Vector3d[p.length][];
for (int i = 0; i < p.length; i++) {
// 1 - find pipes to work as axes for each project
final ArrayList<ZDisplayable> pipes = p[i].getRootLayerSet().getZDisplayables(Line3D.class, true);
final String[] pipe_names = new String[pipes.size()];
for (int k = 0; k < pipes.size(); k++) {
pipe_names[k] = p[i].getMeaningfulTitle(pipes.get(k));
}
final int[] s = findFirstXYZAxes(cp.preset, pipes, pipe_names);
// if axes are -1, forget it: not found
if (-1 == s[0] || -1 == s[1] || -1 == s[2]) {
Utils.log("Can't find axes for project " + p[i]);
o = null;
return null;
}
// obtain axes and origin
// o[0] will be null for the first project, then non-null and acting as the reference for the others.
final Object[] pack = obtainOrigin(new Line3D[] { (Line3D) pipes.get(s[0]), (Line3D) pipes.get(s[1]), (Line3D) pipes.get(s[2]) }, cp.transform_type, o[0]);
// no need //vs_axes[i] = (VectorString3D[])pack[0];
o[i] = (Vector3d[]) pack[1];
}
/* // OLD WAY
// match the scales to make the largest be 1.0
final double scaling_factor = VectorString3D.matchOrigins(o, transform_type);
Utils.log2("matchOrigins scaling factor: " + scaling_factor + " for transform_type " + transform_type);
*/
// transform all except the first (which acts as reference)
final Transform3D M_ref = Compare.createTransform(o[0]);
for (int i = 1; i < p.length; i++) {
final Vector3d trans = new Vector3d(-o[i][3].x, -o[i][3].y, -o[i][3].z);
final Transform3D M_query = Compare.createTransform(o[i]);
// The transfer T transform: from query space to reference space.
final Transform3D T = new Transform3D(M_ref);
T.mulInverse(M_query);
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
// in place
chain.vs.transform(T);
}
}
}
// compute global average delta, after correcting calibration and transformation
if (0 == cp.delta) {
for (final Chain chain : chains) {
cp.delta += (chain.vs.getAverageDelta() / n_chains);
}
}
Utils.log2("Using delta: " + cp.delta);
// After calibration and transformation, resample all to the same delta
for (final Chain chain : chains) chain.vs.resample(cp.delta, cp.with_source);
}
return new Object[] { chains, p_chains };
}
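A brief sketch of how the returned pair is typically unpacked, mirroring the call inside variabilityAnalysis above; the casts are unchecked, as in the original, and the surrounding setup (the Project[] p and the Compare/Chain/CATAParameters classes used throughout this page) is assumed:

final CATAParameters cp = new CATAParameters();
cp.regex = null;            // null regex: take every pipe chain
cp.with_source = true;      // keep source points through resampling, as variabilityAnalysis does
final Object[] ob = Compare.gatherChains(p, cp, null);   // declared to throw Exception
if (null != ob) {           // null when reference axes are missing for the chosen transform type
    final ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];   // all chains, calibrated, registered and resampled
    final ArrayList[] p_chains = (ArrayList[]) ob[1];           // the same chains, grouped per source project
}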
Use of org.openstack4j.model.identity.v3.Project in project openstack4j by ContainX:
the class KeystoneProjectServiceTests, method projects_getByName_not_exist_test.
public void projects_getByName_not_exist_test() throws Exception {
respondWith(JSON_PROJECTS_GET_BY_NAME_EMPTY);
Project project = osv3().identity().projects().getByName(PROJECT_NAME, PROJECT_DOMAIN_ID);
assertNull(project);
}
Use of org.openstack4j.model.identity.v3.Project in project openstack4j by ContainX:
the class KeystoneProjectServiceTests, method projects_crud_test.
public void projects_crud_test() throws Exception {
Project project = Builders.project().name(PROJECT_NAME).description(PROJECT_DESCRIPTION).domainId(PROJECT_DOMAIN_ID).setExtra(PROJECT_EXTRA_KEY_1, PROJECT_EXTRA_VALUE_1).enabled(true).build();
respondWith(JSON_PROJECTS_CREATE);
Project newProject = osv3().identity().projects().create(project);
assertEquals(newProject.getName(), PROJECT_NAME);
assertEquals(newProject.getDomainId(), PROJECT_DOMAIN_ID);
assertEquals(newProject.getDescription(), PROJECT_DESCRIPTION);
assertEquals(newProject.getExtra(PROJECT_EXTRA_KEY_1), PROJECT_EXTRA_VALUE_1);
PROJECT_ID = newProject.getId();
respondWith(JSON_PROJECTS_GET_BYID);
Project project_setToUpdate = osv3().identity().projects().get(PROJECT_ID);
respondWith(JSON_PROJECTS_UPDATE);
Project updatedProject = osv3().identity().projects().update(project_setToUpdate.toBuilder().description(PROJECT_DESCRIPTION_UPDATE).setExtra(PROJECT_EXTRA_KEY_2, PROJECT_EXTRA_VALUE_2).build());
assertEquals(updatedProject.getId(), PROJECT_ID);
assertEquals(updatedProject.getName(), PROJECT_NAME);
assertEquals(updatedProject.getDomainId(), PROJECT_DOMAIN_ID);
assertEquals(updatedProject.getDescription(), PROJECT_DESCRIPTION_UPDATE);
assertEquals(updatedProject.getExtra(PROJECT_EXTRA_KEY_1), PROJECT_EXTRA_VALUE_1);
assertEquals(updatedProject.getExtra(PROJECT_EXTRA_KEY_2), PROJECT_EXTRA_VALUE_2);
}
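The CRUD test above stops at update. A deletion in the same fluent style could look like the sketch below; it assumes the ProjectService exposes delete(String) returning an ActionResponse and that the test harness has a respondWith(int) overload for mocking an empty success reply, neither of which is shown on this page:

// import org.openstack4j.model.common.ActionResponse;   // assumed import
respondWith(204);                                        // assumed overload: mock an empty 204 reply
ActionResponse response = osv3().identity().projects().delete(PROJECT_ID);
assertTrue(response.isSuccess());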