Use of gaiasky.data.octreegen.MetadataBinaryIO in the project gaiasky by langurmonkey.
In the class OctreeGeneratorRun, the method generateOctree:
private OctreeNode generateOctree() throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException {
    long startMs = TimeUtils.millis();
    OctreeGeneratorParams ogp = new OctreeGeneratorParams(maxPart, postprocess, childCount, parentCount);
    IOctreeGenerator og = new OctreeGeneratorMag(ogp);
    List<IParticleRecord> listLoader = null, list;
    Map<Long, Integer> xmatchTable = null;
    long[] countsPerMagGaia = null;
    //
    if (loaderClass != null) {
        String fullLoaderClass = "gaiasky.data.group." + loaderClass;
        IStarGroupDataProvider loader = (IStarGroupDataProvider) Class.forName(fullLoaderClass).getDeclaredConstructor().newInstance();
        loader.setOutputFormatVersion(outputVersion);
        loader.setColumns(columns);
        loader.setParallaxErrorFactorFaint(plxerrfaint);
        loader.setParallaxErrorFactorBright(plxerrbright);
        loader.setParallaxZeroPoint(plxzeropoint);
        loader.setFileNumberCap(fileNumCap);
        loader.setStarNumberCap(starNumCap);
        loader.setDistanceCap(distPcCap);
        loader.setAdditionalFiles(additionalFiles);
        loader.setRUWECap(ruwe);
        countsPerMagGaia = loader.getCountsPerMag();
        if (hip != null && xmatchFile != null && !xmatchFile.isEmpty()) {
            // Load xmatchTable
            xmatchTable = readXmatchTable(xmatchFile);
            if (!xmatchTable.isEmpty()) {
                // IDs which must be loaded regardless (we need them to update x-matched HIP stars)
                loader.setMustLoadIds(new HashSet<>(xmatchTable.keySet()));
            }
        }
        /* LOAD CATALOG */
        listLoader = loader.loadData(input);
    }
    //
    if (hip != null) {
        STILDataProvider stil = new STILDataProvider();
        // All hip stars for which we have a Gaia star, bypass plx >= 0 condition in STILDataProvider
        if (xmatchTable != null && !xmatchTable.isEmpty()) {
            Set<Long> mustLoad = new HashSet<>();
            for (int hipNumber : xmatchTable.values()) {
                mustLoad.add(Long.valueOf(hipNumber));
            }
            stil.setMustLoadIds(mustLoad);
        }
        List<IParticleRecord> listHip = stil.loadData(hip);
        // Update HIP names using external source, if needed
        if (hipNamesDir != null) {
            HipNames hipNames = new HipNames();
            hipNames.load(Paths.get(hipNamesDir));
            Map<Integer, Array<String>> hn = hipNames.getHipNames();
            for (IParticleRecord pb : listHip) {
                IParticleRecord star = pb;
                if (hn.containsKey(star.hip())) {
                    Array<String> names = hn.get(star.hip());
                    for (String name : names)
                        star.addName(name);
                }
            }
        }
        // Combine counts per magnitude
        long[] countsPerMagHip = stil.getCountsPerMag();
        combineCountsPerMag(countsPerMagGaia, countsPerMagHip);
        // Create HIP map
        Map<Integer, IParticleRecord> hipMap = new HashMap<>();
        for (IParticleRecord star : listHip) {
            hipMap.put(star.hip(), star);
        }
        // Check x-match file
        int hipnum = listHip.size();
        int starhits = 0;
        int notFoundHipStars = 0;
        Vector3d aux1 = new Vector3d();
        Vector3d aux2 = new Vector3d();
        if (listLoader != null) {
            for (IParticleRecord pb : listLoader) {
                IParticleRecord gaiaStar = pb;
                // Check if star is also in HIP catalog
                if (xmatchTable == null || !xmatchTable.containsKey(gaiaStar.id())) {
                    // No hit, add to main list
                    listHip.add(gaiaStar);
                } else {
                    // Update hipStar using gaiaStar data, only when:
                    int hipId = xmatchTable.get(gaiaStar.id());
                    if (hipMap.containsKey(hipId)) {
                        // Hip Star
                        IParticleRecord hipStar = hipMap.get(hipId);
                        // Check parallax errors
                        Double gaiaPllxErr = gaiaStar.getExtra("pllx_err");
                        Double hipPllxErr = hipStar.getExtra("e_plx");
                        if (gaiaPllxErr <= hipPllxErr) {
                            // SIZE
                            float size = gaiaStar.size();
                            // POSITION
                            double x = gaiaStar.x(), y = gaiaStar.y(), z = gaiaStar.z();
                            aux1.set(x, y, z);
                            boolean negativeGaiaDistance = Math.abs(aux1.len() - AbstractStarGroupDataProvider.NEGATIVE_DIST) < 1e-10;
                            if (negativeGaiaDistance) {
                                // Negative distance in Gaia star!
                                // Use Gaia position, HIP distance and name(s)
                                // Fetch Gaia RA/DEC
                                Coordinates.cartesianToSpherical(aux1, aux2);
                                double gaiaRA = aux2.x;
                                double gaiaDEC = aux2.y;
                                // Fetch HIP distance
                                aux1.set(hipStar.x(), hipStar.y(), hipStar.z());
                                Coordinates.cartesianToSpherical(aux1, aux2);
                                double hipDIST = aux2.z;
                                // Compute new cartesian position
                                aux1.set(gaiaRA, gaiaDEC, hipDIST);
                                Coordinates.sphericalToCartesian(aux1, aux2);
                                x = aux2.x;
                                y = aux2.y;
                                z = aux2.z;
                                size = hipStar.size();
                            }
                            hipStar.setId(gaiaStar.id());
                            hipStar.setPos(x, y, z);
                            hipStar.setVelocityVector(gaiaStar.pmx(), gaiaStar.pmy(), gaiaStar.pmz());
                            hipStar.setProperMotion(gaiaStar.mualpha(), gaiaStar.mudelta(), gaiaStar.radvel());
                            hipStar.setMag(gaiaStar.appmag(), gaiaStar.absmag());
                            hipStar.setCol(gaiaStar.col());
                            hipStar.setSize(size);
                            hipStar.addNames(gaiaStar.names());
                            starhits++;
                        }
                    } else {
                        notFoundHipStars++;
                    }
                }
            }
            logger.info(starhits + " of " + hipnum + " HIP stars' data updated due to being matched to a Gaia star (" + notFoundHipStars + " not found - negative parallax?)");
            // Free up some memory
            listLoader.clear();
        }
        // Main list is listHip
        list = listHip;
    } else {
        list = listLoader;
    }
    if (list == null || list.isEmpty()) {
        logger.info("No stars were loaded, please check out the parameters");
        return null;
    }
    long loadingMs = TimeUtils.millis();
    double loadingSecs = ((loadingMs - startMs) / 1000.0);
    logger.info("TIME STATS: Data loaded in " + loadingSecs + " seconds");
    logger.info("Generating octree with " + list.size() + " actual stars");
    // Pre-processing (sorting, removing too distant stars)
    Vector3d pos0 = new Vector3d();
    Iterator<IParticleRecord> it = list.iterator();
    while (it.hasNext()) {
        IParticleRecord s = it.next();
        double dist = pos0.set(s.x(), s.y(), s.z()).len();
        if (dist * Constants.U_TO_PC > distPcCap) {
            // Remove star
            it.remove();
        }
    }
    logger.info("Sorting list by magnitude with " + list.size() + " objects");
    list.sort(new StarBrightnessComparator());
    logger.info("Catalog sorting done");
    OctreeNode octree = og.generateOctree(list);
    PrintStream out = new PrintStream(System.out, true, StandardCharsets.UTF_8);
    out.println(octree.toString(true));
    long generatingMs = TimeUtils.millis();
    double generatingSecs = ((generatingMs - loadingMs) / 1000.0);
    logger.info("TIME STATS: Octree generated in " + generatingSecs + " seconds");
    /* NUMBERS */
    logger.info("Octree generated with " + octree.numNodesRec() + " octants and " + octree.numObjectsRec + " particles");
    logger.info(og.getDiscarded() + " particles have been discarded due to density");
    /* CLEAN CURRENT OUT DIR */
    File metadataFile = new File(outFolder, "metadata.bin");
    delete(metadataFile);
    File particlesFolder = new File(outFolder, "particles/");
    delete(particlesFolder);
    /* WRITE METADATA */
    metadataFile.createNewFile();
    logger.info("Writing metadata (" + octree.numNodesRec() + " nodes): " + metadataFile.getAbsolutePath());
    MetadataBinaryIO metadataWriter = new MetadataBinaryIO();
    metadataWriter.writeMetadata(octree, new FileOutputStream(metadataFile));
    /* WRITE PARTICLES */
    IStarGroupIO particleWriter = new StarGroupBinaryIO();
    particlesFolder.mkdirs();
    int version = outputVersion < BinaryDataProvider.MIN_OUTPUT_VERSION || outputVersion > BinaryDataProvider.MAX_OUTPUT_VERSION ? BinaryDataProvider.DEFAULT_OUTPUT_VERSION : outputVersion;
    logger.info("Using output format version " + version);
    writeParticlesToFiles(particleWriter, octree, version);
    long writingMs = TimeUtils.millis();
    double writingSecs = (writingMs - generatingMs) / 1000.0;
    double totalSecs = loadingSecs + generatingSecs + writingSecs;
    int[][] stats = octree.stats();
    NumberFormat formatter = new DecimalFormat("##########0.0000");
    if (countsPerMagGaia != null) {
        logger.info("=========================");
        logger.info("STAR COUNTS PER MAGNITUDE");
        logger.info("=========================");
        for (int level = 0; level < countsPerMagGaia.length; level++) {
            logger.info("Magnitude " + level + ": " + countsPerMagGaia[level] + " stars (" + formatter.format((double) countsPerMagGaia[level] * 100d / (double) list.size()) + "%)");
        }
        logger.info();
    }
    logger.info("============");
    logger.info("OCTREE STATS");
    logger.info("============");
    logger.info("Octants: " + octree.numNodesRec());
    logger.info("Particles: " + list.size());
    logger.info("Depth: " + octree.getMaxDepth());
    int level = 0;
    for (int[] levelinfo : stats) {
        logger.info(" Level " + level + ": " + levelinfo[0] + " octants, " + levelinfo[1] + " stars (" + formatter.format((double) levelinfo[1] * 100d / (double) list.size()) + "%)");
        level++;
    }
    logger.info();
    logger.info("================");
    logger.info("FINAL TIME STATS");
    logger.info("================");
    logger.info("Loading: " + loadingSecs + " secs (" + formatTimeSecs((long) loadingSecs) + ")");
    logger.info("Generating: " + generatingSecs + " secs (" + formatTimeSecs((long) generatingSecs) + ")");
    logger.info("Writing: " + writingSecs + " secs (" + formatTimeSecs((long) writingSecs) + ")");
    logger.info("Total: " + totalSecs + " secs (" + formatTimeSecs((long) totalSecs) + ")");
    return octree;
}
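
The snippet above drives MetadataBinaryIO from the full generation pipeline. The metadata-writing step can also be exercised on its own; the following is a minimal sketch, not part of the project, assuming an already generated OctreeNode and an output folder (both placeholders) and assuming the gaiasky.util.tree.OctreeNode import path, which is not shown in the snippet:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import gaiasky.data.octreegen.MetadataBinaryIO;
import gaiasky.util.tree.OctreeNode;

public class MetadataWriteSketch {
    // Serializes the node structure of a generated octree to <outFolder>/metadata.bin,
    // mirroring the WRITE METADATA block of generateOctree() above.
    public static File writeMetadata(OctreeNode octree, String outFolder) throws IOException {
        File metadataFile = new File(outFolder, "metadata.bin");
        metadataFile.createNewFile();
        MetadataBinaryIO metadataWriter = new MetadataBinaryIO();
        try (FileOutputStream fos = new FileOutputStream(metadataFile)) {
            metadataWriter.writeMetadata(octree, fos);
        }
        return metadataFile;
    }
}

The try-with-resources block is an addition here; in the snippet above the FileOutputStream is passed directly to writeMetadata without an explicit close.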
Use of gaiasky.data.octreegen.MetadataBinaryIO in the project gaiasky by langurmonkey.
In the class OctreeGroupLoader, the method loadOctreeData:
@Override
protected AbstractOctreeWrapper loadOctreeData() {
    /* LOAD METADATA */
    logger.info(I18n.txt("notif.loading", metadata));
    MetadataBinaryIO metadataReader = new MetadataBinaryIO();
    OctreeNode root = metadataReader.readMetadataMapped(metadata);
    if (root != null) {
        logger.info(I18n.txt("notif.nodeloader", root.numNodesRec(), metadata));
        logger.info(I18n.txt("notif.loading", particles));
        /*
         * CREATE OCTREE WRAPPER WITH ROOT NODE - particle group is by default
         * parallel, so we never use OctreeWrapperConcurrent
         */
        AbstractOctreeWrapper octreeWrapper = new OctreeWrapper("Universe", root);
        octreeWrapper.setFadeout(new double[] { 8e3, 5e5 });
        // Catalog info
        String name = this.name != null ? this.name : "LOD data";
        String description = this.description != null ? this.description : "Octree-based LOD dataset";
        CatalogInfo ci = new CatalogInfo(name, description, null, CatalogInfoSource.LOD, 1.5f, octreeWrapper);
        ci.nParticles = params.containsKey("nobjects") ? (Long) params.get("nobjects") : -1;
        ci.sizeBytes = params.containsKey("size") ? (Long) params.get("size") : -1;
        EventManager.publish(Event.CATALOG_ADD, this, ci, false);
        dataVersionHint = name.contains("DR2") || name.contains("dr2") || description.contains("DR2") || description.contains("dr2") ? 0 : 1;
        /* LOAD LOD LEVELS - LOAD PARTICLE DATA */
        try {
            int depthLevel = Math.min(OctreeNode.maxDepth, PRELOAD_DEPTH);
            loadLod(depthLevel, octreeWrapper);
            flushLoadedIds();
        } catch (IOException e) {
            logger.error(e);
        }
        return octreeWrapper;
    } else {
        logger.info("Dataset not found: " + metadata + " - " + particles);
        return null;
    }
}
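
On the read side, the loader only needs readMetadataMapped, which rebuilds the octree structure and, as the null check above suggests, returns null when the file cannot be read. A minimal sketch under the same assumptions about import paths, treating the metadata location as a plain path string:

import gaiasky.data.octreegen.MetadataBinaryIO;
import gaiasky.util.tree.OctreeNode;

public class MetadataReadSketch {
    // Rebuilds the octree node hierarchy from a metadata.bin file produced by
    // MetadataBinaryIO.writeMetadata; particle data is loaded separately per LOD level.
    public static OctreeNode readMetadata(String metadataPath) {
        MetadataBinaryIO metadataReader = new MetadataBinaryIO();
        OctreeNode root = metadataReader.readMetadataMapped(metadataPath);
        if (root == null) {
            // Mirrors the "Dataset not found" branch in loadOctreeData() above.
            System.err.println("Metadata not found or unreadable: " + metadataPath);
        }
        return root;
    }
}

Keeping the structural metadata separate from the particle files is what lets loadOctreeData wrap the root node first and then stream in octant data lazily, level by level, via loadLod.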