Use of ca.nrc.cadc.caom2.types.Point in project caom2db by opencadc.
The class PostgreSQLGenerator, method safeSetShape.
/**
 * Store a Shape value (Polygon or Circle) in a double[] column.
 *
 * @param sb string builder used to accumulate a loggable representation (may be null)
 * @param ps prepared statement to bind the value into
 * @param col column index
 * @param val shape value to store (may be null)
 * @throws SQLException if binding the value fails
 */
@Override
protected void safeSetShape(StringBuilder sb, PreparedStatement ps, int col, Shape val) throws SQLException {
    if (val == null) {
        ps.setObject(col, null);
        if (sb != null) {
            sb.append("null,");
        }
        return;
    }
    log.debug("[safeSetShape] in: " + val);
    if (val instanceof Polygon) {
        Polygon poly = (Polygon) val;
        // 2 numbers per point
        Double[] dval = new Double[2 * poly.getPoints().size()];
        int i = 0;
        for (Point p : poly.getPoints()) {
            dval[i++] = p.cval1;
            dval[i++] = p.cval2;
        }
        java.sql.Array arr = ps.getConnection().createArrayOf("float8", dval);
        ps.setObject(col, arr);
        if (sb != null) {
            sb.append("[");
            for (double d : dval) {
                sb.append(d).append(",");
            }
            // replace last comma with closing ]
            sb.setCharAt(sb.length() - 1, ']');
        }
        return;
    }
    if (val instanceof Circle) {
        Circle circ = (Circle) val;
        // 3 numbers: centre coordinates plus radius
        Double[] dval = new Double[3];
        dval[0] = circ.getCenter().cval1;
        dval[1] = circ.getCenter().cval2;
        dval[2] = circ.getRadius();
        java.sql.Array arr = ps.getConnection().createArrayOf("float8", dval);
        ps.setObject(col, arr);
        if (sb != null) {
            sb.append("[");
            for (double d : dval) {
                sb.append(d).append(",");
            }
            // replace last comma with closing ]
            sb.setCharAt(sb.length() - 1, ']');
        }
    }
}
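The method flattens a Polygon into a float8 array of alternating cval1/cval2 pairs and a Circle into a 3-element array of centre coordinates plus radius. A minimal caller sketch, assuming it runs inside a PostgreSQLGenerator subclass; the prepared statement name, column index, and coordinate values are hypothetical, not taken from caom2db:

    // hypothetical caller for illustration only; ps and the column index are assumptions
    void bindShapeExample(java.sql.PreparedStatement ps) throws java.sql.SQLException {
        StringBuilder sb = new StringBuilder();
        // a circle centred at (210.5, 54.3) with radius 0.25 is stored as the
        // float8 array {210.5, 54.3, 0.25}
        Circle circ = new Circle(new Point(210.5, 54.3), 0.25);
        safeSetShape(sb, ps, 3, circ);
        // sb now holds "[210.5,54.3,0.25]" for logging
    }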
Use of ca.nrc.cadc.caom2.types.Point in project caom2db by opencadc.
The class PostgreSQLGenerator, method getCircle.
@Override
protected Circle getCircle(ResultSet rs, int col) throws SQLException {
    double[] coords = Util.getDoubleArray(rs, col);
    if (coords == null) {
        return null;
    }
    if (coords.length == 3) {
        double cval1 = coords[0];
        double cval2 = coords[1];
        double rad = coords[2];
        Circle ret = new Circle(new Point(cval1, cval2), rad);
        log.debug("[getCircle] " + ret);
        return ret;
    }
    throw new IllegalStateException("array length " + coords.length + " invalid for Circle");
}
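getCircle reverses the Circle branch of safeSetShape: a 3-element float8 array read back from the column becomes centre coordinates plus a radius, and any other length is rejected. A read-back sketch using the same hypothetical values as above; the ResultSet name and column index are assumptions:

    // rs is assumed to be positioned on a row whose column 1 was written by
    // safeSetShape with Circle(new Point(210.5, 54.3), 0.25)
    Circle c = getCircle(rs, 1);
    if (c != null) {
        // prints centre=(210.5,54.3) radius=0.25
        log.debug("centre=(" + c.getCenter().cval1 + "," + c.getCenter().cval2
                + ") radius=" + c.getRadius());
    }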
Use of ca.nrc.cadc.caom2.types.Point in project caom2db by opencadc.
The class PostgreSQLGenerator, method generatePolygonApproximation.
ca.nrc.cadc.dali.Polygon generatePolygonApproximation(Circle val, int numVerts) {
    if (numVerts < 4) {
        throw new IllegalArgumentException("number of vertices in approximation too small (min: 4)");
    }
    CartesianTransform trans = CartesianTransform.getTransform(val);
    Point cen = trans.transform(val.getCenter());
    double phi = 2.0 * Math.PI / ((double) numVerts);
    // compute distance to vertices so that the edges are tangent and circle is
    // inside the polygon
    double vdist = val.getRadius() / Math.cos(phi / 2.0);
    // log.info("phi = " + phi + " vdist=" + vdist);
    CartesianTransform inv = trans.getInverseTransform();
    ca.nrc.cadc.dali.Polygon ret = new ca.nrc.cadc.dali.Polygon();
    for (int i = 0; i < numVerts; i++) {
        double x = cen.cval1 + vdist * Math.cos(i * phi);
        double y = cen.cval2 + vdist * Math.sin(i * phi);
        Point p = new Point(x, y);
        p = inv.transform(p);
        ret.getVertices().add(new ca.nrc.cadc.dali.Point(p.cval1, p.cval2));
    }
    return ret;
}
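Placing each vertex at r / cos(phi / 2) from the centre makes the polygon circumscribe the circle: the distance from the centre to each edge midpoint (the apothem) works out to exactly r, so every edge is tangent and the circle lies entirely inside the approximation. A small numeric illustration with hypothetical values:

    int numVerts = 6;                              // hexagonal approximation
    double radius = 0.5;                           // hypothetical circle radius
    double phi = 2.0 * Math.PI / numVerts;         // 60 degrees between vertices
    double vdist = radius / Math.cos(phi / 2.0);   // ~0.5774, distance from centre to each vertex
    double apothem = vdist * Math.cos(phi / 2.0);  // == radius, so each edge is tangent to the circle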
Use of ca.nrc.cadc.caom2.types.Point in project caom2db by opencadc.
The class ArtifactValidator, method compareMetadata.
void compareMetadata(TreeSet<ArtifactMetadata> logicalMetadata, TreeSet<ArtifactMetadata> physicalMetadata, long start) throws Exception {
    boolean supportSkipURITable = supportSkipURITable();
    long logicalCount = logicalMetadata.size();
    long physicalCount = physicalMetadata.size();
    long correct = 0;
    long diffLength = 0;
    long diffType = 0;
    long diffChecksum = 0;
    long notInLogical = 0;
    ArtifactMetadata nextLogical = null;
    for (ArtifactMetadata nextPhysical : physicalMetadata) {
        if (logicalMetadata.contains(nextPhysical)) {
            nextLogical = logicalMetadata.ceiling(nextPhysical);
            logicalMetadata.remove(nextLogical);
            if (matches(nextLogical.getChecksum(), nextPhysical.getChecksum())) {
                if (matches(nextLogical.contentLength, nextPhysical.contentLength)) {
                    if (matches(nextLogical.contentType, nextPhysical.contentType)) {
                        correct++;
                    } else {
                        // content type mismatch
                        diffType++;
                        logJSON(new String[] {
                            "logType", "detail", "anomaly", "diffType",
                            "observationID", nextLogical.observationID,
                            "artifactURI", nextLogical.getArtifactURI().toString(),
                            "caomContentType", nextLogical.contentType,
                            "storageContentType", nextPhysical.contentType,
                            "caomCollection", collection }, false);
                    }
                } else {
                    // content length mismatch
                    diffLength++;
                    if (supportSkipURITable) {
                        addToOrUpdateSkipTable(nextLogical, LENGTH_DIFF);
                    }
                    logJSON(new String[] {
                        "logType", "detail", "anomaly", "diffLength",
                        "observationID", nextLogical.observationID,
                        "artifactURI", nextLogical.getArtifactURI().toASCIIString(),
                        "caomContentLength", safeToString(nextLogical.contentLength),
                        "storageContentLength", safeToString(nextPhysical.contentLength),
                        "caomCollection", collection }, false);
                }
            } else {
                // checksum mismatch
                diffChecksum++;
                if (supportSkipURITable) {
                    addToOrUpdateSkipTable(nextLogical, CHECKSUM_DIFF);
                }
                logJSON(new String[] {
                    "logType", "detail", "anomaly", "diffChecksum",
                    "observationID", nextLogical.observationID,
                    "artifactURI", nextLogical.getArtifactURI().toString(),
                    "caomChecksum", nextLogical.getChecksum(),
                    "caomSize", safeToString(nextLogical.contentLength),
                    "storageChecksum", nextPhysical.getChecksum(),
                    "storageSize", safeToString(nextPhysical.contentLength),
                    "caomCollection", collection }, false);
            }
        } else {
            notInLogical++;
            logJSON(new String[] {
                "logType", "detail", "anomaly", "notInCAOM",
                "artifactURI", nextPhysical.getArtifactURI().toString() }, false);
        }
    }
    // at this point, any artifact still in logicalMetadata is not in physicalMetadata
    long missingFromStorage = 0;
    long notPublic = 0;
    StoragePolicy storagePolicy = artifactStore.getStoragePolicy(this.collection);
    Date now = new Date();
    for (ArtifactMetadata metadata : logicalMetadata) {
        String errorMessage = null;
        Artifact artifact = new Artifact(metadata.getArtifactURI(), metadata.productType, metadata.releaseType);
        Date releaseDate = AccessUtil.getReleaseDate(artifact, metadata.metaRelease, metadata.dataRelease);
        String releaseDateString = "null";
        boolean miss = false;
        if (releaseDate == null) {
            // proprietary artifact, skip
            log.debug("null release date, skipping");
            if (StoragePolicy.PUBLIC_ONLY == storagePolicy) {
                notPublic++;
            } else {
                // missing proprietary artifact, but won't be added to skip table
                miss = true;
            }
        } else {
            releaseDateString = df.format(releaseDate);
            if (releaseDate.after(now)) {
                // proprietary artifact, add to skip table for future download
                errorMessage = ArtifactHarvester.PROPRIETARY;
                if (StoragePolicy.PUBLIC_ONLY == storagePolicy) {
                    notPublic++;
                } else {
                    // missing proprietary artifact, add to skip table
                    miss = true;
                }
            } else {
                // missing public artifact, add to skip table
                miss = true;
            }
            // add to HarvestSkipURI table if there is not already a row in the table
            if (supportSkipURITable) {
                addToOrUpdateSkipTable(metadata, errorMessage);
            }
        }
        if (miss) {
            missingFromStorage++;
            logJSON(new String[] {
                "logType", "detail", "anomaly", "missingFromStorage",
                "releaseDate", releaseDateString,
                "observationID", metadata.observationID,
                "artifactURI", metadata.getArtifactURI().toASCIIString(),
                "caomCollection", collection }, false);
        }
    }
    if (reportOnly) {
        // diff
        logJSON(new String[] {
            "logType", "summary", "collection", collection,
            "totalInCAOM", Long.toString(logicalCount),
            "totalInStorage", Long.toString(physicalCount),
            "totalCorrect", Long.toString(correct),
            "totalDiffChecksum", Long.toString(diffChecksum),
            "totalDiffLength", Long.toString(diffLength),
            "totalDiffType", Long.toString(diffType),
            "totalNotInCAOM", Long.toString(notInLogical),
            "totalMissingFromStorage", Long.toString(missingFromStorage),
            "totalNotPublic", Long.toString(notPublic),
            "time", Long.toString(System.currentTimeMillis() - start) }, true);
    } else {
        // validate
        logJSON(new String[] {
            "logType", "summary", "collection", collection,
            "totalInCAOM", Long.toString(logicalCount),
            "totalInStorage", Long.toString(physicalCount),
            "totalCorrect", Long.toString(correct),
            "totalDiffChecksum", Long.toString(diffChecksum),
            "totalDiffLength", Long.toString(diffLength),
            "totalDiffType", Long.toString(diffType),
            "totalNotInCAOM", Long.toString(notInLogical),
            "totalMissingFromStorage", Long.toString(missingFromStorage),
            "totalNotPublic", Long.toString(notPublic),
            "totalNewSkipURI", Long.toString(newSkipURICount),
            "totalUpdateSkipURI", Long.toString(updateSkipURICount),
            "time", Long.toString(System.currentTimeMillis() - start) }, true);
    }
}
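Each physical artifact that also exists in CAOM is checked in order for checksum, content-length, and content-type agreement, and only the first mismatch found is counted and logged. The matches(...) helper itself is not shown here; a minimal null-safe sketch of that kind of comparison follows, labelled as an assumption since the real ArtifactValidator may implement it differently:

    // hypothetical null-safe equality in the spirit of the matches(...) calls above;
    // behaviourally equivalent to java.util.Objects.equals
    private static boolean matchesExample(Object caomValue, Object storageValue) {
        if (caomValue == null) {
            return storageValue == null;
        }
        return caomValue.equals(storageValue);
    }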
Use of ca.nrc.cadc.caom2.types.Point in project caom2db by opencadc.
The class DeletionHarvester, method doit.
/**
 * Does the work.
 *
 * @return progress status
 */
private Progress doit() {
    log.info("batch: " + entityClass.getSimpleName());
    Progress ret = new Progress();
    int expectedNum = Integer.MAX_VALUE;
    if (batchSize != null) {
        expectedNum = batchSize.intValue();
    }
    boolean correct = true;
    try {
        HarvestState state = harvestStateDAO.get(source, cname);
        log.info("last harvest: " + format(state.curLastModified));
        if (initHarvestState && state.curLastModified == null) {
            state.curLastModified = initDate;
            harvestStateDAO.put(state);
            state = harvestStateDAO.get(source, cname);
            log.info("harvest state initialised to: " + df.format(state.curLastModified));
        }
        startDate = state.curLastModified;
        if (firstIteration) {
            if (super.minDate != null) {
                // override state
                startDate = super.minDate;
            }
            endDate = super.maxDate;
            // harvest up to a little in the past because the head of the
            // sequence may be volatile
            long fiveMinAgo = System.currentTimeMillis() - 5 * 60000L;
            if (endDate == null) {
                endDate = new Date(fiveMinAgo);
            } else {
                endDate = new Date(Math.min(fiveMinAgo, endDate.getTime()));
            }
        }
        firstIteration = false;
        List<DeletedObservation> entityList = null;
        String source = null;
        if (deletedDAO != null) {
            source = "deletedDAO";
            entityList = deletedDAO.getList(src.getCollection(), startDate, endDate, batchSize);
        } else {
            source = "repoClient";
            entityList = repoClient.getDeleted(src.getCollection(), startDate, endDate, batchSize);
        }
        if (entityList == null) {
            throw new RuntimeException("Error gathering deleted observations from " + source);
        }
        if (entityList.size() == expectedNum) {
            detectLoop(entityList);
        }
        ret.found = entityList.size();
        log.info("found: " + entityList.size());
        ListIterator<DeletedObservation> iter = entityList.listIterator();
        while (iter.hasNext()) {
            DeletedObservation de = iter.next();
            // allow garbage collection asap
            iter.remove();
            log.debug("Observation read from deletion end-point: " + de.getID() + " date = " + de.getLastModified());
            if (!dryrun) {
                txnManager.startTransaction();
            }
            boolean ok = false;
            try {
                if (!dryrun) {
                    state.curLastModified = de.getLastModified();
                    state.curID = de.getID();
                    ObservationState cur = obsDAO.getState(de.getID());
                    if (cur != null) {
                        log.debug("Observation: " + de.getID() + " found in DB");
                        Date lastUpdate = cur.getMaxLastModified();
                        Date deleted = de.getLastModified();
                        log.debug("to be deleted: " + de.getClass().getSimpleName() + " " + de.getURI() + " " + de.getID()
                                + " deleted date " + format(de.getLastModified()) + " modified date " + format(cur.getMaxLastModified()));
                        if (deleted.after(lastUpdate)) {
                            log.info("delete: " + de.getClass().getSimpleName() + " " + de.getURI() + " " + de.getID());
                            obsDAO.delete(de.getID());
                            ret.deleted++;
                        } else {
                            log.info("skip out-of-date delete: " + de.getClass().getSimpleName() + " " + de.getURI() + " " + de.getID() + " " + format(de.getLastModified()));
                            ret.skipped++;
                        }
                    } else {
                        log.debug("Observation: " + de.getID() + " not found in DB");
                    }
                    // track progress
                    harvestStateDAO.put(state);
                    log.debug("committing transaction");
                    txnManager.commitTransaction();
                    log.debug("commit: OK");
                }
                ok = true;
            } catch (Throwable t) {
                log.error("unexpected exception", t);
            } finally {
                if (!ok && !dryrun) {
                    log.warn("failed to process " + de + ": trying to rollback the transaction");
                    txnManager.rollbackTransaction();
                    log.warn("rollback: OK");
                    ret.abort = true;
                }
            }
        }
        if (ret.found < expectedNum) {
            ret.done = true;
            if (state != null && state.curLastModified != null && ret.found > 0) {
                // tweak HarvestState so we don't keep picking up the same one:
                // normally move curLastModified 1 ms ahead
                Date n = new Date(state.curLastModified.getTime() + 1L);
                Date now = new Date();
                if (now.getTime() - n.getTime() > 600 * 1000L) {
                    // if the state is already more than 10 minutes old, move 100 ms ahead
                    n = new Date(state.curLastModified.getTime() + 100L);
                }
                state.curLastModified = n;
                log.info("reached last " + entityClass.getSimpleName() + ": setting curLastModified to " + format(state.curLastModified));
                harvestStateDAO.put(state);
            }
        }
    } catch (Throwable t) {
        log.error("unexpected exception", t);
        ret.abort = true;
        correct = false;
    } finally {
        if (correct) {
            log.debug("DONE");
        }
    }
    return ret;
}
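The Progress object returned by doit() is only touched through the fields found, deleted, skipped, done, and abort. A minimal sketch of a holder with just those fields, inferred from the usage above; the actual class in caom2db may carry more state:

    // minimal sketch inferred from the fields used in doit(); not the actual caom2db class
    static class Progress {
        boolean done = false;
        boolean abort = false;
        int found = 0;
        int deleted = 0;
        int skipped = 0;
    }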