Use of ca.nrc.cadc.caom2.types.Polygon in project caom2db by opencadc.
The class PostgreSQLGeneratorTest, method testCircleToPolygonApproximatiom:
@Test
public void testCircleToPolygonApproximatiom() {
    try {
        Circle c = new Circle(new Point(12.0, 34.0), 1.0);
        double ca = c.getArea();
        double cs = c.getSize();
        for (int i = 4; i < 32; i += 2) {
            ca.nrc.cadc.dali.Polygon dpoly = gen.generatePolygonApproximation(c, i);
            List<Vertex> verts = new ArrayList<Vertex>();
            List<Point> points = new ArrayList<Point>();
            SegmentType t = SegmentType.MOVE;
            for (ca.nrc.cadc.dali.Point dp : dpoly.getVertices()) {
                points.add(new Point(dp.getLongitude(), dp.getLatitude()));
                verts.add(new Vertex(dp.getLongitude(), dp.getLatitude(), t));
                t = SegmentType.LINE;
            }
            verts.add(Vertex.CLOSE);
            MultiPolygon mp = new MultiPolygon(verts);
            Polygon poly = new Polygon(points, mp);
            double pa = poly.getArea();
            double ps = poly.getSize();
            double da = pa / ca;
            log.info("n=" + i + " poly: " + ps + " " + pa + " (" + da + ")");
        }
        log.info("circle: " + ca + " " + cs);
    } catch (Exception unexpected) {
        log.error("unexpected exception", unexpected);
        Assert.fail("unexpected exception: " + unexpected);
    }
}
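For intuition about the convergence the test logs (da = pa / ca), here is a minimal, self-contained sketch in plain Java: in flat geometry the area of a regular n-gon inscribed in a circle of radius r is (n/2) r^2 sin(2*pi/n), so the ratio to the circle area approaches 1 as n grows. This uses no CAOM classes, and the generator's actual vertex placement on the sphere may differ, so the exact ratios will not match the test output.

// Standalone sketch: area ratio of an inscribed regular n-gon to its circle.
// Plain Euclidean geometry only; illustrative, not the CAOM implementation.
public class PolygonApproximationDemo {
    public static void main(String[] args) {
        double r = 1.0;
        double circleArea = Math.PI * r * r;
        for (int n = 4; n < 32; n += 2) {
            // n isosceles triangles, each with apex angle 2*pi/n
            double polyArea = 0.5 * n * r * r * Math.sin(2.0 * Math.PI / n);
            System.out.println("n=" + n + " ratio=" + (polyArea / circleArea));
        }
    }
}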
Use of ca.nrc.cadc.caom2.types.Polygon in project caom2db by opencadc.
The class AbstractObservationDAOTest, method getTestPlane:
protected Plane getTestPlane(boolean full, String productID, int depth, boolean poly) throws Exception {
    Plane p = new Plane(productID);
    if (full) {
        p.metaProducer = URI.create("test:plane/roundrip-1.0");
        p.creatorID = URI.create("ivo://example.com/TEST?" + productID);
        p.calibrationLevel = CalibrationLevel.CALIBRATED;
        p.dataProductType = DataProductType.IMAGE;
        p.metaRelease = TEST_DATE;
        p.dataRelease = TEST_DATE;
        p.provenance = new Provenance("doit");
        p.provenance.lastExecuted = TEST_DATE;
        p.provenance.producer = "MyProducer";
        p.provenance.project = "MyProject";
        p.provenance.reference = new URI("http://www.example.com/MyProject/doit");
        p.provenance.runID = "RUNID123";
        p.provenance.version = "0.1alpha4";
        p.provenance.getKeywords().addAll(TEST_KEYWORDS);
        p.provenance.getInputs().add(new PlaneURI(new ObservationURI("FOO", "bar"), "in1"));
        p.provenance.getInputs().add(new PlaneURI(new ObservationURI("FOO", "bar"), "in2"));
        p.metrics = new Metrics();
        p.metrics.sourceNumberDensity = 100.0;
        p.metrics.background = 2.7;
        p.metrics.backgroundStddev = 0.3;
        p.metrics.fluxDensityLimit = 1.0e-5;
        p.metrics.magLimit = 28.5;
        p.metrics.sampleSNR = 11.0;
        p.quality = new DataQuality(Quality.JUNK);
        // previously was computed metadata
        p.energy = new Energy();
        p.energy.bandpassName = "V";
        p.energy.bounds = new SampledInterval(400e-6, 900e-6);
        p.energy.bounds.getSamples().add(new Interval(400e-6, 500e-6));
        p.energy.bounds.getSamples().add(new Interval(800e-6, 900e-6));
        p.energy.dimension = 2L;
        p.energy.getEnergyBands().add(EnergyBand.OPTICAL);
        p.energy.resolvingPower = 2.0;
        p.energy.resolvingPowerBounds = new Interval(1.8, 2.2);
        p.energy.restwav = 600e-9;
        p.energy.sampleSize = 100e-6;
        p.energy.transition = new EnergyTransition("H", "alpha");
        p.polarization = new Polarization();
        p.polarization.dimension = 3L;
        p.polarization.states = new TreeSet<>();
        p.polarization.states.add(PolarizationState.I);
        p.polarization.states.add(PolarizationState.Q);
        p.polarization.states.add(PolarizationState.U);
        p.position = new Position();
        if (poly) {
            MultiPolygon mp = new MultiPolygon();
            mp.getVertices().add(new Vertex(2.0, 2.0, SegmentType.MOVE));
            mp.getVertices().add(new Vertex(1.0, 4.0, SegmentType.LINE));
            mp.getVertices().add(new Vertex(3.0, 3.0, SegmentType.LINE));
            mp.getVertices().add(new Vertex(0.0, 0.0, SegmentType.CLOSE));
            List<Point> points = new ArrayList<Point>();
            for (Vertex v : mp.getVertices()) {
                if (!SegmentType.CLOSE.equals(v.getType())) {
                    points.add(new Point(v.cval1, v.cval2));
                }
            }
            p.position.bounds = new Polygon(points, mp);
        } else {
            p.position.bounds = new Circle(new Point(0.0, 89.0), 2.0);
        }
        p.position.dimension = new Dimension2D(1024, 2048);
        p.position.resolution = 0.05;
        p.position.resolutionBounds = new Interval(0.04, 0.06);
        p.position.sampleSize = 0.025;
        p.position.timeDependent = false;
        p.time = new Time();
        p.time.bounds = new SampledInterval(50000.25, 50000.75);
        p.time.bounds.getSamples().add(new Interval(50000.25, 50000.40));
        p.time.bounds.getSamples().add(new Interval(50000.50, 50000.75));
        p.time.dimension = 2L;
        p.time.exposure = 666.0;
        p.time.resolution = 0.5;
        p.time.resolutionBounds = new Interval(0.22, 0.88);
        p.time.sampleSize = 0.15;
        p.custom = new CustomAxis("FDEP");
        p.custom.bounds = new SampledInterval(100.0, 200.0);
        p.custom.bounds.getSamples().add(new Interval(100.0, 140.0));
        p.custom.bounds.getSamples().add(new Interval(160.0, 200.0));
        p.custom.bounds.validate();
        p.custom.dimension = 1024L;
        p.observable = new Observable("phot.flux");
        p.getMetaReadGroups().add(URI.create("ivo://example.net/gms?GroupA"));
        p.getMetaReadGroups().add(URI.create("ivo://example.net/gms?GroupB"));
        p.getDataReadGroups().add(URI.create("ivo://example.net/gms?GroupC"));
        p.getDataReadGroups().add(URI.create("ivo://example.net/gms?GroupD"));
    }
    if (depth <= 2)
        return p;
    p.getArtifacts().add(getTestArtifact(full, new URI("http://www.example.com/stuff/" + productID + "a"), depth));
    p.getArtifacts().add(getTestArtifact(full, new URI("http://www.example.com/stuff/" + productID + "b"), depth));
    Assert.assertEquals(2, p.getArtifacts().size());
    return p;
}
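A hypothetical call site in a subclass of AbstractObservationDAOTest (the productID and depth values here are illustrative, not taken from the source):

// Build a fully populated plane with polygon bounds and two child artifacts
// (depth > 2), then sanity-check the structure before a round-trip test.
Plane expected = getTestPlane(true, "prod-1", 3, true);
Assert.assertEquals(2, expected.getArtifacts().size());
Assert.assertTrue(expected.position.bounds instanceof Polygon);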
Use of ca.nrc.cadc.caom2.types.Polygon in project caom2db by opencadc.
The class AbstractObservationDAOTest, method testEqual:
private void testEqual(Plane expected, Plane actual) {
    testEntity(expected, actual);
    Assert.assertEquals("productID", expected.getProductID(), actual.getProductID());
    Assert.assertEquals("creatorID", expected.creatorID, actual.creatorID);
    Assert.assertEquals("calibrationLevel", expected.calibrationLevel, actual.calibrationLevel);
    Assert.assertEquals("dataProductType", expected.dataProductType, actual.dataProductType);
    testEqualSeconds("plane.metaRelease", expected.metaRelease, actual.metaRelease);
    testEqualSeconds("plane.dataRelease", expected.dataRelease, actual.dataRelease);
    if (expected.provenance != null) {
        Assert.assertEquals("provenance.name", expected.provenance.getName(), actual.provenance.getName());
        Assert.assertEquals("provenance.reference", expected.provenance.reference, actual.provenance.reference);
        Assert.assertEquals("provenance.version", expected.provenance.version, actual.provenance.version);
        Assert.assertEquals("provenance.project", expected.provenance.project, actual.provenance.project);
        Assert.assertEquals("provenance.producer", expected.provenance.producer, actual.provenance.producer);
        Assert.assertEquals("provenance.runID", expected.provenance.runID, actual.provenance.runID);
        testEqualSeconds("provenance.lastExecuted", expected.provenance.lastExecuted, actual.provenance.lastExecuted);
        Assert.assertEquals("provenance.inputs", expected.provenance.getInputs(), actual.provenance.getInputs());
        testEqual("provenance.keywords", expected.provenance.getKeywords(), actual.provenance.getKeywords());
    } else
        Assert.assertNull(actual.provenance);
    if (expected.metrics != null) {
        Assert.assertEquals("metrics.sourceNumberDensity", expected.metrics.sourceNumberDensity, actual.metrics.sourceNumberDensity);
        Assert.assertEquals("metrics.background", expected.metrics.background, actual.metrics.background);
        Assert.assertEquals("metrics.backgroundStddev", expected.metrics.backgroundStddev, actual.metrics.backgroundStddev);
        Assert.assertEquals("metrics.fluxDensityLimit", expected.metrics.fluxDensityLimit, actual.metrics.fluxDensityLimit);
        Assert.assertEquals("metrics.magLimit", expected.metrics.magLimit, actual.metrics.magLimit);
    } else
        Assert.assertNull(actual.metrics);
    if (expected.position != null) {
        Assert.assertNotNull("plane.position", actual.position);
        if (expected.position.bounds != null && Polygon.class.equals(expected.position.bounds.getClass())) {
            Polygon ep = (Polygon) expected.position.bounds;
            Polygon ap = (Polygon) actual.position.bounds;
            Assert.assertEquals("num points", ep.getPoints().size(), ap.getPoints().size());
            for (int i = 0; i < ep.getPoints().size(); i++) {
                Point ept = ep.getPoints().get(i);
                Point apt = ap.getPoints().get(i);
                Assert.assertEquals("point.cval1", ept.cval1, apt.cval1, 0.0);
                Assert.assertEquals("point.cval2", ept.cval2, apt.cval2, 0.0);
            }
            Assert.assertEquals("num vertices", ep.getSamples().getVertices().size(), ap.getSamples().getVertices().size());
            for (int i = 0; i < ep.getSamples().getVertices().size(); i++) {
                Vertex ev = ep.getSamples().getVertices().get(i);
                Vertex av = ap.getSamples().getVertices().get(i);
                Assert.assertEquals("vertex.type", ev.getType(), av.getType());
                Assert.assertEquals("vertex.cval1", ev.cval1, av.cval1, 0.0);
                Assert.assertEquals("vertex.cval2", ev.cval2, av.cval2, 0.0);
            }
        } else if (expected.position.bounds != null && Circle.class.equals(expected.position.bounds.getClass())) {
            Circle ep = (Circle) expected.position.bounds;
            Circle ap = (Circle) actual.position.bounds;
            Assert.assertEquals("center.cval1", ep.getCenter().cval1, ap.getCenter().cval1, 0.0);
            Assert.assertEquals("center.cval2", ep.getCenter().cval2, ap.getCenter().cval2, 0.0);
            Assert.assertEquals("radius", ep.getRadius(), ap.getRadius(), 0.0);
        } else
            Assert.assertNull(actual.position.bounds);
        if (expected.position.dimension != null) {
            Assert.assertEquals("position.dimension.naxis1", expected.position.dimension.naxis1, actual.position.dimension.naxis1);
            Assert.assertEquals("position.dimension.naxis2", expected.position.dimension.naxis2, actual.position.dimension.naxis2);
        } else
            Assert.assertNull(actual.position.dimension);
        Assert.assertEquals("position.resolution", expected.position.resolution, actual.position.resolution);
        Assert.assertEquals("position.sampleSize", expected.position.sampleSize, actual.position.sampleSize);
        Assert.assertEquals("position.timeDependent", expected.position.timeDependent, actual.position.timeDependent);
    }
    if (expected.energy != null) {
        Assert.assertNotNull("plane.energy", actual.energy);
        if (expected.energy.bounds != null) {
            Assert.assertNotNull("energy.bounds", actual.energy.bounds);
            Assert.assertEquals("energy.bounds.lower", expected.energy.bounds.getLower(), actual.energy.bounds.getLower(), 0.0);
            Assert.assertEquals("energy.bounds.upper", expected.energy.bounds.getUpper(), actual.energy.bounds.getUpper(), 0.0);
            Assert.assertEquals("energy.bounds.samples.size", expected.energy.bounds.getSamples().size(), actual.energy.bounds.getSamples().size());
            for (int i = 0; i < expected.energy.bounds.getSamples().size(); i++) {
                Interval esi = expected.energy.bounds.getSamples().get(i);
                Interval asi = actual.energy.bounds.getSamples().get(i);
                Assert.assertEquals("SubInterval.lb", esi.getLower(), asi.getLower(), 0.0);
                Assert.assertEquals("SubInterval.ub", esi.getUpper(), asi.getUpper(), 0.0);
            }
        } else
            Assert.assertNull("energy.bounds", actual.energy.bounds);
        Assert.assertEquals("energy.bandpassName", expected.energy.bandpassName, actual.energy.bandpassName);
        Assert.assertEquals("energy.dimension", expected.energy.dimension, actual.energy.dimension);
        Iterator<EnergyBand> ee = expected.energy.getEnergyBands().iterator();
        Iterator<EnergyBand> ae = actual.energy.getEnergyBands().iterator();
        while (ee.hasNext()) {
            EnergyBand ex = ee.next();
            EnergyBand ac = ae.next();
            Assert.assertEquals("energy.energyBand", ex, ac);
        }
        Assert.assertEquals("energy.resolvingPower", expected.energy.resolvingPower, actual.energy.resolvingPower);
        Assert.assertEquals("energy.restwav", expected.energy.restwav, actual.energy.restwav);
        Assert.assertEquals("energy.sampleSize", expected.energy.sampleSize, actual.energy.sampleSize);
        Assert.assertEquals("energy.transition", expected.energy.transition, actual.energy.transition);
    }
    if (expected.time != null) {
        Assert.assertNotNull("plane.time", actual.time);
        if (expected.time.bounds != null) {
            Assert.assertNotNull("time.bounds", actual.time.bounds);
            Assert.assertEquals("time.bounds.lower", expected.time.bounds.getLower(), actual.time.bounds.getLower(), 0.0);
            Assert.assertEquals("time.bounds.upper", expected.time.bounds.getUpper(), actual.time.bounds.getUpper(), 0.0);
            Assert.assertEquals("time.bounds.samples.size", expected.time.bounds.getSamples().size(), actual.time.bounds.getSamples().size());
            for (int i = 0; i < expected.time.bounds.getSamples().size(); i++) {
                Interval esi = expected.time.bounds.getSamples().get(i);
                Interval asi = actual.time.bounds.getSamples().get(i);
                Assert.assertEquals("SubInterval.lb", esi.getLower(), asi.getLower(), 0.0);
                Assert.assertEquals("SubInterval.ub", esi.getUpper(), asi.getUpper(), 0.0);
            }
        } else
            Assert.assertNull("time.bounds", actual.time.bounds);
        Assert.assertEquals("time.dimension", expected.time.dimension, actual.time.dimension);
        Assert.assertEquals("time.exposure", expected.time.exposure, actual.time.exposure);
        Assert.assertEquals("time.resolution", expected.time.resolution, actual.time.resolution);
        Assert.assertEquals("time.sampleSize", expected.time.sampleSize, actual.time.sampleSize);
    }
    if (expected.polarization != null) {
        Assert.assertNotNull("plane.polarization", actual.polarization);
        if (expected.polarization.states != null) {
            Assert.assertNotNull("polarization.states", actual.polarization.states);
            Assert.assertEquals("polarization.states.size", expected.polarization.states.size(), actual.polarization.states.size());
            Iterator<PolarizationState> ei = expected.polarization.states.iterator();
            Iterator<PolarizationState> ai = actual.polarization.states.iterator();
            while (ei.hasNext()) {
                Assert.assertEquals("polarization.state", ei.next(), ai.next());
            }
            Assert.assertEquals("polarization.dimension", expected.polarization.dimension, actual.polarization.dimension);
        }
    }
    Assert.assertEquals("metaReadGroups.size", expected.getMetaReadGroups().size(), actual.getMetaReadGroups().size());
    Iterator<URI> emra = expected.getMetaReadGroups().iterator();
    Iterator<URI> amra = actual.getMetaReadGroups().iterator();
    while (emra.hasNext() || amra.hasNext()) {
        Assert.assertEquals(emra.next(), amra.next());
    }
    Assert.assertEquals("dataReadGroups.size", expected.getDataReadGroups().size(), actual.getDataReadGroups().size());
    Iterator<URI> edra = expected.getDataReadGroups().iterator();
    Iterator<URI> adra = actual.getDataReadGroups().iterator();
    while (edra.hasNext() || adra.hasNext()) {
        Assert.assertEquals(edra.next(), adra.next());
    }
    log.debug("num artifacts: " + expected.getArtifacts().size() + " == " + actual.getArtifacts().size());
    Assert.assertEquals("number of artifacts", expected.getArtifacts().size(), actual.getArtifacts().size());
    Iterator<Artifact> ea = expected.getArtifacts().iterator();
    Iterator<Artifact> aa = actual.getArtifacts().iterator();
    while (ea.hasNext()) {
        Artifact ex = ea.next();
        Artifact ac = aa.next();
        testEqual(ex, ac);
    }
    testEntityChecksums(expected, actual);
}
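The lockstep-iterator comparison above repeats for energy bands, polarization states, read groups, and artifacts. A generic helper along these lines (hypothetical, not part of the actual test class) would collapse those loops:

// Hypothetical helper: assert two ordered collections are element-wise equal.
private static <T> void assertSameElements(String label, java.util.Collection<T> expected, java.util.Collection<T> actual) {
    Assert.assertEquals(label + ".size", expected.size(), actual.size());
    Iterator<T> ei = expected.iterator();
    Iterator<T> ai = actual.iterator();
    while (ei.hasNext()) {
        Assert.assertEquals(label, ei.next(), ai.next());
    }
}

For example, the EnergyBand loop would become assertSameElements("energy.energyBand", expected.energy.getEnergyBands(), actual.energy.getEnergyBands());.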
Use of ca.nrc.cadc.caom2.types.Polygon in project caom2db by opencadc.
The class ObservationHarvester, method doit:
private Progress doit() {
    Progress ret = new Progress();
    if (!ready) {
        log.error("Observation Harvester not ready");
        ret.abort = true;
        return ret;
    }
    long t = System.currentTimeMillis();
    long timeState = -1;
    long timeQuery = -1;
    long timeTransaction = -1;
    int expectedNum = Integer.MAX_VALUE;
    if (batchSize != null) {
        expectedNum = batchSize.intValue();
    }
    try {
        // hint
        System.gc();
        t = System.currentTimeMillis();
        HarvestState state = null;
        if (!skipped) {
            state = harvestStateDAO.get(source, Observation.class.getSimpleName());
            startDate = state.curLastModified;
            log.debug("state " + state);
        }
        timeState = System.currentTimeMillis() - t;
        t = System.currentTimeMillis();
        if (firstIteration) {
            if (full) {
                startDate = null;
            } else if (super.minDate != null) {
                startDate = super.minDate;
            }
            endDate = super.maxDate;
            if (!skipped) {
                // harvest up to a little in the past because the head of
                // the sequence may be volatile
                long fiveMinAgo = System.currentTimeMillis() - 5 * 60000L;
                if (endDate == null) {
                    endDate = new Date(fiveMinAgo);
                } else {
                    endDate = new Date(Math.min(fiveMinAgo, endDate.getTime()));
                }
            }
        }
        firstIteration = false;
        List<SkippedWrapperURI<ObservationResponse>> entityList;
        if (skipped) {
            entityList = getSkipped(startDate);
        } else {
            log.info("harvest window: " + format(startDate) + " :: " + format(endDate) + " [" + batchSize + "]");
            List<ObservationResponse> obsList;
            if (srcObservationDAO != null) {
                obsList = srcObservationDAO.getList(src.getCollection(), startDate, endDate, batchSize + 1);
            } else {
                obsList = srcObservationService.getList(src.getCollection(), startDate, endDate, batchSize + 1);
            }
            entityList = wrap(obsList);
        }
        // HarvestState (normal case because query: >= startDate)
        if (!entityList.isEmpty() && !skipped) {
            ListIterator<SkippedWrapperURI<ObservationResponse>> iter = entityList.listIterator();
            Observation curBatchLeader = iter.next().entity.observation;
            if (curBatchLeader != null) {
                log.debug("currentBatch: " + curBatchLeader.getURI() + " " + format(curBatchLeader.getMaxLastModified()));
                log.debug("harvestState: " + format(state.curID) + " " + format(state.curLastModified));
                if (curBatchLeader.getID().equals(state.curID) && curBatchLeader.getMaxLastModified().equals(state.curLastModified)) {
                    iter.remove();
                    expectedNum--;
                }
            }
        }
        ret.found = entityList.size();
        log.debug("found: " + entityList.size());
        timeQuery = System.currentTimeMillis() - t;
        t = System.currentTimeMillis();
        ListIterator<SkippedWrapperURI<ObservationResponse>> iter1 = entityList.listIterator();
        while (iter1.hasNext()) {
            SkippedWrapperURI<ObservationResponse> ow = iter1.next();
            Observation o = null;
            if (ow.entity != null) {
                o = ow.entity.observation;
            }
            HarvestSkipURI hs = ow.skip;
            // allow garbage collection during loop
            iter1.remove();
            String skipMsg = null;
            if (!dryrun) {
                if (destObservationDAO.getTransactionManager().isOpen()) {
                    throw new RuntimeException("BUG: found open transaction at start of next observation");
                }
                log.debug("starting transaction");
                destObservationDAO.getTransactionManager().startTransaction();
            }
            boolean ok = false;
            try {
                // o could be null in skip mode cleanup
                if (o != null) {
                    String treeSize = computeTreeSize(o);
                    log.info("put: " + o.getClass().getSimpleName() + " " + o.getURI() + " " + format(o.getMaxLastModified()) + " " + treeSize);
                } else if (hs != null) {
                    log.info("put (retry error): " + hs.getName() + " " + hs.getSkipID() + " " + format(hs.getLastModified()));
                } else {
                    log.info("put (error): Observation " + ow.entity.observationState.getURI() + " " + format(ow.entity.observationState.maxLastModified));
                }
                if (!dryrun) {
                    if (skipped) {
                        startDate = hs.getTryAfter();
                    }
                    if (o != null) {
                        if (state != null) {
                            state.curLastModified = o.getMaxLastModified();
                            state.curID = o.getID();
                        }
                        // try to avoid DataIntegrityViolationException due
                        // to missed deletion followed by insert with new
                        // UUID
                        ObservationState cur = destObservationDAO.getState(o.getURI());
                        if (cur != null && !cur.getID().equals(o.getID())) {
                            // missed harvesting a deletion: trust source
                            log.info("delete: " + o.getClass().getSimpleName() + " " + cur.getURI() + " " + cur.getID() + " (ObservationURI conflict avoided)");
                            destObservationDAO.delete(cur.getID());
                        }
                        // verify we retrieved the observation intact
                        if (!nochecksum) {
                            validateChecksum(o);
                        }
                        // extended content verification
                        CaomValidator.validate(o);
                        for (Plane p : o.getPlanes()) {
                            for (Artifact a : p.getArtifacts()) {
                                CaomWCSValidator.validate(a);
                            }
                        }
                        // optionally augment the observation
                        if (computePlaneMetadata) {
                            log.debug("computePlaneMetadata: " + o.getURI());
                            for (Plane p : o.getPlanes()) {
                                ComputeUtil.computeTransientState(o, p);
                            }
                        }
                        if (acGenerator != null) {
                            log.debug("generateReadAccessTuples: " + o.getURI());
                            acGenerator.generateTuples(o);
                        }
                        // everything is OK
                        destObservationDAO.put(o);
                        if (!skipped) {
                            harvestStateDAO.put(state);
                        }
                        if (hs == null) {
                            // normal harvest mode: try to cleanup skip
                            // records immediately
                            hs = harvestSkipDAO.get(source, cname, o.getURI().getURI());
                        }
                        if (hs != null) {
                            log.info("delete: " + hs + " " + format(hs.getLastModified()));
                            harvestSkipDAO.delete(hs);
                        }
                    } else if (skipped && ow.entity == null) {
                        // observation was simply missing from source == missed deletion
                        ObservationURI uri = new ObservationURI(hs.getSkipID());
                        log.info("delete: " + uri);
                        destObservationDAO.delete(uri);
                        log.info("delete: " + hs + " " + format(hs.getLastModified()));
                        harvestSkipDAO.delete(hs);
                    } else if (ow.entity.error != null) {
                        // try to make progress on failures
                        if (state != null && ow.entity.observationState.maxLastModified != null) {
                            state.curLastModified = ow.entity.observationState.maxLastModified;
                            // unknown
                            state.curID = null;
                        }
                        throw new HarvestReadException(ow.entity.error);
                    }
                    log.debug("committing transaction");
                    destObservationDAO.getTransactionManager().commitTransaction();
                    log.debug("commit: OK");
                }
                ok = true;
                ret.ingested++;
            } catch (Throwable oops) {
                log.debug("exception during harvest", oops);
                skipMsg = null;
                String str = oops.toString();
                if (oops instanceof HarvestReadException) {
                    // unwrap HarvestReadException from above
                    oops = oops.getCause();
                    // unwrap intervening RuntimeException(s)
                    while (oops.getCause() != null && oops instanceof RuntimeException) {
                        oops = oops.getCause();
                    }
                    log.error("HARVEST PROBLEM - failed to read observation: " + ow.entity.observationState.getURI() + " - " + oops.getMessage());
                    ret.handled++;
                } else if (oops instanceof IllegalStateException) {
                    if (oops.getMessage().contains("XML failed schema validation")) {
                        log.error("CONTENT PROBLEM - XML failed schema validation: " + oops.getMessage());
                        ret.handled++;
                    } else if (oops.getMessage().contains("failed to read")) {
                        log.error("CONTENT PROBLEM - " + oops.getMessage(), oops.getCause());
                        ret.handled++;
                    }
                } else if (oops instanceof IllegalArgumentException) {
                    log.error("CONTENT PROBLEM - invalid observation: " + ow.entity.observationState.getURI() + " - " + oops.getMessage());
                    if (oops.getCause() != null) {
                        log.error("cause: " + oops.getCause());
                    }
                    ret.handled++;
                } else if (oops instanceof MismatchedChecksumException) {
                    log.error("CONTENT PROBLEM - mismatching checksums: " + ow.entity.observationState.getURI());
                    ret.handled++;
                } else if (str.contains("duplicate key value violates unique constraint \"i_observationuri\"")) {
                    log.error("CONTENT PROBLEM - duplicate observation: " + ow.entity.observationState.getURI());
                    ret.handled++;
                } else if (oops instanceof TransientException) {
                    log.error("CONTENT PROBLEM - " + oops.getMessage());
                    ret.handled++;
                } else if (oops instanceof Error) {
                    log.error("FATAL - probably installation or environment", oops);
                    ret.abort = true;
                } else if (oops instanceof NullPointerException) {
                    log.error("BUG", oops);
                    ret.abort = true;
                } else if (oops instanceof BadSqlGrammarException) {
                    log.error("BUG", oops);
                    BadSqlGrammarException bad = (BadSqlGrammarException) oops;
                    SQLException sex1 = bad.getSQLException();
                    if (sex1 != null) {
                        log.error("CAUSE", sex1);
                        SQLException sex2 = sex1.getNextException();
                        log.error("NEXT CAUSE", sex2);
                    }
                    ret.abort = true;
                } else if (oops instanceof DataAccessResourceFailureException) {
                    log.error("SEVERE PROBLEM - probably out of space in database", oops);
                    ret.abort = true;
                } else if (str.contains("spherepoly_from_array")) {
                    log.error("CONTENT PROBLEM - failed to persist: " + ow.entity.observationState.getURI() + " - " + oops.getMessage());
                    oops = new IllegalArgumentException("invalid polygon (spoly): " + oops.getMessage(), oops);
                    ret.handled++;
                } else {
                    log.error("unexpected exception", oops);
                }
                // message for HarvestSkipURI record
                skipMsg = oops.getMessage();
            } finally {
                if (!ok && !dryrun) {
                    destObservationDAO.getTransactionManager().rollbackTransaction();
                    log.debug("rollback: OK");
                    timeTransaction += System.currentTimeMillis() - t;
                    try {
                        log.debug("starting HarvestSkipURI transaction");
                        HarvestSkipURI skip = null;
                        if (o != null) {
                            skip = harvestSkipDAO.get(source, cname, o.getURI().getURI());
                        } else {
                            skip = harvestSkipDAO.get(source, cname, ow.entity.observationState.getURI().getURI());
                        }
                        Date tryAfter = ow.entity.observationState.maxLastModified;
                        if (o != null) {
                            tryAfter = o.getMaxLastModified();
                        }
                        if (skip == null) {
                            if (o != null) {
                                skip = new HarvestSkipURI(source, cname, o.getURI().getURI(), tryAfter, skipMsg);
                            } else {
                                skip = new HarvestSkipURI(source, cname, ow.entity.observationState.getURI().getURI(), tryAfter, skipMsg);
                            }
                        } else {
                            skip.errorMessage = skipMsg;
                            skip.setTryAfter(tryAfter);
                        }
                        log.debug("starting HarvestSkipURI transaction");
                        destObservationDAO.getTransactionManager().startTransaction();
                        if (!skipped) {
                            // track the harvest state progress
                            harvestStateDAO.put(state);
                        }
                        // track the fail
                        log.info("put: " + skip);
                        harvestSkipDAO.put(skip);
                        if (!src.getIdentifier().equals(dest.getIdentifier())) {
                            // delete previous version of observation (if any)
                            log.info("delete: " + ow.entity.observationState.getURI());
                            destObservationDAO.delete(ow.entity.observationState.getURI());
                        }
                        log.debug("committing HarvestSkipURI transaction");
                        destObservationDAO.getTransactionManager().commitTransaction();
                        log.debug("commit HarvestSkipURI: OK");
                    } catch (Throwable oops) {
                        log.warn("failed to insert HarvestSkipURI", oops);
                        destObservationDAO.getTransactionManager().rollbackTransaction();
                        log.debug("rollback HarvestSkipURI: OK");
                        ret.abort = true;
                    }
                    ret.failed++;
                }
            }
            if (ret.abort) {
                return ret;
            }
        }
        if (ret.found < expectedNum) {
            ret.done = true;
        }
    } catch (InterruptedException | ExecutionException e) {
        log.error("SEVERE PROBLEM - ThreadPool harvesting Observations failed: " + e.getMessage());
        ret.abort = true;
    } finally {
        timeTransaction = System.currentTimeMillis() - t;
        log.debug("time to get HarvestState: " + timeState + "ms");
        log.debug("time to run ObservationListQuery: " + timeQuery + "ms");
        log.debug("time to run transactions: " + timeTransaction + "ms");
    }
    return ret;
}
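The Progress fields referenced above (found, ingested, handled, failed, abort, done) imply a small value object along these lines; this is a sketch inferred from the usage in doit(), not the actual class definition:

// Inferred sketch of the Progress result object; field names come from the
// usage in doit(), while defaults and toString are assumptions.
private static class Progress {
    boolean abort = false; // fatal problem: caller should stop harvesting
    boolean done = false;  // batch smaller than expected: source exhausted
    int found = 0;         // observations returned by the source query
    int ingested = 0;      // stored successfully in the destination
    int handled = 0;       // known failure modes recorded for retry
    int failed = 0;        // observations whose transaction was rolled back

    @Override
    public String toString() {
        return found + " found, " + ingested + " ingested, " + failed + " failed";
    }
}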