Use of com.runwaysdk.system.scheduler.JobHistory in project geoprism-registry by terraframe.
The class FhirExportJob, method toJson:
public JsonObject toJson() {
  SimpleDateFormat format = new SimpleDateFormat(GeoObjectImportConfiguration.DATE_FORMAT);
  format.setTimeZone(GeoRegistryUtil.SYSTEM_TIMEZONE);

  final MasterListVersion version = this.getVersion();
  final MasterList masterlist = version.getMasterlist();
  final ServerGeoObjectType type = masterlist.getGeoObjectType();
  final JobHistory history = this.getAllJobHistory().getAll().get(0);
  final GeoprismUser user = GeoprismUser.get(this.getRunAsUser().getOid());

  try {
    final JsonObject object = new JsonObject();
    object.addProperty(FhirExportJob.OID, this.getOid());
    object.add(FhirExportJob.VERSION, this.getVersion().toJSON(false));
    object.addProperty(FhirExportJob.IMPLEMENTATION, this.getImplementation());
    object.addProperty(FhirExportJob.TYPE, type.getLabel().getValue());
    object.addProperty(JobHistory.STATUS, history.getStatus().get(0).getDisplayLabel());
    object.addProperty("date", format.format(version.getPublishDate()));
    object.addProperty("author", user.getUsername());
    object.addProperty("createDate", format.format(history.getCreateDate()));
    object.addProperty("lastUpdateDate", format.format(history.getLastUpdateDate()));
    object.addProperty("workProgress", history.getWorkProgress());
    object.addProperty("workTotal", history.getWorkTotal());
    object.addProperty("historyoryId", history.getOid());

    if (history.getErrorJson() != null && history.getErrorJson().length() > 0) {
      object.addProperty("message", history.getLocalizedError(Session.getCurrentLocale()));
    }

    return object;
  } catch (JSONException e) {
    throw new ProgrammingErrorException(e);
  }
}
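For context, a minimal sketch of how a caller might collect the JSON produced by toJson for several export jobs into a single Gson JsonArray, for example to return from a REST endpoint. The serializeJobs helper and its surrounding class are hypothetical and not part of geoprism-registry.

// Hypothetical helper (illustrative only): gather job JSON objects into one array.
// The import for FhirExportJob from the project's own package is omitted here.
import java.util.List;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

public class FhirExportJobSerializer {
  public static JsonArray serializeJobs(List<FhirExportJob> jobs) {
    JsonArray array = new JsonArray();

    for (FhirExportJob job : jobs) {
      // Uses FhirExportJob.toJson() exactly as shown above.
      JsonObject json = job.toJson();
      array.add(json);
    }

    return array;
  }
}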
Use of com.runwaysdk.system.scheduler.JobHistory in project geoprism-registry by terraframe.
The class SchedulerTestUtils, method clearImportData:
@Request
public static void clearImportData() {
  List<JobHistoryRecord> stoppedJobs = SchedulerManager.interruptAllRunningJobs();

  if (stoppedJobs.size() > 0) {
    try {
      // Wait a few seconds for the job to stop
      Thread.sleep(2000);
    } catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }

  ValidationProblemQuery vpq = new ValidationProblemQuery(new QueryFactory());
  OIterator<? extends ValidationProblem> vpit = vpq.getIterator();
  while (vpit.hasNext()) {
    ValidationProblem.lock(vpit.next().getOid()).delete();
  }

  ImportErrorQuery ieq = new ImportErrorQuery(new QueryFactory());
  OIterator<? extends ImportError> ieit = ieq.getIterator();
  while (ieit.hasNext()) {
    ImportError.lock(ieit.next().getOid()).delete();
  }

  JobHistoryRecordQuery query = new JobHistoryRecordQuery(new QueryFactory());
  OIterator<? extends JobHistoryRecord> jhrs = query.getIterator();
  while (jhrs.hasNext()) {
    JobHistoryRecord jhr = JobHistoryRecord.lock(jhrs.next().getOid());
    jhr.appLock();
    JobHistory hist = jhr.getChild();

    if (hist instanceof ImportHistory) {
      hist = ImportHistory.lock(hist.getOid());
      hist.appLock();

      ExecutableJob job = jhr.getParent();
      job.appLock();

      // If any tests are currently running, they will be errored out as a result of this.
      if (hist.getStatus().get(0).equals(AllJobStatus.RUNNING) || hist.getStatus().get(0).equals(AllJobStatus.NEW) || hist.getStatus().get(0).equals(AllJobStatus.QUEUED)) {
        logger.error("History with oid [" + hist.getOid() + "] currently has status [" + hist.getStatus().get(0).getEnumName() + "] which is concerning because it is about to be deleted. This will probably cause errors in the running job.");
      }

      // hist = ImportHistory.lock(hist.getOid());
      // hist.appLock();
      // hist = ImportHistory.lock(hist.getOid());
      // VaultFile vf = ( (ImportHistory) hist ).getImportFile();
      // hist.setValue(ImportHistory.IMPORTFILE, null);
      // JobHistoryHistoryComment comment = hist.getHistoryComment();
      // hist.setValue(JobHistory.HISTORYCOMMENT, null);
      // JobHistoryHistoryInformation information = hist.getHistoryInformation();
      // hist.setValue(JobHistory.HISTORYINFORMATION, null);
      // hist.apply();
      // vf.delete();
      // comment.delete();
      // information.delete();
      // hist = ImportHistory.lock(hist.getOid());
      // hist.appLock();
      // hist = ImportHistory.lock(hist.getOid());
      // hist.delete();

      // This will also delete the history.
      JobHistoryRecord.lock(jhr.getOid()).delete();
      ExecutableJob.lock(job.getOid()).delete();
    }
  }

  SynonymQuery sq = new SynonymQuery(new QueryFactory());
  sq.WHERE(sq.getDisplayLabel().localize().EQ("00"));
  OIterator<? extends Synonym> it = sq.getIterator();
  while (it.hasNext()) {
    Synonym.lock(it.next().getOid()).delete();
  }
}
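As a usage sketch, clearImportData would typically run in a test teardown so each import test starts from a clean scheduler state. This assumes JUnit 4 (the project code above already uses Assert from JUnit); the test class name is invented for illustration.

// Hypothetical JUnit 4 teardown (illustrative only; the class name is not from the project).
import org.junit.After;

public class ExampleImportTest {
  @After
  public void tearDown() {
    // Interrupts any running jobs and deletes leftover validation problems, import errors,
    // job history records, and the "00" synonyms created during the test.
    SchedulerTestUtils.clearImportData();
  }
}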
Use of com.runwaysdk.system.scheduler.JobHistory in project geoprism-registry by terraframe.
The class ListTypeTest, method waitUntilPublished:
@Request
private void waitUntilPublished(String oid) {
  List<? extends JobHistory> histories = null;
  int waitTime = 0;

  while (histories == null) {
    if (waitTime > 10000) {
      Assert.fail("Job was never scheduled. Unable to find any associated history.");
    }

    QueryFactory qf = new QueryFactory();

    PublishListTypeVersionJobQuery jobQuery = new PublishListTypeVersionJobQuery(qf);
    jobQuery.WHERE(jobQuery.getListType().EQ(oid));

    JobHistoryQuery jhq = new JobHistoryQuery(qf);
    jhq.WHERE(jhq.job(jobQuery));

    List<? extends JobHistory> potentialHistories = jhq.getIterator().getAll();

    if (potentialHistories.size() > 0) {
      histories = potentialHistories;
    } else {
      try {
        Thread.sleep(1000);
      } catch (InterruptedException e) {
        e.printStackTrace();
        Assert.fail("Interrupted while waiting");
      }

      waitTime += 1000;
    }
  }

  for (JobHistory history : histories) {
    try {
      SchedulerTestUtils.waitUntilStatus(history.getOid(), AllJobStatus.SUCCESS);
    } catch (InterruptedException e) {
      e.printStackTrace();
      Assert.fail("Interrupted while waiting");
    }
  }
}
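The wait loop above is a common polling pattern: query for history, sleep one second, give up after ten. A generic sketch of the same idea follows, written independently of the Runway query API; the class, method name, and signature are invented for illustration and are not part of the project.

import java.util.function.Supplier;

public final class PollingSketch {
  // Polls until the supplier returns a non-null value or the timeout elapses,
  // mirroring the structure of waitUntilPublished (1 s sleep, ~10 s budget).
  public static <T> T waitFor(Supplier<T> query, long timeoutMs) throws InterruptedException {
    long waited = 0;

    while (waited <= timeoutMs) {
      T result = query.get();
      if (result != null) {
        return result;
      }

      Thread.sleep(1000);
      waited += 1000;
    }

    throw new IllegalStateException("Timed out after " + timeoutMs + " ms");
  }
}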
Use of com.runwaysdk.system.scheduler.JobHistory in project geoprism-registry by terraframe.
The class PublishMasterListJob, method toJson:
public JsonObject toJson() {
  SimpleDateFormat format = new SimpleDateFormat(GeoObjectImportConfiguration.DATE_FORMAT);
  format.setTimeZone(GeoRegistryUtil.SYSTEM_TIMEZONE);

  final MasterList masterlist = this.getMasterList();
  final ServerGeoObjectType type = masterlist.getGeoObjectType();
  List<? extends JobHistory> allHist = this.getAllJobHistory().getAll();
  final GeoprismUser user = GeoprismUser.get(this.getRunAsUser().getOid());

  try {
    final JsonObject object = new JsonObject();
    object.addProperty(PublishMasterListJob.OID, this.getOid());
    object.addProperty(PublishMasterListJob.MASTERLIST, this.getMasterListOid());
    object.addProperty(PublishMasterListJob.TYPE, type.getLabel().getValue());

    if (allHist.size() > 0) {
      final JobHistory history = allHist.get(0);
      object.addProperty(JobHistory.STATUS, history.getStatus().get(0).getDisplayLabel());
      object.addProperty("author", user.getUsername());
      object.addProperty("createDate", format.format(history.getCreateDate()));
      object.addProperty("lastUpdateDate", format.format(history.getLastUpdateDate()));
      object.addProperty("workProgress", history.getWorkProgress());
      object.addProperty("workTotal", history.getWorkTotal());
      object.addProperty("historyoryId", history.getOid());

      if (history.getStatus().get(0).equals(AllJobStatus.FAILURE) && history.getErrorJson().length() > 0) {
        String errorJson = history.getErrorJson();
        JsonObject error = JsonParser.parseString(errorJson).getAsJsonObject();

        JsonObject exception = new JsonObject();
        exception.addProperty("type", error.get("type").getAsString());
        exception.addProperty("message", history.getLocalizedError(Session.getCurrentLocale()));

        object.add("exception", exception);
      }
    }

    return object;
  } catch (JSONException e) {
    throw new ProgrammingErrorException(e);
  }
}
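On the consuming side, a client could pull the failure details back out of the JSON produced above. The following Gson sketch uses a hard-coded payload of the same shape; the field names and values are invented for illustration (the real status key comes from the JobHistory.STATUS constant, whose literal value is not shown here).

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class JobStatusClientSketch {
  public static void main(String[] args) {
    // Sample payload shaped like PublishMasterListJob.toJson() output (values are made up).
    String payload = "{\"status\":\"Failure\",\"workProgress\":10,\"workTotal\":100,"
        + "\"exception\":{\"type\":\"java.lang.RuntimeException\",\"message\":\"Publish failed\"}}";

    JsonObject job = JsonParser.parseString(payload).getAsJsonObject();

    // The "exception" object is only present when the job failed with error JSON attached.
    if (job.has("exception")) {
      JsonObject exception = job.getAsJsonObject("exception");
      System.out.println(exception.get("type").getAsString() + ": " + exception.get("message").getAsString());
    }
  }
}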
Use of com.runwaysdk.system.scheduler.JobHistory in project geoprism-registry by terraframe.
The class PublishShapefileJob, method toJson:
public JsonObject toJson() {
  SimpleDateFormat format = new SimpleDateFormat(GeoObjectImportConfiguration.DATE_FORMAT);
  format.setTimeZone(GeoRegistryUtil.SYSTEM_TIMEZONE);

  final MasterListVersion version = this.getVersion();
  final MasterList masterlist = version.getMasterlist();
  final ServerGeoObjectType type = masterlist.getGeoObjectType();
  final JobHistory history = this.getAllJobHistory().getAll().get(0);
  final GeoprismUser user = GeoprismUser.get(this.getRunAsUser().getOid());

  try {
    final JsonObject object = new JsonObject();
    object.addProperty(PublishShapefileJob.OID, this.getOid());
    object.add(PublishShapefileJob.VERSION, this.getVersion().toJSON(false));
    object.addProperty(PublishShapefileJob.TYPE, type.getLabel().getValue());
    object.addProperty(JobHistory.STATUS, history.getStatus().get(0).getDisplayLabel());
    object.addProperty("date", format.format(version.getPublishDate()));
    object.addProperty("author", user.getUsername());
    object.addProperty("createDate", format.format(history.getCreateDate()));
    object.addProperty("lastUpdateDate", format.format(history.getLastUpdateDate()));
    object.addProperty("workProgress", history.getWorkProgress());
    object.addProperty("workTotal", history.getWorkTotal());
    object.addProperty("historyoryId", history.getOid());

    return object;
  } catch (JSONException e) {
    throw new ProgrammingErrorException(e);
  }
}
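The three toJson methods above repeat the same block of history fields. One way to factor that out is a small shared helper that appends those fields to an existing JsonObject. This is only a sketch, assuming the JobHistory getters used above (getStatus, getCreateDate, getLastUpdateDate, getWorkProgress, getWorkTotal, getOid) are available; the helper class itself is not part of the project.

import java.text.SimpleDateFormat;

import com.google.gson.JsonObject;
import com.runwaysdk.system.scheduler.JobHistory;

public final class JobHistoryJson {
  // Appends the history fields that the toJson() methods above all emit.
  // The property names (including "historyoryId") are kept exactly as they appear in the project code.
  public static void addHistoryFields(JsonObject object, JobHistory history, SimpleDateFormat format) {
    object.addProperty(JobHistory.STATUS, history.getStatus().get(0).getDisplayLabel());
    object.addProperty("createDate", format.format(history.getCreateDate()));
    object.addProperty("lastUpdateDate", format.format(history.getLastUpdateDate()));
    object.addProperty("workProgress", history.getWorkProgress());
    object.addProperty("workTotal", history.getWorkTotal());
    object.addProperty("historyoryId", history.getOid());
  }
}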