Use of com.runwaysdk.dataaccess.ProgrammingErrorException in project geoprism-registry by terraframe.
The class DHIS2ServiceFactory, method instanceGetDhis2Service:
public synchronized DHIS2TransportServiceIF instanceGetDhis2Service(DHIS2ExternalSystem system) throws UnexpectedResponseException, InvalidLoginException, HTTPException, BadServerUriException {
  if (this.dhis2 == null) {
    HTTPConnector connector = new HTTPConnector();
    connector.setServerUrl(system.getUrl());
    connector.setCredentials(system.getUsername(), system.getPassword());

    DHIS2TransportService dhis2 = new DHIS2TransportService(connector);

    try {
      dhis2.initialize();

      // If the remote API is newer than the last tested version, talk to it through
      // the compatibility layer: two versions behind the remote, but never below the
      // last tested version.
      if (dhis2.getVersionRemoteServerApi() > DHIS2FeatureService.LAST_TESTED_DHIS2_API_VERSION) {
        Integer compatLayerVersion = dhis2.getVersionRemoteServerApi() - 2;

        if (compatLayerVersion < DHIS2FeatureService.LAST_TESTED_DHIS2_API_VERSION) {
          compatLayerVersion = DHIS2FeatureService.LAST_TESTED_DHIS2_API_VERSION;
        }

        dhis2.setVersionApiCompat(compatLayerVersion);
      }
    } catch (IncompatibleServerVersionException e) {
      throw new ProgrammingErrorException(e);
    }

    // Cache the initialized transport so later calls reuse it.
    this.dhis2 = dhis2;
  }

  return this.dhis2;
}
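The interesting part of this factory is the version clamp: when the remote DHIS2 API is newer than the last tested version, the client steps back two versions but never below the last tested one. Below is a minimal, JDK-only sketch of just that calculation; the class name, method name, and the constant's value (38) are hypothetical stand-ins, not the project's actual API.

// Self-contained sketch of the compatibility-version calculation above.
// LAST_TESTED stands in for DHIS2FeatureService.LAST_TESTED_DHIS2_API_VERSION;
// its value here (38) is an assumption used only for illustration.
public class CompatVersionSketch {
  static final int LAST_TESTED = 38;

  // Returns the API version the client should speak, given the remote server's version.
  static int compatVersion(int remoteApiVersion) {
    if (remoteApiVersion <= LAST_TESTED) {
      return remoteApiVersion; // remote is within the tested range; use it as-is
    }

    // Remote is newer than anything tested: step back two versions,
    // but never below the last tested version.
    return Math.max(remoteApiVersion - 2, LAST_TESTED);
  }

  public static void main(String[] args) {
    System.out.println(compatVersion(37)); // 37 - within the tested range
    System.out.println(compatVersion(39)); // 38 - clamped to LAST_TESTED
    System.out.println(compatVersion(41)); // 39 - two behind the remote
  }
}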
Use of com.runwaysdk.dataaccess.ProgrammingErrorException in project geoprism-registry by terraframe.
The class DHIS2SynchronizationManager, method synchronize:
public void synchronize() {
  this.init();

  final ExternalSystem es = dhis2Config.getSystem();

  long rowIndex = 0;
  long total = 0;
  long exportCount = 0;

  SortedSet<DHIS2SyncLevel> levels = dhis2Config.getLevels();

  Boolean includeTranslations = LocalizationFacade.getInstalledLocales().size() > 0;

  // First calculate the total number of records
  HashMap<Integer, Long> countAtLevel = new HashMap<Integer, Long>();
  int expectedLevel = 0;

  for (DHIS2SyncLevel level : levels) {
    if (level.getLevel() != expectedLevel) {
      throw new ProgrammingErrorException("Unexpected level number [" + level.getLevel() + "].");
    }

    if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
      long count = this.getCount(level.getGeoObjectType());
      total += count;

      countAtLevel.put(level.getLevel(), count);
    }

    expectedLevel++;
  }

  history.appLock();
  history.setWorkTotal(total);
  history.apply();

  // Now do the work
  for (DHIS2SyncLevel level : levels) {
    if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
      long skip = 0;
      long pageSize = 1000;

      long count = countAtLevel.get(level.getLevel());

      while (skip < count) {
        List<VertexServerGeoObject> objects = this.query(level.getGeoObjectType(), skip, pageSize);

        for (VertexServerGeoObject go : objects) {
          try {
            this.exportGeoObject(dhis2Config, level, levels, rowIndex, go, includeTranslations);

            exportCount++;

            history.appLock();
            history.setWorkProgress(rowIndex);
            history.setExportedRecords(exportCount);
            history.apply();

            if (level.getOrgUnitGroupId() != null && level.getOrgUnitGroupId().length() > 0) {
              final String externalId = go.getExternalId(es);
              level.getOrCreateOrgUnitGroupIdSet(level.getOrgUnitGroupId()).add(externalId);
            }
          } catch (DHIS2SyncError ee) {
            recordExportError(ee, history);
          }

          rowIndex++;
        }

        // Export OrgUnitGroup changes
        if (level.getOrgUnitGroupIdSet().size() > 0) {
          try {
            Map<String, Set<String>> orgUnitGroupIdSet = level.getOrgUnitGroupIdSet();

            // Fetch and populate all the org unit groups with the ids of org units that we will be exporting
            MetadataGetResponse<OrganisationUnitGroup> resp = dhis2.metadataGet(OrganisationUnitGroup.class);
            this.service.validateDhis2Response(resp);

            List<OrganisationUnitGroup> orgUnitGroups = resp.getObjects();

            if (orgUnitGroups != null) {
              Iterator<? extends OrganisationUnitGroup> it = orgUnitGroups.iterator();

              while (it.hasNext()) {
                OrganisationUnitGroup group = it.next();

                if (orgUnitGroupIdSet.containsKey(group.getId())) {
                  orgUnitGroupIdSet.get(group.getId()).addAll(group.getOrgUnitIds());
                  group.setOrgUnitIds(orgUnitGroupIdSet.get(group.getId()));
                  orgUnitGroupIdSet.remove(group.getId());
                } else {
                  it.remove();
                }
              }

              if (orgUnitGroups.size() > 0) {
                JsonObject payload = new JsonObject();
                JsonArray jaOrgUnitGroups = new JsonArray();

                for (OrganisationUnitGroup group : orgUnitGroups) {
                  GsonBuilder builder = new GsonBuilder();
                  JsonObject joOrgUnitGroup = builder.create().toJsonTree(group, group.getClass()).getAsJsonObject();
                  joOrgUnitGroup.remove("created");
                  joOrgUnitGroup.remove("lastUpdated");
                  joOrgUnitGroup.remove("symbol");
                  joOrgUnitGroup.remove("publicAccess");
                  joOrgUnitGroup.remove("user");
                  joOrgUnitGroup.remove("userGroupAccesses");
                  joOrgUnitGroup.remove("attributeValues");
                  joOrgUnitGroup.remove("translations");
                  joOrgUnitGroup.remove("userAccesses");

                  jaOrgUnitGroups.add(joOrgUnitGroup);
                }

                payload.add(DHIS2Objects.ORGANISATION_UNIT_GROUPS, jaOrgUnitGroups);

                List<NameValuePair> params = new ArrayList<NameValuePair>();

                MetadataImportResponse resp2 = dhis2.metadataPost(params, new StringEntity(payload.toString(), Charset.forName("UTF-8")));

                this.service.validateDhis2Response(resp2);
              }
            }
          } catch (InvalidLoginException e) {
            LoginException cgrlogin = new LoginException(e);
            throw cgrlogin;
          } catch (HTTPException | BadServerUriException e) {
            HttpError cgrhttp = new HttpError(e);
            throw cgrhttp;
          }
        }

        skip += pageSize;

        NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));
      }
    }
  }

  history.appLock();
  history.setWorkProgress(rowIndex);
  history.setExportedRecords(exportCount);
  history.clearStage();
  history.addStage(ExportStage.COMPLETE);
  history.apply();

  NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));

  handleExportErrors();
}
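Before posting the metadata payload, the loop above serializes each OrganisationUnitGroup with Gson and strips server-managed attributes so the import does not try to overwrite them. Below is a small, self-contained sketch of that serialize-and-strip step using only Gson; the Group bean, its fields, and the literal "organisationUnitGroups" key are illustrative stand-ins for the real OrganisationUnitGroup class and the DHIS2Objects.ORGANISATION_UNIT_GROUPS constant.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import java.util.Arrays;
import java.util.List;

// Illustrates the serialize-then-strip step used before the metadata POST.
// The Group bean is a hypothetical stand-in for OrganisationUnitGroup.
public class MetadataPayloadSketch {
  static class Group {
    String id;
    String name;
    String lastUpdated; // server-managed; should not be sent back

    Group(String id, String name, String lastUpdated) {
      this.id = id;
      this.name = name;
      this.lastUpdated = lastUpdated;
    }
  }

  public static void main(String[] args) {
    List<Group> groups = Arrays.asList(new Group("abc123", "Clinics", "2021-01-01"));

    Gson gson = new GsonBuilder().create();
    JsonArray jaGroups = new JsonArray();

    for (Group group : groups) {
      // Serialize the bean, then remove read-only attributes so the import
      // does not attempt to overwrite them.
      JsonObject joGroup = gson.toJsonTree(group, group.getClass()).getAsJsonObject();
      joGroup.remove("lastUpdated");
      jaGroups.add(joGroup);
    }

    JsonObject payload = new JsonObject();
    payload.add("organisationUnitGroups", jaGroups);

    System.out.println(payload); // body of the metadata POST
  }
}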
Use of com.runwaysdk.dataaccess.ProgrammingErrorException in project geoprism-registry by terraframe.
The class FhirBulkDataImporter, method getExportResults:
private List<String> getExportResults(CloseableHttpClient myClient, final String statusUrl) {
  final List<String> outputs = new LinkedList<>();

  try {
    boolean complete = false;

    while (!complete) {
      try (CloseableHttpResponse response = myClient.execute(new HttpGet(statusUrl))) {
        if (response.getStatusLine().getStatusCode() == 202) {
          int retry = Integer.parseInt(response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());

          // Wait for response to be ready
          Thread.sleep(retry * 1000);
        } else if (response.getStatusLine().getStatusCode() == 200) {
          try {
            StringWriter writer = new StringWriter();
            IOUtils.copy(response.getEntity().getContent(), writer, "utf-8");
            String content = writer.toString();

            JsonObject object = JsonParser.parseString(content).getAsJsonObject();

            if (object.has("output")) {
              JsonArray output = object.get("output").getAsJsonArray();

              for (int i = 0; i < output.size(); i++) {
                JsonObject item = output.get(i).getAsJsonObject();
                String url = item.get("url").getAsString();

                outputs.add(url);
              }
            }
          } finally {
            complete = true;
          }
        } else {
          // ERROR
          StringWriter writer = new StringWriter();
          IOUtils.copy(response.getEntity().getContent(), writer, "utf-8");
          String content = writer.toString();

          throw new ProgrammingErrorException(content);
        }
      }
    }
  } catch (IOException | InterruptedException e) {
    throw new ProgrammingErrorException(e);
  }

  return outputs;
}
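The 200 branch above reads the bulk-export status document and collects the file URLs from its output array. That parsing can be isolated into a small Gson-only helper, sketched below with a hard-coded sample body; the class and method names are hypothetical.

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.LinkedList;
import java.util.List;

// Parses the "output" array of a FHIR bulk-export status document into file URLs,
// mirroring the 200 branch of getExportResults. The sample body is illustrative.
public class ExportStatusParseSketch {
  static List<String> parseOutputUrls(String content) {
    List<String> outputs = new LinkedList<>();
    JsonObject object = JsonParser.parseString(content).getAsJsonObject();

    if (object.has("output")) {
      JsonArray output = object.get("output").getAsJsonArray();

      for (int i = 0; i < output.size(); i++) {
        JsonObject item = output.get(i).getAsJsonObject();
        outputs.add(item.get("url").getAsString());
      }
    }

    return outputs;
  }

  public static void main(String[] args) {
    String body = "{\"output\":[{\"type\":\"Location\",\"url\":\"https://example.org/file1.ndjson\"}]}";
    System.out.println(parseOutputUrls(body)); // [https://example.org/file1.ndjson]
  }
}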
Use of com.runwaysdk.dataaccess.ProgrammingErrorException in project geoprism-registry by terraframe.
The class FhirBulkDataImporter, method initiateBulkExport:
private String initiateBulkExport(CloseableHttpClient myClient, FhirContext ctx) {
  try {
    Parameters params = new Parameters();
    params.addParameter().setName("_type").setValue(new StringType("Organization,Location"));

    HttpPost post = new HttpPost(system.getUrl() + "/" + "$export");
    post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
    post.setEntity(new ResourceEntity(ctx, params));

    try (CloseableHttpResponse response = myClient.execute(post)) {
      if (response.getStatusLine().getStatusCode() == 202) {
        return response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue();
      } else {
        System.out.println(response.getStatusLine().getStatusCode());

        StringWriter writer = new StringWriter();
        IOUtils.copy(response.getEntity().getContent(), writer, "utf-8");
        String message = writer.toString();

        throw new ProgrammingErrorException(message);
      }
    }
  } catch (IOException e) {
    throw new ProgrammingErrorException(e);
  }
}
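The kick-off above uses HAPI's ResourceEntity to POST a Parameters resource. A rough plain-HTTP sketch of the same kick-off follows, passing _type as a query parameter on a GET request, which the FHIR bulk-data specification also permits; the base URL and class name are placeholders and no geoprism-registry types are used.

import java.nio.charset.StandardCharsets;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

// Kick-off request for a FHIR bulk export with a plain HTTP client instead of HAPI:
// GET [base]/$export with "Prefer: respond-async", then read the Content-Location
// header from the 202 response. The base URL is a placeholder.
public class BulkExportKickoffSketch {
  public static void main(String[] args) throws Exception {
    String base = "https://fhir.example.org/baseR4"; // hypothetical server

    try (CloseableHttpClient client = HttpClients.createDefault()) {
      HttpGet kickoff = new HttpGet(base + "/$export?_type=Organization,Location");
      kickoff.addHeader("Prefer", "respond-async");
      kickoff.addHeader("Accept", "application/fhir+json");

      try (CloseableHttpResponse response = client.execute(kickoff)) {
        int status = response.getStatusLine().getStatusCode();

        if (status == 202) {
          // URL of the status endpoint to poll until the export completes.
          System.out.println(response.getFirstHeader("Content-Location").getValue());
        } else {
          throw new RuntimeException("Unexpected status " + status + ": "
              + EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
        }
      }
    }
  }
}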
Use of com.runwaysdk.dataaccess.ProgrammingErrorException in project geoprism-registry by terraframe.
The class FhirExportSynchronizationManager, method generateBundle:
public Bundle generateBundle(FhirConnection connection) {
  SortedSet<FhirSyncLevel> levels = this.config.getLevels();

  int expectedLevel = 0;

  Bundle bundle = new Bundle();

  for (FhirSyncLevel level : levels) {
    if (level.getLevel() != expectedLevel) {
      throw new ProgrammingErrorException("Unexpected level number [" + level.getLevel() + "].");
    }

    ListTypeVersion version = ListTypeVersion.get(level.getVersionId());

    FhirDataPopulator populator = FhirFactory.getPopulator(level.getImplementation());

    ListTypeFhirExporter exporter = new ListTypeFhirExporter(version, connection, populator, false);
    exporter.populateBundle(bundle);

    expectedLevel++;
  }

  return bundle;
}
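Callers of generateBundle typically need to serialize the resulting Bundle before sending or storing it. Below is a short sketch of doing that with HAPI FHIR's JSON parser, assuming the R4 structures are in use; the Bundle here is created locally for illustration rather than obtained from the synchronization manager.

import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.Bundle;

// Serializes a Bundle, such as the one returned by generateBundle, to JSON with HAPI FHIR.
public class BundleSerializeSketch {
  public static void main(String[] args) {
    FhirContext ctx = FhirContext.forR4();

    Bundle bundle = new Bundle();
    bundle.setType(Bundle.BundleType.TRANSACTION);

    String json = ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle);
    System.out.println(json);
  }
}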