Use of net.geoprism.dhis2.dhis2adapter.response.MetadataImportResponse in project geoprism-registry by terraframe.
The class OrgUnitRelationshipTest, method testMetadataPost.
@Test
public void testMetadataPost() throws Exception {
String file = IOUtils.toString(DHIS2BridgeTest.class.getResourceAsStream("/default/metadataPost-OrgUnitChangeParent.json"), "UTF-8");
DHIS2Bridge facade = TestBridgeBuilder.buildDefault(file, 200);
facade.initialize();
// This payload changes the parent of OU_559 (Ngelehun CHC) from Badjia (OU_539) to Baoma (OU_540)
final String payload = "{\n" + " \"organisationUnits\": [\n" + " {\n" + " \"id\": \"DiszpKrYNg8\",\n" + " \"name\": \"Ngelehun CHC\",\n" + " \"shortName\": \"Ngelehun CHC\",\n" + " \"openingDate\": \"1970-01-01T00:00:00.000\",\n" + " \"parent\": {\n" + " \"id\": \"vWbkYPRmKyS\"\n" + " },\n" + " \"path\": \"/ImspTQPwCqd/vWbkYPRmKyS/MXXDvFpfZmP\",\n" + " \"level\": 3\n" + " }\n" + " ]\n" + "}";
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("importMode", "VALIDATE"));
MetadataImportResponse resp = facade.metadataPost(params, new StringEntity(payload, Charset.forName("UTF-8")));
// System.out.println(resp.getResponse());
}
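The test above only drives the call against a stubbed bridge and never inspects the returned MetadataImportResponse. A minimal follow-up sketch using org.junit.Assert and the getResponse() accessor already visible in the commented-out println is shown below (illustrative, not part of the original test):
// Illustrative assertions only: verify the stubbed 200 reply was parsed into a response object
Assert.assertNotNull(resp);
Assert.assertNotNull(resp.getResponse());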
Use of net.geoprism.dhis2.dhis2adapter.response.MetadataImportResponse in project geoprism-registry by terraframe.
The class TranslationsTest, method testMetadataPost.
@Test
public void testMetadataPost() throws Exception {
String file = "{\"importParams\":{\"userOverrideMode\":\"NONE\",\"importMode\":\"VALIDATE\",\"identifier\":\"UID\",\"preheatMode\":\"REFERENCE\",\"importStrategy\":\"CREATE_AND_UPDATE\",\"atomicMode\":\"ALL\",\"mergeMode\":\"REPLACE\",\"flushMode\":\"AUTO\",\"skipSharing\":false,\"skipTranslation\":false,\"skipValidation\":false,\"metadataSyncImport\":false,\"username\":\"admin\"},\"status\":\"OK\",\"typeReports\":[{\"klass\":\"org.hisp.dhis.organisationunit.OrganisationUnit\",\"stats\":{\"created\":0,\"updated\":1,\"deleted\":0,\"ignored\":0,\"total\":1}}],\"stats\":{\"created\":0,\"updated\":1,\"deleted\":0,\"ignored\":0,\"total\":1}}";
DHIS2Bridge facade = TestBridgeBuilder.buildDefault(file, 200);
facade.initialize();
// Change some localized values of Sierra Leone using the metadata API.
final String payload = "{\n" + " \"organisationUnits\": [\n" + " {\n" + " \"id\": \"ImspTQPwCqd\",\n" + " \"name\": \"Sierra Leone\",\n" + " \"shortName\": \"Sierra Leone\",\n" + " \"openingDate\": \"1970-01-01T00:00:00.000\",\n" + " \"translations\": [\n" + " {\n" + " \"property\": \"SHORT_NAME\",\n" + " \"locale\": \"km\",\n" + " \"value\": \"Localization Test km\"\n" + " },\n" + " {\n" + " \"property\": \"NAME\",\n" + " \"locale\": \"km\",\n" + " \"value\": \"Localization Test km\"\n" + " }\n" + " ]\n" + " }\n" + " ]\n" + "}";
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("importMode", "VALIDATE"));
MetadataImportResponse resp = facade.metadataPost(params, new StringEntity(payload, Charset.forName("UTF-8")));
}
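The canned body handed to the stub above reports "status":"OK" with one updated OrganisationUnit. A hedged sketch of asserting on that status is shown below; it assumes resp.getResponse() yields the raw JSON body (only the accessor's existence is shown on this page) and reuses Gson, which the project already depends on (see GsonBuilder in DHIS2SynchronizationManager below):
// Illustrative only: parse the raw body and check the reported import status
JsonObject body = new GsonBuilder().create().fromJson(resp.getResponse().toString(), JsonObject.class);
Assert.assertEquals("OK", body.get("status").getAsString());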
Use of net.geoprism.dhis2.dhis2adapter.response.MetadataImportResponse in project geoprism-registry by terraframe.
The class DHIS2SynchronizationManager, method synchronize.
public void synchronize() {
this.init();
final ExternalSystem es = dhis2Config.getSystem();
long rowIndex = 0;
long total = 0;
long exportCount = 0;
SortedSet<DHIS2SyncLevel> levels = dhis2Config.getLevels();
Boolean includeTranslations = LocalizationFacade.getInstalledLocales().size() > 0;
// First calculate the total number of records
HashMap<Integer, Long> countAtLevel = new HashMap<Integer, Long>();
int expectedLevel = 0;
for (DHIS2SyncLevel level : levels) {
if (level.getLevel() != expectedLevel) {
throw new ProgrammingErrorException("Unexpected level number [" + level.getLevel() + "].");
}
if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
long count = this.getCount(level.getGeoObjectType());
total += count;
countAtLevel.put(level.getLevel(), count);
}
expectedLevel++;
}
history.appLock();
history.setWorkTotal(total);
history.apply();
// Now do the work
for (DHIS2SyncLevel level : levels) {
if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
long skip = 0;
long pageSize = 1000;
long count = countAtLevel.get(level.getLevel());
while (skip < count) {
List<VertexServerGeoObject> objects = this.query(level.getGeoObjectType(), skip, pageSize);
for (VertexServerGeoObject go : objects) {
try {
this.exportGeoObject(dhis2Config, level, levels, rowIndex, go, includeTranslations);
exportCount++;
history.appLock();
history.setWorkProgress(rowIndex);
history.setExportedRecords(exportCount);
history.apply();
if (level.getOrgUnitGroupId() != null && level.getOrgUnitGroupId().length() > 0) {
final String externalId = go.getExternalId(es);
level.getOrCreateOrgUnitGroupIdSet(level.getOrgUnitGroupId()).add(externalId);
}
} catch (DHIS2SyncError ee) {
recordExportError(ee, history);
}
rowIndex++;
}
// Export OrgUnitGroup changes
if (level.getOrgUnitGroupIdSet().size() > 0) {
try {
Map<String, Set<String>> orgUnitGroupIdSet = level.getOrgUnitGroupIdSet();
// Fetch and populate all the org unit groups with the ids of org units that we will be exporting
MetadataGetResponse<OrganisationUnitGroup> resp = dhis2.metadataGet(OrganisationUnitGroup.class);
this.service.validateDhis2Response(resp);
List<OrganisationUnitGroup> orgUnitGroups = resp.getObjects();
if (orgUnitGroups != null) {
Iterator<? extends OrganisationUnitGroup> it = orgUnitGroups.iterator();
while (it.hasNext()) {
OrganisationUnitGroup group = it.next();
if (orgUnitGroupIdSet.containsKey(group.getId())) {
orgUnitGroupIdSet.get(group.getId()).addAll(group.getOrgUnitIds());
group.setOrgUnitIds(orgUnitGroupIdSet.get(group.getId()));
orgUnitGroupIdSet.remove(group.getId());
} else {
it.remove();
}
}
if (orgUnitGroups.size() > 0) {
JsonObject payload = new JsonObject();
JsonArray jaOrgUnitGroups = new JsonArray();
for (OrganisationUnitGroup group : orgUnitGroups) {
GsonBuilder builder = new GsonBuilder();
JsonObject joOrgUnitGroup = builder.create().toJsonTree(group, group.getClass()).getAsJsonObject();
joOrgUnitGroup.remove("created");
joOrgUnitGroup.remove("lastUpdated");
joOrgUnitGroup.remove("symbol");
joOrgUnitGroup.remove("publicAccess");
joOrgUnitGroup.remove("user");
joOrgUnitGroup.remove("userGroupAccesses");
joOrgUnitGroup.remove("attributeValues");
joOrgUnitGroup.remove("translations");
joOrgUnitGroup.remove("userAccesses");
jaOrgUnitGroups.add(joOrgUnitGroup);
}
payload.add(DHIS2Objects.ORGANISATION_UNIT_GROUPS, jaOrgUnitGroups);
List<NameValuePair> params = new ArrayList<NameValuePair>();
MetadataImportResponse resp2 = dhis2.metadataPost(params, new StringEntity(payload.toString(), Charset.forName("UTF-8")));
this.service.validateDhis2Response(resp2);
}
}
} catch (InvalidLoginException e) {
LoginException cgrlogin = new LoginException(e);
throw cgrlogin;
} catch (HTTPException | BadServerUriException e) {
HttpError cgrhttp = new HttpError(e);
throw cgrhttp;
}
}
skip += pageSize;
NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));
}
}
}
history.appLock();
history.setWorkProgress(rowIndex);
history.setExportedRecords(exportCount);
history.clearStage();
history.addStage(ExportStage.COMPLETE);
history.apply();
NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));
handleExportErrors();
}
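For context, the organisation-unit-group branch above assembles and posts a metadata payload of roughly the following shape before reading back a MetadataImportResponse. This is an illustrative sketch only: the exact serialized fields depend on the adapter's OrganisationUnitGroup class, the key name assumes DHIS2Objects.ORGANISATION_UNIT_GROUPS resolves to the standard DHIS2 "organisationUnitGroups" collection, and the ids are sample values.
{
  "organisationUnitGroups": [
    {
      "id": "CXw2yu5fodb",
      "name": "CHC",
      "organisationUnits": [ { "id": "DiszpKrYNg8" } ]
    }
  ]
}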