Use of net.geoprism.registry.model.graph.VertexServerGeoObject in project geoprism-registry by terraframe.
From the class SearchTablePatch, the method createRecords:
@Transaction
public void createRecords(SearchService service) {
  MdVertexDAOIF mdVertex = MdVertexDAO.getMdVertexDAO(GeoVertex.CLASS);

  long pageSize = 1000;
  long skip = 0;
  int count = 0;

  do {
    StringBuilder builder = new StringBuilder();
    builder.append("SELECT FROM " + mdVertex.getDBClassName());
    builder.append(" ORDER BY oid");
    builder.append(" SKIP " + skip + " LIMIT " + pageSize);

    GraphQuery<VertexObject> query = new GraphQuery<VertexObject>(builder.toString());
    List<VertexObject> results = query.getResults();

    for (VertexObject result : results) {
      ServerGeoObjectType type = ServerGeoObjectType.get((MdVertexDAOIF) result.getMdClass());

      service.insert(new VertexServerGeoObject(type, result));
    }

    skip += pageSize;
    count = results.size();
  } while (count > 0);
}
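The patch walks the entire GeoVertex table in fixed-size pages rather than loading every vertex at once, stopping after the first empty page. The SKIP/LIMIT contract can be sketched independently of the Runway graph classes; in the sketch below, fetchPage is a hypothetical stand-in for the "SELECT ... SKIP x LIMIT y" GraphQuery and the consumer stands in for service.insert(...).

import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Consumer;

// Minimal sketch of the SKIP/LIMIT paging loop used by createRecords.
// fetchPage and consumer are hypothetical stand-ins, not project classes.
public class PagingSketch {
  public static <T> void forEachPage(BiFunction<Long, Long, List<T>> fetchPage, long pageSize, Consumer<T> consumer) {
    long skip = 0;
    int count;

    do {
      List<T> results = fetchPage.apply(skip, pageSize);

      results.forEach(consumer);

      skip += pageSize;
      count = results.size(); // the loop ends after the first empty page
    } while (count > 0);
  }
}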
Use of net.geoprism.registry.model.graph.VertexServerGeoObject in project geoprism-registry by terraframe.
From the class VertexGeoObjectStrategy, the method constructFromGeoObject:
@Override
public VertexServerGeoObject constructFromGeoObject(GeoObject geoObject, boolean isNew) {
  if (!isNew) {
    VertexObject vertex = VertexServerGeoObject.getVertex(type, geoObject.getUid());

    if (vertex == null) {
      InvalidRegistryIdException ex = new InvalidRegistryIdException();
      ex.setRegistryId(geoObject.getUid());
      throw ex;
    }

    return new VertexServerGeoObject(type, vertex);
  } else {
    if (!RegistryIdService.getInstance().isIssuedId(geoObject.getUid())) {
      InvalidRegistryIdException ex = new InvalidRegistryIdException();
      ex.setRegistryId(geoObject.getUid());
      throw ex;
    }

    VertexObject vertex = VertexServerGeoObject.newInstance(type);

    return new VertexServerGeoObject(type, vertex);
  }
}
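Callers only interact with the two-argument signature above; the isNew flag decides which validation path runs. A minimal, illustrative fragment of the calling convention, assuming a strategy instance and a deserialized GeoObject are already in scope:

// Illustrative fragment only; `strategy` and `geoObject` are assumed to exist.
// isNew == false : the UID must resolve to a stored vertex, otherwise an
//                  InvalidRegistryIdException is thrown.
VertexServerGeoObject existing = strategy.constructFromGeoObject(geoObject, false);

// isNew == true : the UID must have been issued by RegistryIdService; a fresh,
//                 unsaved vertex is created and wrapped.
VertexServerGeoObject created = strategy.constructFromGeoObject(geoObject, true);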
Use of net.geoprism.registry.model.graph.VertexServerGeoObject in project geoprism-registry by terraframe.
From the class DHIS2SynchronizationManager, the method synchronize:
public void synchronize() {
  this.init();

  final ExternalSystem es = dhis2Config.getSystem();

  long rowIndex = 0;
  long total = 0;
  long exportCount = 0;

  SortedSet<DHIS2SyncLevel> levels = dhis2Config.getLevels();

  Boolean includeTranslations = LocalizationFacade.getInstalledLocales().size() > 0;

  // First calculate the total number of records
  HashMap<Integer, Long> countAtLevel = new HashMap<Integer, Long>();
  int expectedLevel = 0;

  for (DHIS2SyncLevel level : levels) {
    if (level.getLevel() != expectedLevel) {
      throw new ProgrammingErrorException("Unexpected level number [" + level.getLevel() + "].");
    }

    if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
      long count = this.getCount(level.getGeoObjectType());
      total += count;
      countAtLevel.put(level.getLevel(), count);
    }

    expectedLevel++;
  }

  history.appLock();
  history.setWorkTotal(total);
  history.apply();

  // Now do the work
  for (DHIS2SyncLevel level : levels) {
    if (level.getSyncType() != null && !DHIS2SyncLevel.Type.NONE.equals(level.getSyncType())) {
      long skip = 0;
      long pageSize = 1000;
      long count = countAtLevel.get(level.getLevel());

      while (skip < count) {
        List<VertexServerGeoObject> objects = this.query(level.getGeoObjectType(), skip, pageSize);

        for (VertexServerGeoObject go : objects) {
          try {
            this.exportGeoObject(dhis2Config, level, levels, rowIndex, go, includeTranslations);

            exportCount++;

            history.appLock();
            history.setWorkProgress(rowIndex);
            history.setExportedRecords(exportCount);
            history.apply();

            if (level.getOrgUnitGroupId() != null && level.getOrgUnitGroupId().length() > 0) {
              final String externalId = go.getExternalId(es);
              level.getOrCreateOrgUnitGroupIdSet(level.getOrgUnitGroupId()).add(externalId);
            }
          } catch (DHIS2SyncError ee) {
            recordExportError(ee, history);
          }

          rowIndex++;
        }

        // Export OrgUnitGroup changes
        if (level.getOrgUnitGroupIdSet().size() > 0) {
          try {
            Map<String, Set<String>> orgUnitGroupIdSet = level.getOrgUnitGroupIdSet();

            // Fetch and populate all the org unit groups with the ids of org units that we will be exporting
            MetadataGetResponse<OrganisationUnitGroup> resp = dhis2.metadataGet(OrganisationUnitGroup.class);
            this.service.validateDhis2Response(resp);

            List<OrganisationUnitGroup> orgUnitGroups = resp.getObjects();

            if (orgUnitGroups != null) {
              Iterator<? extends OrganisationUnitGroup> it = orgUnitGroups.iterator();

              while (it.hasNext()) {
                OrganisationUnitGroup group = it.next();

                if (orgUnitGroupIdSet.containsKey(group.getId())) {
                  orgUnitGroupIdSet.get(group.getId()).addAll(group.getOrgUnitIds());
                  group.setOrgUnitIds(orgUnitGroupIdSet.get(group.getId()));
                  orgUnitGroupIdSet.remove(group.getId());
                } else {
                  it.remove();
                }
              }

              if (orgUnitGroups.size() > 0) {
                JsonObject payload = new JsonObject();
                JsonArray jaOrgUnitGroups = new JsonArray();

                for (OrganisationUnitGroup group : orgUnitGroups) {
                  GsonBuilder builder = new GsonBuilder();
                  JsonObject joOrgUnitGroup = builder.create().toJsonTree(group, group.getClass()).getAsJsonObject();

                  joOrgUnitGroup.remove("created");
                  joOrgUnitGroup.remove("lastUpdated");
                  joOrgUnitGroup.remove("symbol");
                  joOrgUnitGroup.remove("publicAccess");
                  joOrgUnitGroup.remove("user");
                  joOrgUnitGroup.remove("userGroupAccesses");
                  joOrgUnitGroup.remove("attributeValues");
                  joOrgUnitGroup.remove("translations");
                  joOrgUnitGroup.remove("userAccesses");

                  jaOrgUnitGroups.add(joOrgUnitGroup);
                }

                payload.add(DHIS2Objects.ORGANISATION_UNIT_GROUPS, jaOrgUnitGroups);

                List<NameValuePair> params = new ArrayList<NameValuePair>();

                MetadataImportResponse resp2 = dhis2.metadataPost(params, new StringEntity(payload.toString(), Charset.forName("UTF-8")));
                this.service.validateDhis2Response(resp2);
              }
            }
          } catch (InvalidLoginException e) {
            LoginException cgrlogin = new LoginException(e);
            throw cgrlogin;
          } catch (HTTPException | BadServerUriException e) {
            HttpError cgrhttp = new HttpError(e);
            throw cgrhttp;
          }
        }

        skip += pageSize;

        NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));
      }
    }
  }

  history.appLock();
  history.setWorkProgress(rowIndex);
  history.setExportedRecords(exportCount);
  history.clearStage();
  history.addStage(ExportStage.COMPLETE);
  history.apply();

  NotificationFacade.queue(new GlobalNotificationMessage(MessageType.DATA_EXPORT_JOB_CHANGE, null));

  handleExportErrors();
}
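Before POSTing the OrgUnitGroup changes, the method re-serializes each group with Gson and strips server-managed fields from the JSON tree. That trimming step can be isolated as in the sketch below; the Group POJO, the shortened field list, and the "organisationUnitGroups" key (the assumed value of DHIS2Objects.ORGANISATION_UNIT_GROUPS) are illustrative stand-ins rather than the project's actual classes.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

// Sketch of the payload-trimming step: convert each group POJO to a JSON tree,
// drop fields the server manages, and collect the result into the payload array.
public class MetadataPayloadSketch {
  // Hypothetical minimal POJO standing in for OrganisationUnitGroup.
  static class Group {
    String id;
    String name;
    String created;     // server-managed, stripped before POST
    String lastUpdated; // server-managed, stripped before POST
  }

  public static JsonObject buildPayload(Iterable<Group> groups) {
    Gson gson = new GsonBuilder().create();
    JsonArray jaGroups = new JsonArray();

    for (Group group : groups) {
      JsonObject jo = gson.toJsonTree(group).getAsJsonObject();

      // Mirrors the removals in synchronize(); the real code also strips
      // symbol, publicAccess, user, userGroupAccesses, attributeValues,
      // translations and userAccesses.
      jo.remove("created");
      jo.remove("lastUpdated");

      jaGroups.add(jo);
    }

    JsonObject payload = new JsonObject();
    // "organisationUnitGroups" is an assumption for DHIS2Objects.ORGANISATION_UNIT_GROUPS.
    payload.add("organisationUnitGroups", jaGroups);

    return payload;
  }
}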
Use of net.geoprism.registry.model.graph.VertexServerGeoObject in project geoprism-registry by terraframe.
From the class DHIS2TermAttributeMapping, the method writeCustomAttributes:
@Override
public void writeCustomAttributes(JsonArray attributeValues, VertexServerGeoObject serverGo, DHIS2SyncConfig dhis2Config, DHIS2SyncLevel syncLevel, String lastUpdateDate, String createDate) {
  ServerGeoObjectType got = syncLevel.getGeoObjectType();

  AttributeType attr = got.getAttribute(this.getCgrAttrName()).get();

  Object value = serverGo.getValue(attr.getName());

  if (value == null || (value instanceof String && ((String) value).length() == 0)) {
    return;
  }

  if (this.isOrgUnitGroup()) {
    if (attr instanceof AttributeTermType) {
      String termId = this.getTermId(value);

      String orgUnitGroupId = this.getTermMapping(termId);

      if (orgUnitGroupId == null) {
        MissingDHIS2TermOrgUnitGroupMapping ex = new MissingDHIS2TermOrgUnitGroupMapping();
        ex.setTermCode(termId);
        throw ex;
      }

      Set<String> orgUnitGroupIdSet = syncLevel.getOrgUnitGroupIdSet(orgUnitGroupId);
      if (orgUnitGroupIdSet == null) {
        orgUnitGroupIdSet = syncLevel.newOrgUnitGroupIdSet(orgUnitGroupId);
      }

      orgUnitGroupIdSet.add(serverGo.getExternalId(dhis2Config.getSystem()));
    } else {
      logger.error("Unsupported attribute type [" + attr.getClass().getName() + "] with name [" + attr.getName() + "] when matched to OrgUnitGroup.");
      return;
    }
  } else {
    super.writeCustomAttributes(attributeValues, serverGo, dhis2Config, syncLevel, lastUpdateDate, createDate);
  }
}
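When a term attribute is mapped to an org unit group, the method buckets the object's DHIS2 external id into a per-group set on the sync level, creating the set on first use (getOrgUnitGroupIdSet falling back to newOrgUnitGroupIdSet). Reduced to a plain map, the bookkeeping looks like this sketch; the class and method names are illustrative, not the actual DHIS2SyncLevel API.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Illustrative reduction of the per-level bookkeeping in writeCustomAttributes:
// org unit group id -> external ids of the org units to assign to that group.
public class OrgUnitGroupBucketsSketch {
  private final Map<String, Set<String>> orgUnitGroupIdSets = new HashMap<>();

  // Equivalent of getOrgUnitGroupIdSet(...) falling back to newOrgUnitGroupIdSet(...).
  public void assign(String orgUnitGroupId, String externalId) {
    orgUnitGroupIdSets
        .computeIfAbsent(orgUnitGroupId, id -> new HashSet<>())
        .add(externalId);
  }

  public Map<String, Set<String>> getOrgUnitGroupIdSets() {
    return orgUnitGroupIdSets;
  }
}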
Use of net.geoprism.registry.model.graph.VertexServerGeoObject in project geoprism-registry by terraframe.
From the class GeoObjectImporter, the method importRowInTrans:
@Transaction
public void importRowInTrans(FeatureRow row, RowData data) {
  // Refresh the session because it might expire on long imports
  final long curWorkProgress = this.progressListener.getWorkProgress();
  if ((this.lastImportSessionRefresh + GeoObjectImporter.refreshSessionRecordCount) < curWorkProgress) {
    SessionFacade.renewSession(Session.getCurrentSession().getOid());
    this.lastImportSessionRefresh = curWorkProgress;
  }

  GeoObjectOverTime go = null;
  String goJson = null;
  ServerGeoObjectIF serverGo = null;
  ServerGeoObjectIF parent = null;
  boolean isNew = false;
  GeoObjectParentErrorBuilder parentBuilder = new GeoObjectParentErrorBuilder();

  try {
    String code = this.getCode(row);

    if (code == null || code.length() <= 0) {
      RequiredMappingException ex = new RequiredMappingException();
      ex.setAttributeLabel(GeoObjectTypeMetadata.getAttributeDisplayLabel(DefaultAttribute.CODE.getName()));
      throw ex;
    }

    if (this.configuration.getImportStrategy().equals(ImportStrategy.UPDATE_ONLY) || this.configuration.getImportStrategy().equals(ImportStrategy.NEW_AND_UPDATE)) {
      serverGo = service.getGeoObjectByCode(code, this.configuration.getType(), false);
    }

    if (serverGo == null) {
      if (this.configuration.getImportStrategy().equals(ImportStrategy.UPDATE_ONLY)) {
        net.geoprism.registry.DataNotFoundException ex = new net.geoprism.registry.DataNotFoundException();
        ex.setTypeLabel(GeoObjectMetadata.get().getClassDisplayLabel());
        ex.setDataIdentifier(code);
        ex.setAttributeLabel(GeoObjectMetadata.get().getAttributeDisplayLabel(DefaultAttribute.CODE.getName()));
        throw ex;
      }

      isNew = true;

      serverGo = service.newInstance(this.configuration.getType());
      serverGo.setCode(code);
      serverGo.setInvalid(false);
    } else {
      serverGo.lock();
    }

    try {
      LocalizedValue entityName = this.getName(row);
      if (entityName != null && this.hasValue(entityName)) {
        serverGo.setDisplayLabel(entityName, this.configuration.getStartDate(), this.configuration.getEndDate());
      }

      Geometry geometry = (Geometry) this.getFormatSpecificImporter().getGeometry(row);
      if (geometry != null) {
        // geometry.getSRID().
        if (geometry.isValid()) {
          serverGo.setGeometry(geometry, this.configuration.getStartDate(), this.configuration.getEndDate());
        } else {
          // throw new SridException();
          throw new InvalidGeometryException();
        }
      }

      if (isNew) {
        serverGo.setUid(ServiceFactory.getIdService().getUids(1)[0]);
      }

      // Set exists first so we can validate attributes on it
      // ShapefileFunction existsFunction = this.configuration.getFunction(DefaultAttribute.EXISTS.getName());
      //
      // if (existsFunction != null)
      // {
      //   Object value = existsFunction.getValue(row);
      //
      //   if (value != null && !this.isEmptyString(value))
      //   {
      //     this.setValue(serverGo, this.configuration.getType().getAttribute(DefaultAttribute.EXISTS.getName()).get(), DefaultAttribute.EXISTS.getName(), value);
      //   }
      // }
      // else if (isNew)
      // {
      //   ValueOverTime defaultExists = ((VertexServerGeoObject) serverGo).buildDefaultExists();
      //   if (defaultExists != null)
      //   {
      //     serverGo.setValue(DefaultAttribute.EXISTS.getName(), Boolean.TRUE, defaultExists.getStartDate(), defaultExists.getEndDate());
      //   }
      // }
      this.setValue(serverGo, this.configuration.getType().getAttribute(DefaultAttribute.EXISTS.getName()).get(), DefaultAttribute.EXISTS.getName(), true);

      Map<String, AttributeType> attributes = this.configuration.getType().getAttributeMap();
      Set<Entry<String, AttributeType>> entries = attributes.entrySet();

      for (Entry<String, AttributeType> entry : entries) {
        String attributeName = entry.getKey();

        if (!attributeName.equals(GeoObject.CODE) && !attributeName.equals(DefaultAttribute.EXISTS.getName())) {
          ShapefileFunction function = this.configuration.getFunction(attributeName);

          if (function != null) {
            Object value = function.getValue(row);

            AttributeType attributeType = entry.getValue();

            if (value != null && !this.isEmptyString(value)) {
              // if (!(existsFunction == null && isNew))
              // {
              //   try
              //   {
              //     ((VertexServerGeoObject) serverGo).enforceAttributeSetWithinRange(serverGo.getDisplayLabel().getValue(), attributeName, this.configuration.getStartDate(), this.configuration.getEndDate());
              //   }
              //   catch (ValueOutOfRangeException e)
              //   {
              //     final SimpleDateFormat format = ValueOverTimeDTO.getTimeFormatter();
              //
              //     ImportOutOfRangeException ex = new ImportOutOfRangeException();
              //     ex.setStartDate(format.format(this.configuration.getStartDate()));
              //
              //     if (ValueOverTime.INFINITY_END_DATE.equals(this.configuration.getEndDate()))
              //     {
              //       ex.setEndDate(LocalizationFacade.localize("changeovertime.present"));
              //     }
              //     else
              //     {
              //       ex.setEndDate(format.format(this.configuration.getEndDate()));
              //     }
              //
              //     throw ex;
              //   }
              // }
              this.setValue(serverGo, attributeType, attributeName, value);
            } else if (this.configuration.getCopyBlank()) {
              this.setValue(serverGo, attributeType, attributeName, null);
            }
          }
        }
      }

      go = serverGo.toGeoObjectOverTime(false);
      goJson = go.toJSON().toString();

      /*
       * Try to get the parent and ensure that this row is not ignored. The
       * getParent method will throw an IgnoreRowException if the parent is
       * configured to be ignored.
       */
      if (this.configuration.isPostalCode() && PostalCodeFactory.isAvailable(this.configuration.getType())) {
        parent = this.parsePostalCode(row);
      } else if (this.configuration.getHierarchy() != null && this.configuration.getLocations().size() > 0) {
        parent = this.getParent(row);
      }
      parentBuilder.setParent(parent);

      if (this.progressListener.hasValidationProblems()) {
        throw new RuntimeException("Did not expect to encounter validation problems during import.");
      }

      data.setGoJson(goJson);
      data.setNew(isNew);
      data.setParentBuilder(parentBuilder);

      serverGo.apply(true);
    } finally {
      if (serverGo != null) {
        serverGo.unlock();
      }
    }

    if (this.configuration.isExternalImport()) {
      ShapefileFunction function = this.configuration.getExternalIdFunction();
      Object value = function.getValue(row);

      serverGo.createExternalId(this.configuration.getExternalSystem(), String.valueOf(value), this.configuration.getImportStrategy());
    }

    if (parent != null) {
      parent.addChild(serverGo, this.configuration.getHierarchy(), this.configuration.getStartDate(), this.configuration.getEndDate());
    } else if (isNew) {
      // GeoEntity child = GeoEntity.getByKey(serverGo.getCode());
      // GeoEntity root = GeoEntity.getByKey(GeoEntity.ROOT);
      //
      // child.addLink(root, this.configuration.getHierarchy().getEntityType());
    }

    // We must ensure that any problems created during the transaction are
    // logged now instead of when the request returns. As such, if any
    // problems exist immediately throw a ProblemException so that normal
    // exception handling can occur.
    List<ProblemIF> problems = RequestState.getProblemsInCurrentRequest();

    List<ProblemIF> problems2 = new LinkedList<ProblemIF>();
    for (ProblemIF problem : problems) {
      problems2.add(problem);
    }

    if (problems.size() != 0) {
      throw new ProblemException(null, problems2);
    }

    this.progressListener.setImportedRecords(this.progressListener.getImportedRecords() + 1);
  } catch (IgnoreRowException e) {
    // Do nothing
  } catch (Throwable t) {
    buildRecordException(goJson, isNew, parentBuilder, t);
  }

  this.progressListener.setWorkProgress(curWorkProgress + 1);
}
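A long import can outlive its session, so the top of importRowInTrans renews the session only once every refreshSessionRecordCount imported rows instead of on every row. The throttle can be read in isolation as in the sketch below; the threshold value and the renewSession() body are assumptions standing in for GeoObjectImporter.refreshSessionRecordCount and SessionFacade.renewSession(...).

// Sketch of the periodic session-refresh throttle used by importRowInTrans.
public class SessionRefreshSketch {
  private static final long REFRESH_RECORD_COUNT = 10000; // assumed threshold
  private long lastRefreshAt = 0;

  public void maybeRefresh(long workProgress) {
    // Only renew once every REFRESH_RECORD_COUNT imported rows, not per row.
    if (lastRefreshAt + REFRESH_RECORD_COUNT < workProgress) {
      renewSession();
      lastRefreshAt = workProgress;
    }
  }

  private void renewSession() {
    // Placeholder for SessionFacade.renewSession(Session.getCurrentSession().getOid()).
  }
}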