Example usage of org.sagebionetworks.repo.model.MigratableObjectData in the Synapse-Repository-Services project by Sage-Bionetworks:
the getMigrationObjectData method of the DBOUserGroupDAOImpl class.
/**
 * Returns one page of user groups as migratable object data.
 *
 * @param offset zero-based index of the first row to return
 * @param limit maximum number of rows to return
 * @param includeDependencies unused here; user groups carry no dependencies
 * @return the page of results plus the total (unpaginated) row count
 * @throws DatastoreException on persistence failure
 */
@Override
public QueryResults<MigratableObjectData> getMigrationObjectData(long offset, long limit, boolean includeDependencies) throws DatastoreException {
    MapSqlParameterSource pagingParams = new MapSqlParameterSource();
    pagingParams.addValue(OFFSET_PARAM_NAME, offset);
    pagingParams.addValue(LIMIT_PARAM_NAME, limit);
    RowMapper<MigratableObjectData> rowMapper = new RowMapper<MigratableObjectData>() {
        @Override
        public MigratableObjectData mapRow(ResultSet rs, int rowNum) throws SQLException {
            // The query is an outer join, so a user group may have no
            // matching etag row; fall back to the default etag then.
            String groupId = rs.getString(COL_USER_GROUP_ID);
            String etag = rs.getString(COL_USER_PROFILE_ETAG);
            MigratableObjectDescriptor descriptor = new MigratableObjectDescriptor();
            descriptor.setId(groupId);
            descriptor.setType(MigratableObjectType.PRINCIPAL);
            MigratableObjectData objectData = new MigratableObjectData();
            objectData.setId(descriptor);
            objectData.setEtag(etag == null ? DEFAULT_ETAG : etag);
            // User groups never depend on other migratable objects.
            objectData.setDependencies(new HashSet<MigratableObjectDescriptor>(0));
            return objectData;
        }
    };
    List<MigratableObjectData> page = simpleJdbcTemplate.query(SELECT_ALL_PAGINATED_WITH_ETAG, rowMapper, pagingParams);
    QueryResults<MigratableObjectData> results = new QueryResults<MigratableObjectData>();
    results.setResults(page);
    // NOTE(review): total count is narrowed to int — assumes the table
    // never exceeds Integer.MAX_VALUE rows; confirm against getCount().
    results.setTotalNumberOfResults((int) getCount());
    return results;
}
Example usage of org.sagebionetworks.repo.model.MigratableObjectData in the Synapse-Repository-Services project by Sage-Bionetworks:
the getMigrationObjectDataWithoutDependencies method of the NodeDAOImpl class.
/**
 * Returns one page of nodes as migratable object data, with dependency
 * sets left empty.
 *
 * @param offset zero-based index of the first row to return
 * @param limit maximum number of rows to return
 * @return the page of results plus the total (unpaginated) node count
 * @throws DatastoreException on persistence failure
 */
public QueryResults<MigratableObjectData> getMigrationObjectDataWithoutDependencies(long offset, long limit) throws DatastoreException {
    MapSqlParameterSource pagingParams = new MapSqlParameterSource();
    pagingParams.addValue(OFFSET_PARAM_NAME, offset);
    pagingParams.addValue(LIMIT_PARAM_NAME, limit);
    RowMapper<MigratableObjectData> rowMapper = new RowMapper<MigratableObjectData>() {
        @Override
        public MigratableObjectData mapRow(ResultSet rs, int rowNum) throws SQLException {
            MigratableObjectData row = new MigratableObjectData();
            row.setId(ObjectDescriptorUtils.createEntityObjectDescriptor(rs.getLong(COL_NODE_ID)));
            row.setEtag(rs.getString(COL_NODE_ETAG));
            // Dependencies are intentionally omitted by this variant.
            row.setDependencies(new HashSet<MigratableObjectDescriptor>(0));
            return row;
        }
    };
    List<MigratableObjectData> page = this.simpleJdbcTemplate.query(SQL_GET_NODES_PAGINATED, rowMapper, pagingParams);
    QueryResults<MigratableObjectData> results = new QueryResults<MigratableObjectData>();
    results.setResults(page);
    // NOTE(review): narrowing the long count to int — verify table size
    // cannot overflow Integer.MAX_VALUE.
    results.setTotalNumberOfResults((int) getCount());
    return results;
}
Example usage of org.sagebionetworks.repo.model.MigratableObjectData in the Synapse-Repository-Services project by Sage-Bionetworks:
the call method of the CreationJobBuilder class.
/**
 * Scans the source list and enqueues CREATE jobs, batched by object type,
 * for every source object that is absent from the destination and whose
 * dependencies are already present there.
 *
 * @return counts of creates submitted and creates deferred to a later round
 */
@Override
public BuilderResponse call() throws Exception {
    int createsSubmitted = 0;
    int pendingCreates = 0;
    // Partially-filled CREATE batches, keyed by migratable object type.
    Map<MigratableObjectType, Set<String>> pendingBatches = new HashMap<MigratableObjectType, Set<String>>();
    for (MigratableObjectData source : sourceList) {
        if (destMap.containsKey(source.getId())) {
            // Already present at the destination; nothing to create.
            continue;
        }
        if (!JobUtil.dependenciesFulfilled(source, destMap.keySet())) {
            // Dependencies not yet migrated — a future round will retry.
            pendingCreates++;
            continue;
        }
        MigratableObjectType type = source.getId().getType();
        Set<String> batch = pendingBatches.get(type);
        if (batch == null) {
            batch = new HashSet<String>();
            pendingBatches.put(type, batch);
        }
        batch.add(source.getId().getId());
        createsSubmitted++;
        // Flush a full batch immediately and start a fresh one lazily.
        if (batch.size() >= this.batchSize) {
            this.queue.add(new Job(batch, type, Type.CREATE));
            pendingBatches.remove(type);
        }
    }
    // Flush whatever partial batches remain.
    for (Map.Entry<MigratableObjectType, Set<String>> entry : pendingBatches.entrySet()) {
        if (!entry.getValue().isEmpty()) {
            this.queue.add(new Job(entry.getValue(), entry.getKey(), Type.CREATE));
        }
    }
    pendingBatches.clear();
    return new BuilderResponse(createsSubmitted, pendingCreates);
}
Example usage of org.sagebionetworks.repo.model.MigratableObjectData in the Synapse-Repository-Services project by Sage-Bionetworks:
the call method of the UpdateJobBuilder class.
/**
 * Scans the source list and enqueues UPDATE jobs, batched by object type,
 * for every object that exists at both ends with a differing etag and
 * whose dependencies are already present at the destination.
 *
 * @return counts of updates submitted (pending is always zero for updates)
 */
@Override
public BuilderResponse call() throws Exception {
    int updateSubmitted = 0;
    // Partially-filled UPDATE batches, keyed by migratable object type.
    Map<MigratableObjectType, Set<String>> batchesToUpdate = new HashMap<MigratableObjectType, Set<String>>();
    for (MigratableObjectData source : sourceList) {
        // Updates only apply to objects that already exist at the destination.
        MigratableObjectData destObject = destMap.get(source.getId());
        if (destObject != null) {
            // Update only when the etags differ AND all dependencies are in place.
            if (etagsDiffer(source.getEtag(), destObject.getEtag()) && // also check dependencies
            JobUtil.dependenciesFulfilled(source, destMap.keySet())) {
                System.out.println("UpdateJobBuilder: Need to update " + source.getId() + " source etag is " + source.getEtag() + " while dest etag is " + destObject.getEtag());
                // Tags do not match. New dependencies are in place. Let's migrate it!
                MigratableObjectType objectType = source.getId().getType();
                Set<String> batchToUpdate = batchesToUpdate.get(objectType);
                if (batchToUpdate == null) {
                    batchToUpdate = new HashSet<String>();
                    batchesToUpdate.put(objectType, batchToUpdate);
                }
                batchToUpdate.add(source.getId().getId());
                updateSubmitted++;
                // Flush a full batch immediately; a fresh one is created lazily.
                if (batchToUpdate.size() >= this.batchSize) {
                    // Fixed: was misleadingly named `createJob` for an UPDATE job.
                    Job updateJob = new Job(batchToUpdate, objectType, Type.UPDATE);
                    this.queue.add(updateJob);
                    batchesToUpdate.remove(objectType);
                }
            }
        }
    }
    // Submit any partially-filled batches left over.
    for (Map.Entry<MigratableObjectType, Set<String>> entry : batchesToUpdate.entrySet()) {
        Set<String> batchToUpdate = entry.getValue();
        if (!batchToUpdate.isEmpty()) {
            Job updateJob = new Job(batchToUpdate, entry.getKey(), Type.UPDATE);
            this.queue.add(updateJob);
        }
    }
    batchesToUpdate.clear();
    // Updates are never deferred, so the pending count is zero.
    return new BuilderResponse(updateSubmitted, 0);
}
Example usage of org.sagebionetworks.repo.model.MigratableObjectData in the Synapse-Repository-Services project by Sage-Bionetworks:
the testPopulateQueueStartingData method of the MigrationDriverTest class.
/**
 * Verifies populateQueue against a freshly-bootstrapped destination: the
 * destination holds three entities (99 is the root; 101 and 102 depend on
 * it) that do not overlap with the source, so the driver must schedule one
 * CREATE (only the source root is creatable this round), zero UPDATEs, and
 * a DELETE for each of the three destination entities.
 *
 * @throws ExecutionException
 * @throws InterruptedException
 */
@Test
public void testPopulateQueueStartingData() throws InterruptedException, ExecutionException {
    // this is how most new stacks will start with three bootstrapped entities.
    List<MigratableObjectData> dest = new ArrayList<MigratableObjectData>();
    // createMigratableObjectData(id, etag, dependencyId): 99 is the root,
    // 101 and 102 each depend on 99.
    dest.add(XestUtil.createMigratableObjectData("99", "0", null));
    dest.add(XestUtil.createMigratableObjectData("101", "0", "99"));
    dest.add(XestUtil.createMigratableObjectData("102", "0", "99"));
    // Populate the queue for this setup
    // Batch size of 1 forces one job per object, making counts predictable.
    int MAX_BATCH_SIZE = 1;
    RepositoryMigrationDriver.populateQueue(threadPool, jobQueue, source, dest, MAX_BATCH_SIZE);
    // Only the first entity can be created since it is root.
    int expectedCreate = 1;
    // since there are no common objects bet src and dst, there's nothing to update
    int expectedUpdate = 0;
    // we will delete *everything* in dest
    int expectedDelete = 3;
    // NOTE(review): peek() returns null on an empty queue, which would NPE
    // while building the assertion message below — acceptable here only
    // because a non-empty queue is itself part of the expectation.
    Job head = jobQueue.peek();
    assertEquals("First job: " + head.getJobType() + " " + head.getObjectIds(), expectedCreate + expectedUpdate + expectedDelete, jobQueue.size());
    int createCount = 0;
    int deleteCount = 0;
    int updateCount = 0;
    // Collect ids from the (fixture-provided) source and the local dest so
    // each job's payload can be validated against the correct side.
    Set<String> sourceIds = new HashSet<String>();
    for (MigratableObjectData mod : source) sourceIds.add(mod.getId().getId());
    Set<String> destIds = new HashSet<String>();
    for (MigratableObjectData mod : dest) destIds.add(mod.getId().getId());
    // Tally jobs by type, checking batch size and id provenance as we go.
    for (Job job : jobQueue) {
        if (job.getJobType() == Type.CREATE) {
            // this is because we set MAX_BATCH_SIZE = 1
            assertEquals(1, job.getObjectIds().size());
            // CREATE payloads must come from the source side.
            assertTrue(sourceIds.contains(job.getObjectIds().iterator().next()));
            createCount++;
        } else if (job.getJobType() == Type.DELETE) {
            // this is because we set MAX_BATCH_SIZE = 1
            assertEquals(1, job.getObjectIds().size());
            // DELETE payloads must come from the destination side.
            assertTrue(destIds.contains(job.getObjectIds().iterator().next()));
            deleteCount++;
        } else if (job.getJobType() == Type.UPDATE) {
            updateCount++;
        }
    }
    // Check the final counts.
    assertEquals(expectedCreate, createCount);
    assertEquals(expectedUpdate, updateCount);
    assertEquals(expectedDelete, deleteCount);
}
Aggregations