Use of org.dcm4chee.arc.store.InstanceLocations in project dcm4chee-arc-light by dcm4che.
The class CStoreSCUImpl, method openAssociation.
private Association openAssociation(RetrieveContext ctx) throws DicomServiceException {
    try {
        try {
            ApplicationEntity localAE = ctx.getLocalApplicationEntity();
            Association storeas = localAE.connect(ctx.getDestinationAE(), createAARQ(ctx));
            // Drop matches whose SOP Class got no accepted Presentation Context.
            for (Iterator<InstanceLocations> iter = ctx.getMatches().iterator(); iter.hasNext(); ) {
                InstanceLocations inst = iter.next();
                if (storeas.getTransferSyntaxesFor(inst.getSopClassUID()).isEmpty()) {
                    iter.remove();
                    ctx.incrementFailed();
                    ctx.addFailedSOPInstanceUID(inst.getSopInstanceUID());
                    LOG.info("{}: failed to send {} to {} - no Presentation Context accepted",
                            ctx.getRequestAssociation(), inst, ctx.getDestinationAETitle());
                }
            }
            return storeas;
        } catch (Exception e) {
            throw new DicomServiceException(Status.UnableToPerformSubOperations, e);
        }
    } catch (DicomServiceException e) {
        ctx.setException(e);
        retrieveStart.fire(ctx);
        throw e;
    }
}
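Below is a minimal, self-contained sketch of the same filtering pattern in plain Java: remove matches whose SOP Class was not accepted during association negotiation, while collecting the failed SOP Instance UIDs. The Match record and the acceptedSopClasses set are hypothetical stand-ins for InstanceLocations and the accepted Presentation Contexts; they are not dcm4che API.

import java.util.*;

public class PresentationContextFilter {
    record Match(String sopClassUID, String sopInstanceUID) {}

    public static void main(String[] args) {
        List<Match> matches = new ArrayList<>(List.of(
                new Match("1.2.840.10008.5.1.4.1.1.2", "1.1"),   // CT Image Storage
                new Match("1.2.840.10008.5.1.4.1.1.4", "1.2"))); // MR Image Storage
        Set<String> acceptedSopClasses = Set.of("1.2.840.10008.5.1.4.1.1.2");
        List<String> failedSopInstanceUIDs = new ArrayList<>();

        // Iterator.remove() is the only safe way to drop elements mid-iteration.
        for (Iterator<Match> iter = matches.iterator(); iter.hasNext(); ) {
            Match inst = iter.next();
            if (!acceptedSopClasses.contains(inst.sopClassUID())) {
                iter.remove();
                failedSopInstanceUIDs.add(inst.sopInstanceUID());
            }
        }
        System.out.println("kept=" + matches + " failed=" + failedSopInstanceUIDs);
    }
}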
Use of org.dcm4chee.arc.store.InstanceLocations in project dcm4chee-arc-light by dcm4che.
The class StowTaskImpl, method run.
@Override
public void run() {
    retrieveStart.fire(ctx);
    try {
        // Queue matches not served from the retrieve cache; a null-wrapped
        // entry marks the end of the stream for the consumers.
        for (InstanceLocations match : ctx.getMatches()) {
            if (!ctx.copyToRetrieveCache(match)) {
                matches.offer(new WrappedInstanceLocations(match));
            }
        }
        ctx.copyToRetrieveCache(null);
        matches.offer(new WrappedInstanceLocations(null));
        runStoreOperations();
    } finally {
        if (semaphore != null) {
            try {
                // Reacquire all permits to wait for outstanding STOW responses.
                semaphore.acquire(concurrency);
            } catch (InterruptedException e) {
                LOG.warn("{}: failed to wait for pending responses:\n", target, e);
            }
        }
        target.getResteasyClient().close();
        ctx.getRetrieveService().updateLocations(ctx);
        SafeClose.close(ctx);
    }
    retrieveEnd.fire(ctx);
}
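The method above hands matches to consumers through a queue and terminates the stream with a null-wrapped sentinel. A minimal sketch of that handoff pattern, assuming a hypothetical Wrapped record in place of WrappedInstanceLocations:

import java.util.concurrent.*;

public class SentinelQueueDemo {
    record Wrapped(String payload) {} // null payload == end-of-stream

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Wrapped> queue = new LinkedBlockingQueue<>();
        Thread consumer = new Thread(() -> {
            try {
                Wrapped w;
                // Stop when the sentinel (null payload) is unwrapped.
                while ((w = queue.take()).payload() != null)
                    System.out.println("storing " + w.payload());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        consumer.start();
        for (String inst : new String[] { "1.1", "1.2", "1.3" })
            queue.offer(new Wrapped(inst));
        queue.offer(new Wrapped(null)); // sentinel: no more matches
        consumer.join();
    }
}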
Use of org.dcm4chee.arc.store.InstanceLocations in project dcm4chee-arc-light by dcm4che.
The class UpdateMetadataScheduler, method updateMetadata.
private void updateMetadata(ArchiveDeviceExtension arcDev, Storage storage,
        Series.MetadataUpdate metadataUpdate, AtomicInteger success, AtomicInteger skipped) {
    try (RetrieveContext ctx = retrieveService.newRetrieveContextSeriesMetadata(metadataUpdate)) {
        if (claim(metadataUpdate, storage) && retrieveService.calculateMatches(ctx)) {
            LOG.debug("Creating/Updating Metadata for Series[pk={}] on {}",
                    metadataUpdate.seriesPk, storage.getStorageDescriptor());
            WriteContext writeCtx = createWriteContext(storage, ctx.getMatches().iterator().next());
            try {
                // One ZIP entry per instance, named by its SOP Instance UID.
                try (ZipOutputStream out = new ZipOutputStream(storage.openOutputStream(writeCtx))) {
                    for (InstanceLocations match : ctx.getMatches()) {
                        out.putNextEntry(new ZipEntry(match.getSopInstanceUID()));
                        JsonGenerator gen = Json.createGenerator(out);
                        arcDev.encodeAsJSONNumber(new JSONWriter(gen)).write(loadMetadata(ctx, match));
                        gen.flush(); // flush, do not close: the ZIP stream must stay open
                        out.closeEntry();
                    }
                    out.finish();
                }
                storage.commitStorage(writeCtx);
                ejb.commit(metadataUpdate.seriesPk, createMetadata(writeCtx));
            } catch (Exception e) {
                LOG.warn("Failed to Create/Update Metadata for Series[pk={}] on {}:\n",
                        metadataUpdate.seriesPk, storage.getStorageDescriptor(), e);
                try {
                    ejb.incrementMetadataUpdateFailures(metadataUpdate.seriesPk,
                            nextRetry(arcDev, metadataUpdate.updateFailures));
                } catch (Exception e1) {
                    LOG.warn("Failed to update Metadata Update time", e1);
                }
                try {
                    storage.revokeStorage(writeCtx);
                } catch (Exception e1) {
                    LOG.warn("Failed to revoke storage", e1);
                }
                return;
            }
            LOG.debug("Created/Updated Metadata for Series[pk={}] on {}",
                    metadataUpdate.seriesPk, storage.getStorageDescriptor());
            success.getAndIncrement();
        } else {
            skipped.getAndIncrement();
        }
    } catch (Exception e) {
        LOG.error("Unexpected exception on closing Retrieve Context for {}:\n", metadataUpdate, e);
    }
}
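The ZIP layout written above is one entry per SOP Instance UID, each holding that instance's JSON metadata; note that the per-entry JsonGenerator is only flushed, not closed, so the shared ZipOutputStream stays open. A simplified sketch of the same layout using plain strings instead of jakarta.json (the UIDs and JSON payloads are made up):

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.zip.*;

public class SeriesMetadataZip {
    public static void main(String[] args) throws IOException {
        Map<String, String> metadataByIUID = Map.of(
                "1.1", "{\"00080018\":{\"vr\":\"UI\",\"Value\":[\"1.1\"]}}",
                "1.2", "{\"00080018\":{\"vr\":\"UI\",\"Value\":[\"1.2\"]}}");
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ZipOutputStream out = new ZipOutputStream(buf)) {
            for (Map.Entry<String, String> e : metadataByIUID.entrySet()) {
                out.putNextEntry(new ZipEntry(e.getKey()));
                out.write(e.getValue().getBytes(StandardCharsets.UTF_8));
                out.closeEntry(); // entry done; the ZIP stream stays open
            }
            out.finish(); // write the central directory before committing storage
        }
        System.out.println("zip size=" + buf.size() + " bytes");
    }
}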
Use of org.dcm4chee.arc.store.InstanceLocations in project dcm4chee-arc-light by dcm4che.
The class CopyToRetrieveCacheTask, method run.
@Override
public void run() {
    try {
        InstanceLocations instanceLocations;
        // A null payload in the wrapper marks the end of the scheduled queue.
        while ((instanceLocations = scheduled.take().instanceLocations) != null) {
            final InstanceLocations inst = instanceLocations;
            semaphore.acquire(); // limit concurrent copies to maxParallel
            arcdev.getDevice().execute(() -> {
                try {
                    if (copy(inst)) {
                        String studyIUID = inst.getAttributes().getString(Tag.StudyInstanceUID);
                        String seriesIUID = inst.getAttributes().getString(Tag.SeriesInstanceUID);
                        synchronized (uidMap) {
                            Set<String> seriesIUIDs = uidMap.get(studyIUID);
                            if (seriesIUIDs == null)
                                uidMap.put(studyIUID, seriesIUIDs = new HashSet<>());
                            seriesIUIDs.add(seriesIUID);
                        }
                    }
                    completed.offer(new WrappedInstanceLocations(inst));
                } finally {
                    semaphore.release();
                }
            });
        }
        LOG.debug("Wait for finishing copying {} instances to retrieve cache",
                maxParallel - semaphore.availablePermits());
        semaphore.acquire(maxParallel); // wait until all pending copies complete
        LOG.debug("All instances copied to retrieve cache");
    } catch (InterruptedException e) {
        LOG.error("Failed to schedule copy to retrieve cache:\n", e);
    }
    StoreService storeService = ctx.getRetrieveService().getStoreService();
    for (Map.Entry<String, Set<String>> entry : uidMap.entrySet()) {
        String studyIUID = entry.getKey();
        storeService.addStorageID(studyIUID, storageID);
        for (String seriesIUID : entry.getValue()) {
            storeService.scheduleMetadataUpdate(studyIUID, seriesIUID);
        }
    }
    completed.offer(new WrappedInstanceLocations(null));
    LOG.debug("Leave run()");
}
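The semaphore usage above bounds the number of concurrent copy tasks and then waits for completion by re-acquiring every permit. A self-contained sketch of that pattern (maxParallel and the instance numbers are illustrative):

import java.util.concurrent.*;

public class BoundedCopyDemo {
    public static void main(String[] args) throws InterruptedException {
        int maxParallel = 3;
        Semaphore semaphore = new Semaphore(maxParallel);
        ExecutorService executor = Executors.newCachedThreadPool();
        for (int i = 0; i < 10; i++) {
            final int inst = i;
            semaphore.acquire(); // block while maxParallel copies are in flight
            executor.execute(() -> {
                try {
                    System.out.println("copying instance " + inst);
                } finally {
                    semaphore.release();
                }
            });
        }
        // Re-acquiring all permits succeeds only once every task has released.
        semaphore.acquire(maxParallel);
        System.out.println("all instances copied");
        executor.shutdown();
    }
}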
Use of org.dcm4chee.arc.store.InstanceLocations in project dcm4chee-arc-light by dcm4che.
The class LocationQuery, method execute.
private void execute(Map<Long, StudyInfo> studyInfoMap, Predicate[] predicates) {
    HashMap<Long, InstanceLocations> instMap = new HashMap<>();
    HashMap<Long, Attributes> seriesAttrsMap = new HashMap<>();
    HashMap<Long, Map<String, CodeEntity>> rejectedInstancesOfSeriesMap = new HashMap<>();
    for (Tuple tuple : em.createQuery(q.where(predicates)).getResultList()) {
        Long instPk = tuple.get(instance.get(Instance_.pk));
        InstanceLocations match = instMap.get(instPk);
        if (match == null) {
            // First row for this instance: resolve (and cache) its series attributes.
            Long seriesPk = tuple.get(series.get(Series_.pk));
            Attributes seriesAttrs = seriesAttrsMap.get(seriesPk);
            Map<String, CodeEntity> rejectedInstancesOfSeries = rejectedInstancesOfSeriesMap.get(seriesPk);
            if (seriesAttrs == null) {
                SeriesAttributes seriesAttributes = new SeriesAttributes(em, cb, seriesPk);
                studyInfoMap.put(seriesAttributes.studyInfo.getStudyPk(), seriesAttributes.studyInfo);
                ctx.getSeriesInfos().add(seriesAttributes.seriesInfo);
                ctx.setPatientUpdatedTime(seriesAttributes.patientUpdatedTime);
                seriesAttrsMap.put(seriesPk, seriesAttrs = seriesAttributes.attrs);
                if (ctx.getSeriesMetadataUpdate() != null)
                    rejectedInstancesOfSeriesMap.put(seriesPk,
                            rejectedInstancesOfSeries = getRejectedInstancesOfSeries(seriesAttrs));
            }
            Attributes instAttrs = AttributesBlob.decodeAttributes(tuple.get(instanceAttrBlob), null);
            Attributes.unifyCharacterSets(seriesAttrs, instAttrs);
            instAttrs.addAll(seriesAttrs, true);
            match = instanceLocationsFromDB(tuple, instAttrs,
                    rejectedInstancesOfSeries != null
                            ? rejectedInstancesOfSeries.get(instAttrs.getString(Tag.SOPInstanceUID))
                            : null);
            ctx.getMatches().add(match);
            instMap.put(instPk, match);
        }
        addLocation(match, tuple); // every row contributes a Location to its instance
    }
}
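The loop above deduplicates a joined result set: multiple rows per instance (one per Location) collapse into a single InstanceLocations, and each row's location is attached via addLocation. A minimal sketch of that grouping pattern, with a hypothetical Row record standing in for the JPA Tuple:

import java.util.*;

public class JoinedRowGrouping {
    record Row(long instancePk, String location) {}

    static class Instance {
        final long pk;
        final List<String> locations = new ArrayList<>();
        Instance(long pk) { this.pk = pk; }
    }

    public static void main(String[] args) {
        List<Row> resultList = List.of(
                new Row(1, "fs1:study/series/obj1"),
                new Row(1, "cache:study/series/obj1"), // second location, same instance
                new Row(2, "fs1:study/series/obj2"));
        Map<Long, Instance> instMap = new LinkedHashMap<>();
        for (Row row : resultList) {
            // One Instance per PK; every row adds its location to that instance.
            Instance match = instMap.computeIfAbsent(row.instancePk(), Instance::new);
            match.locations.add(row.location());
        }
        instMap.values().forEach(i ->
                System.out.println("pk=" + i.pk + " locations=" + i.locations));
    }
}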