Use of org.dcm4che3.imageio.codec.ImageDescriptor in project dcm4chee-arc-light by dcm4che: class UncompressedFramesOutput, method initDicomInputStream.
private void initDicomInputStream() throws IOException {
    RetrieveService service = ctx.getRetrieveService();
    dis = service.openDicomInputStream(ctx, inst);
    frameLength = new ImageDescriptor(dis.readDataset(-1, Tag.PixelData)).getFrameLength();
    if (dis.tag() != Tag.PixelData) {
        throw new IOException("Missing pixel data in requested object");
    }
}
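For illustration, a minimal standalone sketch of the same pattern that reads a local file instead of going through the archive's RetrieveService (the file path is hypothetical; imports from org.dcm4che3 are omitted, as in the snippets above):

private int readFrameLength(File file) throws IOException {
    try (DicomInputStream dis = new DicomInputStream(file)) {
        // read the dataset up to, but not including, the Pixel Data element
        Attributes attrs = dis.readDataset(-1, Tag.PixelData);
        if (dis.tag() != Tag.PixelData)
            throw new IOException("Missing pixel data in " + file);
        // ImageDescriptor derives rows, columns, samples and bit depth from the dataset
        return new ImageDescriptor(attrs).getFrameLength(); // bytes per uncompressed frame
    }
}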
Use of org.dcm4che3.imageio.codec.ImageDescriptor in project dcm4chee-arc-light by dcm4che: class DecompressSupport, method initEncapsulatedPixelData.
protected void initEncapsulatedPixelData() throws IOException {
    RetrieveService service = ctx.getRetrieveService();
    dis = service.openDicomInputStream(ctx, inst);
    Attributes attrs = dis.readDataset(-1, Tag.PixelData);
    if (dis.tag() != Tag.PixelData || dis.length() != -1)
        throw new IOException("No or incorrect encapsulated compressed pixel data in requested object");
    ImageDescriptor imageDescriptor = new ImageDescriptor(attrs);
    String tsuid = dis.getTransferSyntax();
    TransferSyntaxType tsType = TransferSyntaxType.forUID(tsuid);
    encapsulatedPixelData = new EncapsulatedPixelDataImageInputStream(dis, imageDescriptor, tsType);
    initDecompressor(tsuid, tsType, imageDescriptor);
    if (tsType == TransferSyntaxType.RLE)
        initBufferedImage(imageDescriptor);
}
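The dis.length() != -1 check relies on encapsulated Pixel Data being encoded with undefined length. A minimal sketch of the same pattern against a local file, assuming EncapsulatedPixelDataImageInputStream is used exactly as above (hypothetical path, imports omitted as in the snippets above):

try (DicomInputStream dis = new DicomInputStream(new File("compressed.dcm"))) {
    Attributes attrs = dis.readDataset(-1, Tag.PixelData);
    // undefined length (-1) at the Pixel Data element marks encapsulated (fragmented) pixel data
    if (dis.tag() != Tag.PixelData || dis.length() != -1)
        throw new IOException("No encapsulated pixel data in compressed.dcm");
    ImageDescriptor desc = new ImageDescriptor(attrs);
    TransferSyntaxType tsType = TransferSyntaxType.forUID(dis.getTransferSyntax());
    // wraps the fragment stream so a decompressor can read one compressed frame at a time
    EncapsulatedPixelDataImageInputStream epd =
            new EncapsulatedPixelDataImageInputStream(dis, desc, tsType);
}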
Use of org.dcm4che3.imageio.codec.ImageDescriptor in project dcm4chee-arc-light by dcm4che: class StoreServiceImpl, method selectCompressionRule.
private ArchiveCompressionRule selectCompressionRule(Transcoder transcoder, StoreContext storeContext) {
    ImageDescriptor imageDescriptor = transcoder.getImageDescriptor();
    if (imageDescriptor == null) // not an image
        return null;
    if (transcoder.getSourceTransferSyntaxType() != TransferSyntaxType.NATIVE) // already compressed
        return null;
    StoreSession session = storeContext.getStoreSession();
    Optional<ArchiveCompressionRule> matchingRule = session.getArchiveAEExtension().compressionRules()
            .filter(rule -> rule.match(
                    session.getRemoteHostName(),
                    session.getCallingAET(),
                    session.getLocalHostName(),
                    session.getCalledAET(),
                    storeContext.getAttributes()))
            .findFirst();
    if (matchingRule.isPresent()) {
        if (!imageDescriptor.isMultiframeWithEmbeddedOverlays()) {
            return matchingRule.get();
        }
        LOG.info("Compression of multi-frame image with embedded overlays not supported");
    }
    return null;
}
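The eligibility checks can be read as a single predicate. A hypothetical helper (not part of the archive) that distills them, using only the calls shown above:

private static boolean isCompressible(Transcoder transcoder) {
    ImageDescriptor desc = transcoder.getImageDescriptor();
    return desc != null                                                               // an image object
            && transcoder.getSourceTransferSyntaxType() == TransferSyntaxType.NATIVE  // not yet compressed
            && !desc.isMultiframeWithEmbeddedOverlays();                              // embedded overlays not supported
}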
Use of org.dcm4che3.imageio.codec.ImageDescriptor in project dcm4che by dcm4che: class DicomImageReader, method iisOfFrame.
/**
 * Generates an image input stream for the given frame; pass -1 to address all frames
 * (video, multi-component single frame). The returned stream does not necessarily
 * support the length operation without seeking/reading to the end of the input.
 *
 * @param frameIndex the zero-based frame index, or -1 for all frames
 * @return the image input stream positioned on the frame, or null if there is no
 *         encapsulated pixel data
 * @throws IOException if seeking to the requested frame fails
 */
public ImageInputStream iisOfFrame(int frameIndex) throws IOException {
    ImageInputStream iisOfFrame;
    if (epdiis != null) {
        seekFrame(frameIndex);
        iisOfFrame = epdiis;
    } else if (pixelDataFragments == null) {
        return null;
    } else {
        iisOfFrame = new SegmentedInputImageStream(iis, pixelDataFragments, frames == 1 ? -1 : frameIndex);
        ((SegmentedInputImageStream) iisOfFrame).setImageDescriptor(imageDescriptor);
    }
    return patchJpegLS != null ? new PatchJPEGLSImageInputStream(iisOfFrame, patchJpegLS) : iisOfFrame;
}
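A minimal sketch of obtaining a DicomImageReader through the standard ImageIO registry and decoding one frame; the "DICOM" format name and the direct call to iisOfFrame are assumptions based on how dcm4che tooling typically uses the reader (hypothetical path, imports omitted as in the snippets above):

ImageReader reader = ImageIO.getImageReadersByFormatName("DICOM").next();
try (ImageInputStream iis = ImageIO.createImageInputStream(new File("multiframe.dcm"))) {
    reader.setInput(iis);
    // decode the first frame through the normal ImageIO API
    BufferedImage frame0 = reader.read(0, reader.getDefaultReadParam());
    // iisOfFrame(int) positions a stream on a single frame's compressed data;
    // it can also be called directly when the reader is a DicomImageReader
    ImageInputStream frameStream = ((DicomImageReader) reader).iisOfFrame(0);
} finally {
    reader.dispose();
}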
Use of org.dcm4che3.imageio.codec.ImageDescriptor in project dcm4che by dcm4che: class Transcoder, method adjustDataset.
private void adjustDataset() {
    PhotometricInterpretation pmi = imageDescriptor.getPhotometricInterpretation();
    if (decompressor != null) {
        if (imageDescriptor.getSamples() == 3) {
            if (pmi.isYBR() && TransferSyntaxType.isYBRCompression(srcTransferSyntax)) {
                pmi = PhotometricInterpretation.RGB;
                dataset.setString(Tag.PhotometricInterpretation, VR.CS, pmi.toString());
            }
            dataset.setInt(Tag.PlanarConfiguration, VR.US, srcTransferSyntaxType.getPlanarConfiguration());
        } else {
            if (srcTransferSyntaxType.adjustBitsStoredTo12(dataset)) {
                LOG.info("Adjust invalid Bits Stored: {} of {} to 12",
                        imageDescriptor.getBitsStored(), srcTransferSyntaxType);
            }
        }
    }
    if (compressor != null) {
        if (pmi == PhotometricInterpretation.PALETTE_COLOR && lossyCompression) {
            palette2rgb = true;
            dataset.removeSelected(cmTags);
            dataset.setInt(Tag.SamplesPerPixel, VR.US, 3);
            dataset.setInt(Tag.BitsAllocated, VR.US, 8);
            dataset.setInt(Tag.BitsStored, VR.US, 8);
            dataset.setInt(Tag.HighBit, VR.US, 7);
            pmi = PhotometricInterpretation.RGB;
            LOG.warn("Converting PALETTE_COLOR model into a lossy format is not recommended, prefer a lossless format");
        } else if ((pmi.isSubSampled() && !srcTransferSyntaxType.isPixeldataEncapsulated())
                || (pmi == PhotometricInterpretation.YBR_FULL
                        && (TransferSyntaxType.isYBRCompression(destTransferSyntax)
                                || destTransferSyntaxType == TransferSyntaxType.JPEG_LS))) {
            ybr2rgb = true;
            pmi = PhotometricInterpretation.RGB;
            LOG.debug("Conversion to an RGB color model is required before compression.");
        } else {
            if (destTransferSyntaxType.adjustBitsStoredTo12(dataset)) {
                LOG.debug("Adjust Bits Stored: {} for {} to 12",
                        imageDescriptor.getBitsStored(), destTransferSyntaxType);
            }
        }
        dataset.setString(Tag.PhotometricInterpretation, VR.CS, pmiForCompression(pmi).toString());
        compressorImageDescriptor = new ImageDescriptor(dataset, bitsCompressed);
        pmi = pmi.compress(destTransferSyntax);
        dataset.setString(Tag.PhotometricInterpretation, VR.CS, pmi.toString());
        if (dataset.getInt(Tag.SamplesPerPixel, 1) > 1)
            dataset.setInt(Tag.PlanarConfiguration, VR.US, destTransferSyntaxType.getPlanarConfiguration());
        if (lossyCompression) {
            dataset.setString(Tag.LossyImageCompression, VR.CS, "01");
            try {
                dataset.setFloat(Tag.LossyImageCompressionRatio, VR.DS,
                        ((Number) compressParam.getClass().getMethod("getCompressionRatiofactor")
                                .invoke(compressParam)).floatValue());
            } catch (Exception ignore) {
            }
        }
    }
}
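adjustDataset() runs internally when a Transcoder is driven from one transfer syntax to another. A minimal sketch of such a run, compressing a native object to JPEG Lossless; the Transcoder constructor, setDestinationTransferSyntax and the transcode(Handler) callback follow the pattern of dcm4che's Dcm2Dcm tool and are stated here as assumptions (hypothetical paths, imports omitted as in the snippets above):

try (Transcoder transcoder = new Transcoder(new File("native.dcm"))) {
    // "1.2.840.10008.1.2.4.70" = JPEG Lossless, SV1 (first-order prediction)
    transcoder.setDestinationTransferSyntax("1.2.840.10008.1.2.4.70");
    // the handler supplies the output stream once the (adjusted) dataset is known
    transcoder.transcode((t, dataset) -> new FileOutputStream("compressed.dcm"));
}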