Use of java.awt.image.DataBufferInt in project deeplearning4j by deeplearning4j.
Class DrawReconstruction, method start().
public void start() {
    int[] pixels = ((DataBufferInt) img.getRaster().getDataBuffer()).getData();
    boolean running = true;
    while (running) {
        BufferStrategy bs = frame.getBufferStrategy();
        if (bs == null) {
            // No buffer strategy yet: create one and return.
            frame.createBufferStrategy(4);
            return;
        }
        // Clear the image's backing pixel array before drawing.
        for (int i = 0; i < width * height; i++) pixels[i] = 0;
        Graphics g = bs.getDrawGraphics();
        g.drawImage(img, heightOffset, widthOffset, width, height, null);
        g.dispose();
        bs.show();
    }
}
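The method above works because a TYPE_INT_RGB image is backed by a DataBufferInt, so writes to the extracted int[] show up in the image immediately. Below is a minimal, self-contained sketch of just that direct-pixel-access pattern; the 320x240 size and the gradient fill are illustrative assumptions, not taken from deeplearning4j.

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

public class DirectPixelDemo {
    public static void main(String[] args) {
        // TYPE_INT_RGB images are backed by a DataBufferInt, so the cast below is safe.
        int width = 320, height = 240;  // illustrative dimensions
        BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);

        // Grab the backing int[] once; writes to it are visible in the image immediately.
        // Note that exposing the raw array may disable hardware acceleration for this image.
        int[] pixels = ((DataBufferInt) img.getRaster().getDataBuffer()).getData();

        // Fill the buffer with a simple horizontal gradient.
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int gray = 255 * x / (width - 1);
                pixels[y * width + x] = (gray << 16) | (gray << 8) | gray;
            }
        }
        System.out.println("pixel(0,0) = 0x" + Integer.toHexString(img.getRGB(0, 0)));
    }
}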
Use of java.awt.image.DataBufferInt in project jna by java-native-access.
Class RasterRangesUtils, method outputOccupiedRanges().
/**
 * Outputs ranges of occupied pixels.
 * In a raster that has an alpha layer, a pixel is occupied if its alpha value is non-zero.
 * In a raster without an alpha layer, a pixel is occupied if it is not completely black.
 * @param raster image to be segmented into non-black or non-transparent ranges
 * @param out destination of the occupied ranges
 * @return true if the output succeeded, false otherwise
 */
public static boolean outputOccupiedRanges(Raster raster, RangesOutput out) {
    Rectangle bounds = raster.getBounds();
    SampleModel sampleModel = raster.getSampleModel();
    boolean hasAlpha = sampleModel.getNumBands() == 4;
    // Try to use the underlying data array directly for a few common raster formats
    if (raster.getParent() == null && bounds.x == 0 && bounds.y == 0) {
        // No support for subraster (as obtained with Image.getSubimage(...))
        DataBuffer data = raster.getDataBuffer();
        if (data.getNumBanks() == 1) {
            if (sampleModel instanceof MultiPixelPackedSampleModel) {
                MultiPixelPackedSampleModel packedSampleModel = (MultiPixelPackedSampleModel) sampleModel;
                if (packedSampleModel.getPixelBitStride() == 1) {
                    // TYPE_BYTE_BINARY
                    return outputOccupiedRangesOfBinaryPixels(((DataBufferByte) data).getData(), bounds.width, bounds.height, out);
                }
            } else if (sampleModel instanceof SinglePixelPackedSampleModel) {
                if (sampleModel.getDataType() == DataBuffer.TYPE_INT) {
                    // TYPE_INT_ARGB, TYPE_INT_ARGB_PRE, TYPE_INT_BGR or TYPE_INT_RGB
                    return outputOccupiedRanges(((DataBufferInt) data).getData(), bounds.width, bounds.height, hasAlpha ? 0xff000000 : 0xffffff, out);
                }
                // TODO could easily handle cases of TYPE_USHORT_GRAY and TYPE_BYTE_GRAY.
            }
        }
    }
    // Fallback behaviour: copy pixels of raster
    int[] pixels = raster.getPixels(0, 0, bounds.width, bounds.height, (int[]) null);
    return outputOccupiedRanges(pixels, bounds.width, bounds.height, hasAlpha ? 0xff000000 : 0xffffff, out);
}
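In the fast path above, the packed int[] obtained from the DataBufferInt is tested with a mask of 0xff000000 when an alpha band exists, or 0xffffff otherwise. The sketch below illustrates that masking test in isolation; anyOccupied is a simplified hypothetical helper, not the project's range-building overload.

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

public class OccupancyCheckSketch {
    /** Returns true if any pixel is "occupied" under the given mask (hypothetical helper). */
    static boolean anyOccupied(int[] pixels, int occupationMask) {
        for (int px : pixels) {
            if ((px & occupationMask) != 0) {
                return true;  // non-zero alpha, or non-black color, depending on the mask
            }
        }
        return false;
    }

    public static void main(String[] args) {
        BufferedImage img = new BufferedImage(4, 4, BufferedImage.TYPE_INT_ARGB);
        img.setRGB(2, 1, 0x80FF0000);  // one semi-transparent red pixel
        int[] pixels = ((DataBufferInt) img.getRaster().getDataBuffer()).getData();
        boolean hasAlpha = img.getColorModel().hasAlpha();
        System.out.println(anyOccupied(pixels, hasAlpha ? 0xff000000 : 0xffffff));  // true
    }
}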
Use of java.awt.image.DataBufferInt in project processing by processing.
Class QuickTimeWriter, method writeFrame().
/**
 * Encodes an image as a video frame and writes it into a video track.
 * <p>
 * Only the video encodings listed below are supported by this method.
 * For other encodings, you have to encode the image yourself and then
 * call one of the {@code writeSample} methods.
 * <ul>
 * <li>RAW</li>
 * <li>JPG</li>
 * <li>PNG</li>
 * <li>RLE</li>
 * </ul>
 *
 * @param track The track index.
 * @param image The image of the video frame.
 * @param duration The duration of the video frame in media time scale units.
 *
 * @throws IndexOutOfBoundsException if the track index is out of bounds.
 * @throws IllegalArgumentException if the duration is less than 1, or if the dimension of the frame
 * does not match the dimension of the video.
 * @throws UnsupportedOperationException if the QuickTimeWriter does not have
 * a built-in encoder for this video format.
 * @throws IOException if writing the sample data failed.
 */
public void writeFrame(int track, BufferedImage image, long duration) throws IOException {
    if (duration <= 0) {
        throw new IllegalArgumentException("Duration must be greater than 0.");
    }
    // throws an IndexOutOfBoundsException if the track index is illegal
    VideoTrack t = (VideoTrack) tracks.get(track);
    if (t.mediaType != MediaType.VIDEO) {
        throw new IllegalArgumentException("Track " + track + " is not a video track");
    }
    if (t.videoEncoding == null) {
        throw new UnsupportedOperationException("Encoding not supported.");
    }
    ensureStarted();
    // The dimension of the image must match the dimension of the video track
    if (t.videoWidth != image.getWidth() || t.videoHeight != image.getHeight()) {
        throw new IllegalArgumentException("Dimensions of frame[" + tracks.get(track).getSampleCount() + "] (width=" + image.getWidth() + ", height=" + image.getHeight() + ") differ from video dimension (width=" + t.videoWidth + ", height=" + t.videoHeight + ") in track " + track + ".");
    }
    long offset = getRelativeStreamPosition();
    boolean isSync;
    switch (t.videoEncoding) {
        case RAW:
        {
            isSync = true;
            switch (t.videoDepth) {
                case 8:
                {
                    if (image.getType() != BufferedImage.TYPE_BYTE_INDEXED) {
                        throw new IllegalArgumentException("BufferedImage type " + image.getType() + " does not match track type " + BufferedImage.TYPE_BYTE_INDEXED + ".");
                    }
                    // Handle sub-image
                    WritableRaster raster = image.getRaster();
                    int sw = raster.getSampleModel().getWidth();
                    // int sh = raster.getSampleModel().getHeight();
                    Rectangle r = raster.getBounds();
                    r.x -= raster.getSampleModelTranslateX();
                    r.y -= raster.getSampleModelTranslateY();
                    DataBufferByte buf = (DataBufferByte) raster.getDataBuffer();
                    byte[] bytes = buf.getData();
                    // Write the samples
                    for (int xy = r.x + r.y * sw, ymax = r.x + (r.y + r.height) * sw; xy < ymax; xy += sw) {
                        mdatAtom.getOutputStream().write(bytes, xy, r.width);
                    }
                    break;
                }
                case 24:
                {
                    WritableRaster raster = image.getRaster();
                    // holds a scanline of raw image data with 3 channels of 32 bit data
                    int[] rgb = new int[t.videoWidth * 3];
                    // holds a scanline of raw image data with 3 channels of 8 bit data
                    byte[] bytes = new byte[t.videoWidth * 3];
                    for (int y = 0; y < t.videoHeight; y++) {
                        // Note: Method getPixels is very slow as it does sample conversions for us
                        rgb = raster.getPixels(0, y, t.videoWidth, 1, rgb);
                        for (int k = 0, n = t.videoWidth * 3; k < n; k++) {
                            bytes[k] = (byte) rgb[k];
                        }
                        mdatAtom.getOutputStream().write(bytes);
                    }
                    break;
                }
                default:
                    throw new UnsupportedOperationException("Encoding not supported.");
            }
            break;
        }
        case JPG:
        {
            isSync = true;
            ImageWriter iw = ImageIO.getImageWritersByMIMEType("image/jpeg").next();
            ImageWriteParam iwParam = iw.getDefaultWriteParam();
            iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            iwParam.setCompressionQuality(t.videoQuality);
            MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(mdatAtom.getOutputStream());
            iw.setOutput(imgOut);
            IIOImage img = new IIOImage(image, null, null);
            iw.write(null, img, iwParam);
            iw.dispose();
            break;
        }
        case PNG:
        {
            isSync = true;
            ImageWriter iw = ImageIO.getImageWritersByMIMEType("image/png").next();
            ImageWriteParam iwParam = iw.getDefaultWriteParam();
            // FIXME - Detect number of bits per pixel, ensure that correct value is written into video media header atom.
            // FIXME - Maybe we should quietly enforce 24 bits per pixel
            MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(mdatAtom.getOutputStream());
            iw.setOutput(imgOut);
            IIOImage img = new IIOImage(image, null, null);
            iw.write(null, img, iwParam);
            iw.dispose();
            break;
        }
        case RLE:
        {
            isSync = t.previousData == null || t.syncInterval != 0 && t.sampleCount % t.syncInterval == 0;
            // Handle sub-image
            WritableRaster raster = image.getRaster();
            int sw = raster.getSampleModel().getWidth();
            // int sh = raster.getSampleModel().getHeight();
            Rectangle r = raster.getBounds();
            r.x -= raster.getSampleModelTranslateX();
            r.y -= raster.getSampleModelTranslateY();
            if (t.encoder == null) {
                t.encoder = new AppleRLEEncoder();
            }
            AppleRLEEncoder enc = t.encoder;
            switch (t.videoDepth) {
                case 16:
                {
                    DataBufferUShort buf = (DataBufferUShort) raster.getDataBuffer();
                    short[] data = buf.getData();
                    if (isSync) {
                        enc.writeKey16(mdatAtom.getOutputStream(), data, r.width, r.height, r.x + r.y * sw, sw);
                    } else {
                        // FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
                        enc.writeDelta16(mdatAtom.getOutputStream(), data, (short[]) t.previousData, r.width, r.height, r.x + r.y * sw, sw);
                    }
                    if (t.previousData == null) {
                        t.previousData = new short[data.length];
                    }
                    System.arraycopy(data, 0, t.previousData, 0, data.length);
                    break;
                }
                case 24:
                {
                    DataBufferInt buf = (DataBufferInt) raster.getDataBuffer();
                    int[] data = buf.getData();
                    if (isSync) {
                        enc.writeKey24(mdatAtom.getOutputStream(), data, r.width, r.height, r.x + r.y * sw, sw);
                    } else {
                        // FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
                        enc.writeDelta24(mdatAtom.getOutputStream(), data, (int[]) t.previousData, r.width, r.height, r.x + r.y * sw, sw);
                    }
                    if (t.previousData == null) {
                        t.previousData = new int[data.length];
                    }
                    System.arraycopy(data, 0, t.previousData, 0, data.length);
                    break;
                }
                case 32:
                {
                    DataBufferInt buf = (DataBufferInt) raster.getDataBuffer();
                    int[] data = buf.getData();
                    if (isSync) {
                        enc.writeKey32(mdatAtom.getOutputStream(), data, image.getWidth(), image.getHeight(), 0, image.getWidth());
                    } else {
                        // FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
                        enc.writeDelta32(mdatAtom.getOutputStream(), data, (int[]) t.previousData, image.getWidth(), image.getHeight(), 0, image.getWidth());
                    }
                    if (t.previousData == null) {
                        t.previousData = new int[data.length];
                    }
                    System.arraycopy(data, 0, t.previousData, 0, data.length);
                    break;
                }
                default:
                    throw new UnsupportedOperationException("Encoding not supported.");
            }
            break;
        }
        default:
        {
            throw new UnsupportedOperationException("Encoding not supported.");
        }
    }
    long length = getRelativeStreamPosition() - offset;
    t.addSample(new Sample(duration, offset, length), 1, isSync);
}
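The 24-bit RAW branch above pays for Raster.getPixels on every scanline; when the frame is known to be TYPE_INT_RGB, the same packed-to-planar conversion can be done straight from the DataBufferInt backing array. Below is a standalone sketch of that conversion; the method name scanlineToRgb24 and the assumption of a directly constructed TYPE_INT_RGB frame are mine, not the project's.

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

public class ScanlineConversionSketch {
    /**
     * Converts one scanline of a TYPE_INT_RGB image into 3 bytes per pixel (R, G, B),
     * reading directly from the DataBufferInt instead of Raster.getPixels (hypothetical helper).
     */
    static byte[] scanlineToRgb24(BufferedImage image, int y) {
        int w = image.getWidth();
        int[] pixels = ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        byte[] out = new byte[w * 3];
        for (int x = 0; x < w; x++) {
            int rgb = pixels[y * w + x];         // assumes no sub-image, scanline stride == width
            out[x * 3] = (byte) (rgb >> 16);     // R
            out[x * 3 + 1] = (byte) (rgb >> 8);  // G
            out[x * 3 + 2] = (byte) rgb;         // B
        }
        return out;
    }

    public static void main(String[] args) {
        BufferedImage frame = new BufferedImage(4, 2, BufferedImage.TYPE_INT_RGB);
        frame.setRGB(1, 0, 0x11223344);
        byte[] row = scanlineToRgb24(frame, 0);
        System.out.printf("pixel 1 -> %02x %02x %02x%n", row[3] & 0xff, row[4] & 0xff, row[5] & 0xff);  // 22 33 44
    }
}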
Use of java.awt.image.DataBufferInt in project scriptographer by scriptographer.
Class Image, method setImage().
public void setImage(BufferedImage image) {
    int imgType = getCompatibleType();
    if (image.getType() != imgType || image.getWidth() != width || image.getHeight() != height) {
        // Normalize to a compatible type and size by redrawing into a temporary image.
        BufferedImage tmp = new BufferedImage(width, height, imgType);
        tmp.createGraphics().drawImage(image, 0, 0, null);
        image = tmp;
    }
    DataBufferInt buffer = (DataBufferInt) image.getRaster().getDataBuffer();
    int[] data = buffer.getData();
    nativeSetPixels(data, width, height, byteWidth);
}
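The temporary-image step above is what guarantees the cast to DataBufferInt cannot fail: drawing into a freshly created image of a known int-packed type normalizes whatever was passed in. Below is a small sketch of that normalization in isolation; the target type TYPE_INT_ARGB and the helper name toIntPixels are assumptions, whereas the project derives its type from getCompatibleType().

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

public class NormalizeImageSketch {
    /** Redraws any BufferedImage into an int-packed copy so its raster is backed by DataBufferInt (hypothetical helper). */
    static int[] toIntPixels(BufferedImage source, int width, int height) {
        BufferedImage compatible = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = compatible.createGraphics();
        g.drawImage(source, 0, 0, null);
        g.dispose();  // the original snippet skips dispose(); releasing the context is the tidier habit
        return ((DataBufferInt) compatible.getRaster().getDataBuffer()).getData();
    }

    public static void main(String[] args) {
        // A byte-packed image would make a direct DataBufferInt cast fail, so it is a good test input.
        BufferedImage source = new BufferedImage(8, 8, BufferedImage.TYPE_3BYTE_BGR);
        int[] pixels = toIntPixels(source, 8, 8);
        System.out.println("pixels: " + pixels.length);  // 64
    }
}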
Use of java.awt.image.DataBufferInt in project scriptographer by scriptographer.
Class Image, method getImage().
/**
 * Fetches the pixels from the image and creates a BufferedImage from them.
 */
public BufferedImage getImage() {
    BufferedImage img = new BufferedImage(width, height, getCompatibleType());
    DataBufferInt buffer = (DataBufferInt) img.getRaster().getDataBuffer();
    int[] data = buffer.getData();
    // The native call fills the image's backing int[] in place.
    nativeGetPixels(data, width, height, byteWidth);
    return img;
}
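getImage() relies on nativeGetPixels to fill the freshly exposed int[] in place; a pure-Java stand-in for that step is simply an arraycopy into the backing buffer. Below is a sketch of the same round trip with the native call replaced by such a copy; fillPixels and the choice of TYPE_INT_ARGB are illustrative assumptions.

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;

public class BackingBufferFillSketch {
    /** Stand-in for a native fill: copies source pixels into the image's backing array (hypothetical helper). */
    static void fillPixels(int[] destination, int[] source) {
        System.arraycopy(source, 0, destination, 0, Math.min(source.length, destination.length));
    }

    public static void main(String[] args) {
        int width = 3, height = 2;
        int[] externalPixels = { 0xFF0000FF, 0xFF00FF00, 0xFFFF0000, 0xFF000000, 0xFFFFFFFF, 0xFF808080 };

        BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        int[] data = ((DataBufferInt) img.getRaster().getDataBuffer()).getData();
        fillPixels(data, externalPixels);  // writes land directly in the image

        System.out.println(Integer.toHexString(img.getRGB(1, 0)));  // ff00ff00
    }
}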