Use of java.awt.image.DataBufferByte in project processing by processing.
The class QuickTimeWriter, method writeFrame.
/**
* Encodes an image as a video frame and writes it into a video track.
* <p>
* Only the video encodings listed below are supported by this method.
* For other encodings, you have to encode the image yourself and then
* call one of the {@code writeSample} methods.
* <ul>
* <li>RAW</li>
* <li>JPG</li>
* <li>PNG</li>
* <li>RLE</li>
* </ul>
*
* @param track The track index.
* @param image The image of the video frame.
* @param duration The duration of the video frame in media time scale units.
*
* @throws IndexOutOfBoundsException if the track index is out of bounds.
* @throws IllegalArgumentException if the duration is less than 1, or if the dimension
* of the frame does not match the dimension of the video.
* @throws UnsupportedOperationException if the QuickTimeWriter does not have
* a built-in encoder for this video format.
* @throws IOException if writing the sample data failed.
*/
public void writeFrame(int track, BufferedImage image, long duration) throws IOException {
if (duration <= 0) {
throw new IllegalArgumentException("Duration must be greater 0.");
}
// throws index out of bounds exception if illegal track index
VideoTrack t = (VideoTrack) tracks.get(track);
if (t.mediaType != MediaType.VIDEO) {
throw new IllegalArgumentException("Track " + track + " is not a video track");
}
if (t.videoEncoding == null) {
throw new UnsupportedOperationException("Encoding not supported.");
}
ensureStarted();
// The dimension of the image must match the dimension of the video track
if (t.videoWidth != image.getWidth() || t.videoHeight != image.getHeight()) {
throw new IllegalArgumentException("Dimensions of frame[" + tracks.get(track).getSampleCount() + "] (width=" + image.getWidth() + ", height=" + image.getHeight() + ") differs from video dimension (width=" + t.videoWidth + ", height=" + t.videoHeight + ") in track " + track + ".");
}
long offset = getRelativeStreamPosition();
boolean isSync;
switch(t.videoEncoding) {
case RAW:
{
isSync = true;
switch(t.videoDepth) {
case 8:
{
if (image.getType() != BufferedImage.TYPE_BYTE_INDEXED) {
throw new IllegalArgumentException("BufferedImage type " + image.getType() + " does not match track type " + BufferedImage.TYPE_BYTE_INDEXED + ".");
}
// Handle sub-image
WritableRaster raster = image.getRaster();
int sw = raster.getSampleModel().getWidth();
// int sh = raster.getSampleModel().getHeight();
Rectangle r = raster.getBounds();
r.x -= raster.getSampleModelTranslateX();
r.y -= raster.getSampleModelTranslateY();
DataBufferByte buf = (DataBufferByte) raster.getDataBuffer();
byte[] bytes = buf.getData();
// Write the samples
for (int xy = r.x + r.y * sw, ymax = r.x + (r.y + r.height) * sw; xy < ymax; xy += sw) {
mdatAtom.getOutputStream().write(bytes, xy, r.width);
}
break;
}
case 24:
{
WritableRaster raster = image.getRaster();
// holds a scanline of raw image data with 3 channels of 32 bit data
int[] rgb = new int[t.videoWidth * 3];
// holds a scanline of raw image data with 3 channels of 8 bit data
byte[] bytes = new byte[t.videoWidth * 3];
for (int y = 0; y < t.videoHeight; y++) {
// Note: Method getPixels is very slow as it does sample conversions for us
rgb = raster.getPixels(0, y, t.videoWidth, 1, rgb);
for (int k = 0, n = t.videoWidth * 3; k < n; k++) {
bytes[k] = (byte) rgb[k];
}
mdatAtom.getOutputStream().write(bytes);
}
break;
}
default:
throw new UnsupportedOperationException("Encoding not supported.");
}
break;
}
case JPG:
{
isSync = true;
ImageWriter iw = ImageIO.getImageWritersByMIMEType("image/jpeg").next();
ImageWriteParam iwParam = iw.getDefaultWriteParam();
iwParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
iwParam.setCompressionQuality(t.videoQuality);
MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(mdatAtom.getOutputStream());
iw.setOutput(imgOut);
IIOImage img = new IIOImage(image, null, null);
iw.write(null, img, iwParam);
iw.dispose();
break;
}
case PNG:
{
isSync = true;
ImageWriter iw = ImageIO.getImageWritersByMIMEType("image/png").next();
ImageWriteParam iwParam = iw.getDefaultWriteParam();
// FIXME - Detect number of bits per pixel, ensure that correct value is written into video media header atom.
// FIXME - Maybe we should quietly enforce 24 bits per pixel
MemoryCacheImageOutputStream imgOut = new MemoryCacheImageOutputStream(mdatAtom.getOutputStream());
iw.setOutput(imgOut);
IIOImage img = new IIOImage(image, null, null);
iw.write(null, img, iwParam);
iw.dispose();
break;
}
case RLE:
{
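// Write a key frame if this is the first frame or if the sample count is a multiple
// of the sync interval; otherwise the frame is delta-encoded against the previous one.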
isSync = t.previousData == null || (t.syncInterval != 0 && t.sampleCount % t.syncInterval == 0);
// Handle sub-image
WritableRaster raster = image.getRaster();
int sw = raster.getSampleModel().getWidth();
// int sh = raster.getSampleModel().getHeight();
Rectangle r = raster.getBounds();
r.x -= raster.getSampleModelTranslateX();
r.y -= raster.getSampleModelTranslateY();
if (t.encoder == null) {
t.encoder = new AppleRLEEncoder();
}
AppleRLEEncoder enc = t.encoder;
switch(t.videoDepth) {
case 16:
{
DataBufferUShort buf = (DataBufferUShort) raster.getDataBuffer();
short[] data = buf.getData();
if (isSync) {
enc.writeKey16(mdatAtom.getOutputStream(), data, r.width, r.height, r.x + r.y * sw, sw);
} else {
// FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
enc.writeDelta16(mdatAtom.getOutputStream(), data, (short[]) t.previousData, r.width, r.height, r.x + r.y * sw, sw);
}
if (t.previousData == null) {
t.previousData = new short[data.length];
}
System.arraycopy(data, 0, t.previousData, 0, data.length);
break;
}
case 24:
{
DataBufferInt buf = (DataBufferInt) raster.getDataBuffer();
int[] data = buf.getData();
if (isSync) {
enc.writeKey24(mdatAtom.getOutputStream(), data, r.width, r.height, r.x + r.y * sw, sw);
} else {
// FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
enc.writeDelta24(mdatAtom.getOutputStream(), data, (int[]) t.previousData, r.width, r.height, r.x + r.y * sw, sw);
}
if (t.previousData == null) {
t.previousData = new int[data.length];
}
System.arraycopy(data, 0, t.previousData, 0, data.length);
break;
}
case 32:
{
DataBufferInt buf = (DataBufferInt) raster.getDataBuffer();
int[] data = buf.getData();
if (isSync) {
enc.writeKey32(mdatAtom.getOutputStream(), data, image.getWidth(), image.getHeight(), 0, image.getWidth());
} else {
// FIXME - We blindly assume that the sub-image of the previous image is the same as the current one
enc.writeDelta32(mdatAtom.getOutputStream(), data, (int[]) t.previousData, image.getWidth(), image.getHeight(), 0, image.getWidth());
}
if (t.previousData == null) {
t.previousData = new int[data.length];
}
System.arraycopy(data, 0, t.previousData, 0, data.length);
break;
}
default:
throw new UnsupportedOperationException("Encoding not supported.");
}
break;
}
default:
{
throw new UnsupportedOperationException("Encoding not supported.");
}
}
long length = getRelativeStreamPosition() - offset;
t.addSample(new Sample(duration, offset, length), 1, isSync);
}
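A minimal usage sketch of writeFrame follows. It is not taken from the Processing sources: the constructor, the VideoFormat enum constant, addVideoTrack, and close shown here are assumptions about the Monte Media-derived QuickTimeWriter API; only writeFrame itself is documented above.
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

public class WriteFrameSketch {
    public static void main(String[] args) throws IOException {
        // Assumed constructor and track setup: 640x480 PNG-encoded track,
        // time scale of 30 media-time units per second.
        QuickTimeWriter out = new QuickTimeWriter(new File("out.mov"));
        out.addVideoTrack(QuickTimeWriter.VideoFormat.PNG, 30, 640, 480);
        BufferedImage frame = new BufferedImage(640, 480, BufferedImage.TYPE_INT_RGB);
        Graphics2D g = frame.createGraphics();
        for (int i = 0; i < 90; i++) {
            g.fillRect(i, 100, 10, 10); // draw something into the frame
            out.writeFrame(0, frame, 1); // track 0, one media-time unit (1/30 s) per frame
        }
        g.dispose();
        out.close();
    }
}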
Use of java.awt.image.DataBufferByte in project jdk8u_jdk by JetBrains.
The class IncorrectAlphaConversionBicubic, method makeUnmanagedBI.
private static BufferedImage makeUnmanagedBI(GraphicsConfiguration gc, int type) {
BufferedImage img = gc.createCompatibleImage(SIZE, SIZE, type);
Graphics2D g2d = img.createGraphics();
g2d.setColor(RGB);
g2d.fillRect(0, 0, SIZE, SIZE);
g2d.dispose();
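// Calling getData() on the typed DataBuffer "steals" the backing array and marks the
// raster as untrackable, so Java2D cannot cache the image in VRAM and it stays unmanaged.
// As a fallback, an acceleration priority of 0 asks Java2D not to accelerate the image.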
final DataBuffer db = img.getRaster().getDataBuffer();
if (db instanceof DataBufferInt) {
((DataBufferInt) db).getData();
} else if (db instanceof DataBufferShort) {
((DataBufferShort) db).getData();
} else if (db instanceof DataBufferByte) {
((DataBufferByte) db).getData();
} else {
try {
img.setAccelerationPriority(0.0f);
} catch (final Throwable ignored) {
}
}
return img;
}
Use of java.awt.image.DataBufferByte in project jdk8u_jdk by JetBrains.
The class UnmanagedDrawImagePerformance, method makeUnmanagedBI.
private static BufferedImage makeUnmanagedBI(final int type) {
final BufferedImage img = new BufferedImage(SIZE, SIZE, type);
final DataBuffer db = img.getRaster().getDataBuffer();
if (db instanceof DataBufferInt) {
((DataBufferInt) db).getData();
} else if (db instanceof DataBufferShort) {
((DataBufferShort) db).getData();
} else if (db instanceof DataBufferByte) {
((DataBufferByte) db).getData();
} else {
try {
img.setAccelerationPriority(0.0f);
} catch (final Throwable ignored) {
}
}
return img;
}
Use of java.awt.image.DataBufferByte in project jdk8u_jdk by JetBrains.
The class IncorrectUnmanagedImageSourceOffset, method makeUnmanagedBI.
private static BufferedImage makeUnmanagedBI(final int type) {
final BufferedImage bi = new BufferedImage(511, 255, type);
final DataBuffer db = bi.getRaster().getDataBuffer();
if (db instanceof DataBufferInt) {
((DataBufferInt) db).getData();
} else if (db instanceof DataBufferShort) {
((DataBufferShort) db).getData();
} else if (db instanceof DataBufferByte) {
((DataBufferByte) db).getData();
} else {
try {
bi.setAccelerationPriority(0.0f);
} catch (final Throwable ignored) {
}
}
return bi;
}
Use of java.awt.image.DataBufferByte in project tdme by andreasdr.
The class ImageIO, method imageToByteBuffer.
/**
* Converts an image into a byte buffer suitable as an OpenGL texture source
* @param image the source image
* @param flipped true to flip the image vertically
* @param forceAlpha true to force an alpha channel even if the source image has none
* @param transparent RGB values of the color to be made fully transparent, or null
* @param powerOfTwoSupport true to pad the texture to power-of-two dimensions
* @param modeARGB true to swap the red and blue channels in the output
* @return byte buffer
*/
private ByteBuffer imageToByteBuffer(BufferedImage image, boolean flipped, boolean forceAlpha, int[] transparent, boolean powerOfTwoSupport, boolean modeARGB) {
//
ByteBuffer imageBuffer = null;
WritableRaster raster;
BufferedImage texImage;
int texWidth = image.getWidth();
int texHeight = image.getHeight();
if (powerOfTwoSupport) {
// find the closest power of 2 for the width and height
// of the produced texture
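// e.g. a 511 x 255 source image yields a 512 x 256 texture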
texWidth = 2;
texHeight = 2;
while (texWidth < image.getWidth()) {
texWidth *= 2;
}
while (texHeight < image.getHeight()) {
texHeight *= 2;
}
}
this.width = image.getWidth();
this.height = image.getHeight();
this.texHeight = texHeight;
this.texWidth = texWidth;
// create a raster that can be used by OpenGL as a source
// for a texture
boolean useAlpha = image.getColorModel().hasAlpha() || forceAlpha;
if (useAlpha) {
depth = 32;
raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 4, null);
texImage = new BufferedImage(glAlphaColorModel, raster, false, null);
} else {
depth = 24;
raster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, texWidth, texHeight, 3, null);
texImage = new BufferedImage(glColorModel, raster, false, null);
}
// copy the source image into the produced image
Graphics2D g = (Graphics2D) texImage.getGraphics();
// alpha
if (useAlpha) {
g.setColor(new Color(0f, 0f, 0f, 0f));
g.fillRect(0, 0, texWidth, texHeight);
}
if (flipped) {
g.scale(1, -1);
g.drawImage(image, 0, -height, texWidth, texHeight, null);
} else {
g.drawImage(image, 0, 0, texWidth, texHeight, null);
}
// build a byte buffer from the temporary image
// that can be used by OpenGL to produce a texture.
byte[] data = ((DataBufferByte) texImage.getRaster().getDataBuffer()).getData();
if (transparent != null) {
for (int i = 0; i < data.length; i += 4) {
boolean match = true;
for (int c = 0; c < 3; c++) {
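// convert the signed byte to its unsigned 0..255 value (equivalent to data[i + c] & 0xFF)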
int value = data[i + c] < 0 ? 256 + data[i + c] : data[i + c];
if (value != transparent[c]) {
match = false;
}
}
if (match) {
data[i + 3] = 0;
}
}
}
if (modeARGB) {
for (int i = 0; i < data.length; i += 4) {
byte rr = data[i + 0];
byte gg = data[i + 1];
byte bb = data[i + 2];
byte aa = data[i + 3];
data[i + 0] = bb;
data[i + 1] = gg;
data[i + 2] = rr;
data[i + 3] = aa;
}
}
imageBuffer = ByteBuffer.allocateDirect(data.length);
imageBuffer.order(ByteOrder.nativeOrder());
imageBuffer.put(data, 0, data.length);
imageBuffer.flip();
g.dispose();
return imageBuffer;
}
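The buffer returned above holds tightly packed RGB (depth == 24) or RGBA (depth == 32) rows of texWidth pixels. A hypothetical upload sketch using JOGL follows; it is not part of the tdme sources, and it assumes com.jogamp.opengl.GL is imported and that gl, textureId, depth, texWidth, texHeight and imageBuffer are in scope.
// Hypothetical helper, not from the tdme sources: uploads the buffer as a 2D texture.
private void uploadTexture(GL gl, int textureId, ByteBuffer imageBuffer) {
    int format = depth == 32 ? GL.GL_RGBA : GL.GL_RGB;
    gl.glBindTexture(GL.GL_TEXTURE_2D, textureId);
    // rows are tightly packed, so relax the default 4-byte unpack alignment
    gl.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);
    gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, format, texWidth, texHeight, 0, format, GL.GL_UNSIGNED_BYTE, imageBuffer);
}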