Use of cbit.vcell.export.gloworm.quicktime.VideoMediaSampleRaw in project vcell by virtualcell.
From the class FormatSpecificSpecs, method getVideoMediaSample:
/**
 * Builds a QuickTime video media sample from a single frame of packed ARGB pixel data.
 *
 * @param width              frame width in pixels
 * @param height             frame height in pixels
 * @param sampleDuration     duration of this sample, in media time units
 * @param isGrayScale        if true, the frame is reduced to 8-bit gray by keeping only
 *                           the low byte of each pixel (assumes R == G == B already)
 * @param compressionType    {@code FormatSpecificSpecs.CODEC_JPEG} selects JPEG encoding;
 *                           any other value produces an uncompressed (raw) sample
 * @param compressionQuality JPEG quality factor; ignored for raw samples
 * @param argbData           one packed ARGB int per pixel; length must be width * height
 * @return a JPEG-encoded or raw video media sample
 * @throws Exception if JPEG encoding or sample construction fails
 */
public static VideoMediaSample getVideoMediaSample(int width, int height, int sampleDuration, boolean isGrayScale, int compressionType, float compressionQuality, int[] argbData) throws Exception {
	if (isGrayScale) {
		// Convert 32-bit ARGB to 8-bit gray: keep only the low (blue) byte of each
		// pixel. Presumably the frame is already gray so any channel works — the
		// caller is responsible for that invariant.
		BufferedImage bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
		byte[] buffer = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
		for (int i = 0; i < buffer.length; i++) {
			buffer[i] = (byte) (argbData[i] & 0x000000FF);
		}
		if (compressionType == FormatSpecificSpecs.CODEC_JPEG) {
			return FormatSpecificSpecs.encodeJPEG(bufferedImage, compressionQuality, width, height, sampleDuration, Byte.SIZE, true);
		}
		// Byte.SIZE (8) instead of the magic literal 8, matching the JPEG branch above.
		return new VideoMediaSampleRaw(width, height, sampleDuration, buffer, Byte.SIZE, true);
	}
	if (compressionType == FormatSpecificSpecs.CODEC_JPEG) {
		// Copy the packed pixels straight into the image's backing int buffer.
		BufferedImage bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
		System.arraycopy(argbData, 0, ((DataBufferInt) bufferedImage.getRaster().getDataBuffer()).getData(), 0, argbData.length);
		return FormatSpecificSpecs.encodeJPEG(bufferedImage, compressionQuality, width, height, sampleDuration, Integer.SIZE, false);
	}
	// Raw sample: serialize every pixel as a big-endian 32-bit int (DataOutputStream
	// contract). Presize the buffer — the exact output size is known up front.
	ByteArrayOutputStream sampleBytes = new ByteArrayOutputStream(argbData.length * (Integer.SIZE / Byte.SIZE));
	DataOutputStream sampleData = new DataOutputStream(sampleBytes);
	for (int argb : argbData) {
		sampleData.writeInt(argb);
	}
	sampleData.close();
	return new VideoMediaSampleRaw(width, height, sampleDuration, sampleBytes.toByteArray(), Integer.SIZE, false);
}
Aggregations