Use of java.util.zip.DeflaterOutputStream in project android_frameworks_base by AOSPA —
class BlobBackupHelper, method deflate.
// Also versions the deflated blob internally in case we need to revise it
/**
 * Compresses {@code data} with DEFLATE, prefixing the output with the current
 * blob version as a 4-byte big-endian int.
 *
 * @param data the raw payload, may be {@code null}
 * @return the version header followed by the deflated payload, or {@code null}
 *         if {@code data} was {@code null} or compression failed
 */
private byte[] deflate(byte[] data) {
    byte[] result = null;
    if (data != null) {
        try {
            ByteArrayOutputStream sink = new ByteArrayOutputStream();
            // Write the version header directly to the sink ahead of the
            // deflated payload so inflate() can dispatch on it later.
            DataOutputStream headerOut = new DataOutputStream(sink);
            headerOut.writeInt(mCurrentBlobVersion);
            // try-with-resources guarantees the stream (and its native
            // Deflater) is released even if write() throws; close() also
            // finishes the deflater, committing the compressed tail bytes.
            try (DeflaterOutputStream out = new DeflaterOutputStream(sink)) {
                out.write(data);
            }
            result = sink.toByteArray();
            if (DEBUG) {
                Log.v(TAG, "Deflated " + data.length + " bytes to " + result.length);
            }
        } catch (IOException e) {
            // Best effort: log and return null so the caller skips this blob.
            Log.w(TAG, "Unable to process payload: " + e.getMessage());
        }
    }
    return result;
}
Use of java.util.zip.DeflaterOutputStream in project nutch by apache —
class DeflateUtils, method deflate.
/**
 * Returns a deflated copy of the input array.
 * <p>
 * Best-effort: on I/O failure the error is logged and whatever bytes were
 * committed so far are returned (possibly an empty array), matching the
 * original contract of never throwing.
 *
 * @param in the uncompressed bytes; must not be {@code null}
 * @return the DEFLATE-compressed bytes
 */
public static final byte[] deflate(byte[] in) {
    // Presize the sink assuming the typical compression ratio to avoid
    // early internal array growth.
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream(in.length / EXPECTED_COMPRESSION_RATIO);
    // try-with-resources both finishes the deflater (committing the
    // compressed tail) and releases its native resources, even on error;
    // the previous version could leak the stream if write() threw.
    try (DeflaterOutputStream outStream = new DeflaterOutputStream(byteOut)) {
        outStream.write(in);
    } catch (IOException e) {
        // Narrowed from Exception: only I/O failures are expected here.
        LOG.error("Error compressing: ", e);
    }
    return byteOut.toByteArray();
}
Use of java.util.zip.DeflaterOutputStream in project hive by apache —
class InputJobInfo, method writeObject.
/**
 * Serialize this object, compressing the partitions which can exceed the
 * allowed jobConf size.
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
private void writeObject(ObjectOutputStream oos) throws IOException {
// Write the non-transient fields the default way first; 'partitions' is
// presumably excluded from this (transient) and handled below — TODO confirm
// against the field declarations elsewhere in this class.
oos.defaultWriteObject();
Deflater def = new Deflater(Deflater.BEST_COMPRESSION);
// Nest a second ObjectOutputStream through a deflater so the potentially
// large partition list is embedded compressed inside the outer stream.
ObjectOutputStream partInfoWriter = new ObjectOutputStream(new DeflaterOutputStream(oos, def));
partInfoWriter.writeObject(partitions);
// NOTE(review): close() finishes the deflater, committing the compressed
// bytes — required before the outer stream can be read back. It also
// cascades to close 'oos'; that appears intentional since this is the last
// write of the serialization, but verify no caller writes to oos afterwards.
partInfoWriter.close();
}
Use of java.util.zip.DeflaterOutputStream in project bnd by bndtools —
class CAFS, method write.
/**
 * Store an input stream in the CAFS while calculating and returning the
 * SHA-1 code.
 *
 * @param in The input stream to store.
 * @return The SHA-1 code.
 * @throws Exception if anything goes wrong
 */
public SHA1 write(InputStream in) throws Exception {
Deflater deflater = new Deflater();
MessageDigest md = MessageDigest.getInstance(ALGORITHM);
// DigestInputStream hashes the raw bytes as they stream through, so the
// SHA-1 is computed over the UNcompressed content in a single pass.
DigestInputStream din = new DigestInputStream(in, md);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
DeflaterOutputStream dout = new DeflaterOutputStream(bout, deflater);
// Copies the whole input: fills 'bout' with compressed bytes and the
// digest with the plain-text hash at the same time.
copy(din, dout);
synchronized (store) {
// First check if it already exists
SHA1 sha1 = new SHA1(md.digest());
long search = index.search(sha1.digest());
if (search > 0)
return sha1;
byte[] compressed = bout.toByteArray();
// we need to append this file to our store,
// which requires a lock. However, we are in a race
// so others can get the lock between us getting
// the length and someone else getting the lock.
// So we must verify after we get the lock that the
// length was unchanged.
FileLock lock = null;
try {
long insertPoint;
int recordLength = compressed.length + HEADERLENGTH;
while (true) {
// Candidate append position: current end of file.
insertPoint = store.length();
// Lock exactly the region we intend to write.
lock = channel.lock(insertPoint, recordLength, false);
// If the file did not grow between length() and lock(), the
// locked region really is the end of the file — safe to append.
if (store.length() == insertPoint)
break;
// We got the wrong lock, someone else
// got in between reading the length
// and locking
lock.release();
}
// Uncompressed byte count, recorded alongside the compressed data
// (presumably so readers can pre-size buffers — confirm in update()).
int totalLength = deflater.getTotalIn();
store.seek(insertPoint);
update(sha1.digest(), compressed, totalLength);
index.insert(sha1.digest(), insertPoint);
return sha1;
} finally {
// Always release the region lock, even on failure mid-append.
if (lock != null)
lock.release();
}
}
}
Use of java.util.zip.DeflaterOutputStream in project vcell by virtualcell —
class SmoldynFileWriter, method writeHighResVolumeSamples.
/**
 * Writes the "# HighResVolumeSamples" section of the Smoldyn input file:
 * resamples the geometry at high resolution, maps subvolumes to pixel
 * classes, and emits the sampled volume as hex-encoded, DEFLATE-compressed
 * pixel data split into fixed-width lines.
 *
 * @throws SolverException if the fast mesh sampling produces an inconsistent
 *         number of volume domains (propagated unwrapped)
 */
private void writeHighResVolumeSamples() throws SolverException {
    try {
        printWriter.println("# HighResVolumeSamples");
        printWriter.println(VCellSmoldynKeyword.start_highResVolumeSamples);
        Origin origin = resampledGeometry.getOrigin();
        Extent extent = resampledGeometry.getExtent();
        // Target sample count for the high-resolution raster.
        int numSamples = 10000000;
        ISize sampleSize = GeometrySpec.calulateResetSamplingSize(3, extent, numSamples);
        VCImage vcImage = RayCaster.sampleGeometry(resampledGeometry, sampleSize, true);
        printWriter.println(VCellSmoldynKeyword.Origin + " " + origin.getX() + " " + origin.getY() + " " + origin.getZ());
        printWriter.println(VCellSmoldynKeyword.Size + " " + extent.getX() + " " + extent.getY() + " " + extent.getZ());
        printWriter.println(VCellSmoldynKeyword.CompartmentHighResPixelMap + " " + resampledGeometry.getGeometrySpec().getNumSubVolumes());
        VCPixelClass[] pixelclasses = vcImage.getPixelClasses();
        // Sanity check: sampling must find one pixel class per subvolume.
        if (pixelclasses != null && resampledGeometry.getGeometrySpec().getSubVolumes() != null && pixelclasses.length != resampledGeometry.getGeometrySpec().getSubVolumes().length) {
            throw new SolverException("Fast mesh sampling failed. Found " + pixelclasses.length + " of " + resampledGeometry.getGeometrySpec().getSubVolumes().length + " volume domains.\n");
        }
        // Emit the subvolume-name -> pixel-value mapping.
        for (SubVolume subVolume : resampledGeometry.getGeometrySpec().getSubVolumes()) {
            for (VCPixelClass vcPixelClass : pixelclasses) {
                if (vcPixelClass.getPixel() == subVolume.getHandle()) {
                    printWriter.println(subVolume.getName() + " " + vcPixelClass.getPixel());
                    break;
                }
            }
        }
        printWriter.println(VCellSmoldynKeyword.VolumeSamples + " " + sampleSize.getX() + " " + sampleSize.getY() + " " + sampleSize.getZ());
        if (vcImage != null) {
            // Compress the raw pixel raster and emit it hex-encoded,
            // wrapped to 250 characters per line.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DeflaterOutputStream dos = new DeflaterOutputStream(bos);
            byte[] pixels = vcImage.getPixels();
            dos.write(pixels, 0, pixels.length);
            // close() finishes the deflater, committing the compressed tail.
            dos.close();
            byte[] compressedPixels = bos.toByteArray();
            String compressedStr = Hex.toString(compressedPixels);
            int strchar = 250;
            int length = compressedStr.length();
            for (int i = 0; i < Math.ceil(length * 1.0 / strchar); ++i) {
                printWriter.println(compressedStr.substring(i * strchar, Math.min(length, (i + 1) * strchar)));
            }
        }
        printWriter.println(VCellSmoldynKeyword.end_highResVolumeSamples);
        printWriter.println();
    } catch (SolverException ex) {
        // Previously this was swallowed by the Exception catch below and
        // re-wrapped as an unchecked RuntimeException, so the declared
        // checked type never actually propagated. Rethrow it as-is.
        throw ex;
    } catch (Exception ex) {
        ex.printStackTrace(System.out);
        // Preserve the cause so the full stack trace survives the wrap.
        throw new RuntimeException("Error writing High Resolution Volume Samples: " + ex.getMessage(), ex);
    }
}
Aggregations