Use of java.util.zip.InflaterInputStream in project intellij-plugins by JetBrains:
class ByteBuffer, method uncompress().
/**
 * Replaces {@code bytes} with its zlib-inflated contents.
 *
 * @throws IOException if the compressed data is malformed or reading fails
 */
public void uncompress() throws IOException {
    // try-with-resources guarantees the stream (and the Inflater's native,
    // non-GC-visible resources) is released even if readStream throws;
    // the original leaked the stream on that path.
    try (InflaterInputStream zipInputStream =
            new InflaterInputStream(new ByteArrayInputStream(bytes))) {
        bytes = readStream(zipInputStream);
    }
}
Use of java.util.zip.InflaterInputStream in project jdk8u_jdk by JetBrains:
class PNGImageReader, method readImage().
/**
 * Decodes the PNG image data into {@code theImage}, honoring subsampling,
 * progressive-pass limits, band selection, and destination offset taken from
 * the supplied read param (defaults are used when {@code param} is null).
 *
 * @param param read settings, or null for defaults
 * @throws IIOException if reading the PNG image data fails
 */
private void readImage(ImageReadParam param) throws IIOException {
// Parses the header chunks first; supplies metadata.IHDR_* used below.
readMetadata();
int width = metadata.IHDR_width;
int height = metadata.IHDR_height;
// Init default values
sourceXSubsampling = 1;
sourceYSubsampling = 1;
sourceMinProgressivePass = 0;
// 6 is the last Adam7 interlace pass index.
sourceMaxProgressivePass = 6;
sourceBands = null;
destinationBands = null;
destinationOffset = new Point(0, 0);
// If an ImageReadParam is available, get values from it
if (param != null) {
sourceXSubsampling = param.getSourceXSubsampling();
sourceYSubsampling = param.getSourceYSubsampling();
// Clamp the requested pass range to the valid [0, 6] interval.
sourceMinProgressivePass = Math.max(param.getSourceMinProgressivePass(), 0);
sourceMaxProgressivePass = Math.min(param.getSourceMaxProgressivePass(), 6);
sourceBands = param.getSourceBands();
destinationBands = param.getDestinationBands();
destinationOffset = param.getDestinationOffset();
}
Inflater inf = null;
try {
stream.seek(imageStartPosition);
// Presents the consecutive IDAT chunk payloads as one input stream.
Enumeration<InputStream> e = new PNGImageDataEnumeration(stream);
InputStream is = new SequenceInputStream(e);
/* InflaterInputStream uses an Inflater instance which consumes
 * native (non-GC visible) resources. This is normally implicitly
 * freed when the stream is closed. However since the
 * InflaterInputStream wraps a client-supplied input stream,
 * we cannot close it.
 * But the app may depend on GC finalization to close the stream.
 * Therefore to ensure timely freeing of native resources we
 * explicitly create the Inflater instance and free its resources
 * when we are done with the InflaterInputStream by calling
 * inf.end();
 */
inf = new Inflater();
is = new InflaterInputStream(is, inf);
is = new BufferedInputStream(is);
this.pixelStream = new DataInputStream(is);
/*
 * NB: the PNG spec declares that valid range for width
 * and height is [1, 2^31-1], so here we may fail to allocate
 * a buffer for destination image due to memory limitation.
 *
 * However, the recovery strategy for this case should be
 * defined on the level of application, so we will not
 * try to estimate the required amount of the memory and/or
 * handle OOM in any way.
 */
theImage = getDestination(param, getImageTypes(0), width, height);
Rectangle destRegion = new Rectangle(0, 0, 0, 0);
sourceRegion = new Rectangle(0, 0, 0, 0);
// Fills sourceRegion/destRegion from param clipping and subsampling.
computeRegions(param, width, height, theImage, sourceRegion, destRegion);
destinationOffset.setLocation(destRegion.getLocation());
// At this point the header has been read and we know
// how many bands are in the image, so perform checking
// of the read param.
int colorType = metadata.IHDR_colorType;
checkReadParamBandSettings(param, inputBandsForColorType[colorType], theImage.getSampleModel().getNumBands());
// Listener notifications bracket the actual pixel decode.
processImageStarted(0);
decodeImage();
if (abortRequested()) {
processReadAborted();
} else {
processImageComplete();
}
} catch (IOException e) {
throw new IIOException("Error reading PNG image data", e);
} finally {
// Free the Inflater's native resources promptly (see comment above).
if (inf != null) {
inf.end();
}
}
}
Use of java.util.zip.InflaterInputStream in project ddf by codice:
class RestSecurityTest, method testInflateDeflateWithTokenDuplication().
/**
 * Verifies that inflating a deflated token containing repeated substrings
 * round-trips correctly when done via a plain InflaterInputStream, and
 * captures that CXF's DeflateEncoderDecoder produces a different result.
 */
@Test
public void testInflateDeflateWithTokenDuplication() throws Exception {
    String token = "valid_grant valid_grant valid_grant valid_grant valid_grant valid_grant";
    DeflateEncoderDecoder deflateEncoderDecoder = new DeflateEncoderDecoder();
    // Pass UTF-8 explicitly: getBytes() and the charset-less IOUtils.toString
    // overloads depend on the platform default charset (and the latter are
    // deprecated). This matches RestSecurity.inflateBase64, which decodes
    // with StandardCharsets.UTF_8.
    byte[] deflatedToken = deflateEncoderDecoder.deflateToken(token.getBytes(StandardCharsets.UTF_8));
    String cxfInflatedToken =
        IOUtils.toString(deflateEncoderDecoder.inflateToken(deflatedToken), StandardCharsets.UTF_8.name());
    String streamInflatedToken =
        IOUtils.toString(
            new InflaterInputStream(new ByteArrayInputStream(deflatedToken), new Inflater(true)),
            StandardCharsets.UTF_8.name());
    // NOTE(review): assertNotSame compares references, which is vacuously true
    // for two distinct String instances; assertNotEquals is probably what was
    // intended — confirm before tightening, as it changes what the test checks.
    assertNotSame(cxfInflatedToken, token);
    assertEquals(streamInflatedToken, token);
}
Use of java.util.zip.InflaterInputStream in project ddf by codice:
class RestSecurity, method inflateBase64().
/**
 * Decodes a MIME Base64 encoded, DEFLATE-compressed value back into a string.
 *
 * @param base64EncodedValue Base64 (MIME) encoding of deflated data
 * @return the inflated content decoded as UTF-8
 * @throws IOException if the value cannot be inflated or read
 */
public static String inflateBase64(String base64EncodedValue) throws IOException {
    byte[] deflatedValue = Base64.getMimeDecoder().decode(base64EncodedValue);
    // try-with-resources closes the stream and ends the Inflater, freeing its
    // native (off-heap) resources promptly; the original never closed it and
    // relied on GC finalization.
    try (InputStream is =
            new InflaterInputStream(new ByteArrayInputStream(deflatedValue), new Inflater(GZIP_COMPATIBLE))) {
        return IOUtils.toString(is, StandardCharsets.UTF_8.name());
    }
}
Use of java.util.zip.InflaterInputStream in project bazel by bazelbuild:
class ProfileInfo, method loadProfile().
/**
 * Loads and parses Blaze profile file.
 *
 * @param profileFile profile file path
 *
 * @return ProfileInfo object with some fields populated (call calculateStats()
 * and analyzeRelationships() to populate the remaining fields)
 * @throws UnsupportedEncodingException if the file format is invalid
 * @throws IOException if the file can't be read
 */
public static ProfileInfo loadProfile(Path profileFile) throws IOException {
  // It is extremely important to wrap InflaterInputStream using
  // BufferedInputStream because majority of reads would be done using
  // readInt()/readLong() methods and InflaterInputStream is very inefficient
  // in handling small read requests (performance difference with 1MB buffer
  // used below is almost 10x).
  //
  // try-with-resources replaces the original manual close()-then-throw calls
  // and inner finally: those leaked the stream if any header read (the
  // readUTF()/readInt() calls before the record loop) threw.
  try (DataInputStream in =
      new DataInputStream(
          new BufferedInputStream(
              new InflaterInputStream(profileFile.getInputStream(), new Inflater(false), 65536),
              1024 * 1024))) {
    if (in.readInt() != Profiler.MAGIC) {
      throw new UnsupportedEncodingException("Invalid profile datafile format");
    }
    if (in.readInt() != Profiler.VERSION) {
      throw new UnsupportedEncodingException("Incompatible profile datafile version");
    }
    String fileComment = in.readUTF();
    // Read list of used record types
    int typeCount = in.readInt();
    boolean hasUnknownTypes = false;
    Set<String> supportedTasks = new HashSet<>();
    for (ProfilerTask task : ProfilerTask.values()) {
      supportedTasks.add(task.toString());
    }
    List<ProfilerTask> typeList = new ArrayList<>();
    for (int i = 0; i < typeCount; i++) {
      String name = in.readUTF();
      if (supportedTasks.contains(name)) {
        typeList.add(ProfilerTask.valueOf(name));
      } else {
        // Type written by a different Blaze version; map it to UNKNOWN.
        hasUnknownTypes = true;
        typeList.add(ProfilerTask.UNKNOWN);
      }
    }
    ProfileInfo info = new ProfileInfo(fileComment);
    // TODO(bazel-team): Maybe this still should handle corrupted(truncated) files.
    try {
      int size;
      while ((size = in.readInt()) != Profiler.EOF_MARKER) {
        byte[] backingArray = new byte[size];
        in.readFully(backingArray);
        ByteBuffer buffer = ByteBuffer.wrap(backingArray);
        long threadId = VarInt.getVarLong(buffer);
        int id = VarInt.getVarInt(buffer);
        int parentId = VarInt.getVarInt(buffer);
        long startTime = VarInt.getVarLong(buffer);
        long duration = VarInt.getVarLong(buffer);
        int descIndex = VarInt.getVarInt(buffer) - 1;
        if (descIndex == -1) {
          // First occurrence of this description: it follows inline.
          String desc = in.readUTF();
          descIndex = info.descriptionList.size();
          info.descriptionList.add(desc);
        }
        ProfilerTask type = typeList.get(buffer.get());
        byte[] stats = null;
        if (buffer.hasRemaining()) {
          // Copy aggregated stats.
          int offset = buffer.position();
          stats = Arrays.copyOfRange(backingArray, offset, size);
          if (hasUnknownTypes) {
            while (buffer.hasRemaining()) {
              byte attrType = buffer.get();
              if (typeList.get(attrType) == ProfilerTask.UNKNOWN) {
                // We're dealing with unknown aggregated type - update stats array to
                // use ProfilerTask.UNKNOWN.ordinal() value.
                stats[buffer.position() - 1 - offset] = (byte) ProfilerTask.UNKNOWN.ordinal();
              }
              VarInt.getVarInt(buffer);
              VarInt.getVarLong(buffer);
            }
          }
        }
        ProfileInfo.Task task =
            info.new Task(
                threadId, id, parentId, startTime, duration, type, descIndex,
                new CompactStatistics(stats));
        info.addTask(task);
      }
    } catch (IOException e) {
      // Truncated or corrupted record section: keep what was parsed so far.
      info.corruptedOrIncomplete = true;
    }
    return info;
  }
}
Aggregations