Use of java.util.zip.InflaterInputStream in project hive by apache.
The class Utilities, method getBaseWork.
/**
 * Returns the Map or Reduce plan.
 * Side effect: the BaseWork returned is also placed in the gWorkMap.
 * @param conf the job configuration
 * @param name the plan name (map plan, reduce plan, or a merge plan)
 * @return the BaseWork for the supplied name; null if name is null or no plan is available
 * @throws RuntimeException if the configuration is invalid or the plan cannot be loaded
 */
private static BaseWork getBaseWork(Configuration conf, String name) {
  Path path = getPlanPath(conf, name);
  LOG.debug("PLAN PATH = {}", path);
  if (path == null) {
    // The map/reduce plan may not have been generated.
    return null;
  }
  BaseWork gWork = gWorkMap.get(conf).get(path);
  if (gWork != null) {
    LOG.debug("Found plan in cache for name: {}", name);
    return gWork;
  }
  InputStream in = null;
  Kryo kryo = SerializationUtilities.borrowKryo();
  try {
    String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
    if (engine.equals("spark")) {
      // TODO: Add the jars to the current thread's context classloader, since this may be
      // invoked by threads inside the Spark driver; this should become unnecessary once
      // SPARK-5377 is resolved.
      String addedJars = conf.get(HIVE_ADDED_JARS);
      if (StringUtils.isNotEmpty(addedJars)) {
        AddToClassPathAction addAction = new AddToClassPathAction(
            Thread.currentThread().getContextClassLoader(), Arrays.asList(addedJars.split(";")));
        ClassLoader newLoader = AccessController.doPrivileged(addAction);
        Thread.currentThread().setContextClassLoader(newLoader);
        kryo.setClassLoader(newLoader);
      }
    }
    Path localPath = path;
    LOG.debug("local path = {}", localPath);
    final long serializedSize;
    final String planMode;
    if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
      String planStringPath = path.toUri().getPath();
      LOG.debug("Loading plan from string: {}", planStringPath);
      String planString = conf.getRaw(planStringPath);
      if (planString == null) {
        LOG.info("Could not find plan string in conf");
        return null;
      }
      serializedSize = planString.length();
      planMode = "RPC";
      byte[] planBytes = Base64.getDecoder().decode(planString);
      in = new ByteArrayInputStream(planBytes);
      in = new InflaterInputStream(in);
    } else {
      LOG.debug("Open file to read in plan: {}", localPath);
      FileSystem fs = localPath.getFileSystem(conf);
      in = fs.open(localPath);
      serializedSize = fs.getFileStatus(localPath).getLen();
      planMode = "FILE";
    }
    if (MAP_PLAN_NAME.equals(name)) {
      if (ExecMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
      } else if (MergeFileMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class);
      } else if (ColumnTruncateMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class);
      } else {
        throw new RuntimeException("Unable to determine work from configuration: "
            + MAPRED_MAPPER_CLASS + " was " + conf.get(MAPRED_MAPPER_CLASS));
      }
    } else if (REDUCE_PLAN_NAME.equals(name)) {
      if (ExecReducer.class.getName().equals(conf.get(MAPRED_REDUCER_CLASS))) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
      } else {
        throw new RuntimeException("Unable to determine work from configuration: "
            + MAPRED_REDUCER_CLASS + " was " + conf.get(MAPRED_REDUCER_CLASS));
      }
    } else if (name.contains(MERGE_PLAN_NAME)) {
      if (name.startsWith(MAPNAME)) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
      } else if (name.startsWith(REDUCENAME)) {
        gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
      } else {
        throw new RuntimeException("Unknown work type: " + name);
      }
    }
    LOG.info("Deserialized plan (via {}) - name: {} size: {}", planMode,
        gWork.getName(), humanReadableByteCount(serializedSize));
    gWorkMap.get(conf).put(path, gWork);
    return gWork;
  } catch (FileNotFoundException fnf) {
    // This can happen, e.g. when there is no reduce work.
    LOG.debug("No plan file found: {}", path, fnf);
    return null;
  } catch (Exception e) {
    String msg = "Failed to load plan: " + path;
    LOG.error(msg, e);
    throw new RuntimeException(msg, e);
  } finally {
    SerializationUtilities.releaseKryo(kryo);
    IOUtils.closeStream(in);
  }
}
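The RPC branch above expects the plan to travel inside the job configuration as a Base64 string wrapping deflate-compressed bytes. Below is a minimal, self-contained sketch of that round trip; PlanStringCodec and its method names are hypothetical, and the real plan bytes would come from Kryo serialization rather than a literal string.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

// Hypothetical helper illustrating the "plan as a compressed, Base64-encoded
// configuration string" transport used by the RPC branch above.
public final class PlanStringCodec {

  // Deflate-compress the serialized plan and Base64-encode it so it can be
  // carried as a plain String in the job configuration.
  public static String encode(byte[] serializedPlan) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (DeflaterOutputStream deflater = new DeflaterOutputStream(bytes)) {
      deflater.write(serializedPlan);
    }
    return Base64.getEncoder().encodeToString(bytes.toByteArray());
  }

  // Invert encode(): Base64-decode, then inflate, mirroring the
  // ByteArrayInputStream + InflaterInputStream chain in getBaseWork().
  public static byte[] decode(String planString) throws IOException {
    byte[] planBytes = Base64.getDecoder().decode(planString);
    try (InputStream in = new InflaterInputStream(new ByteArrayInputStream(planBytes))) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buf = new byte[4096];
      int n;
      while ((n = in.read(buf)) != -1) {
        out.write(buf, 0, n);
      }
      return out.toByteArray();
    }
  }

  public static void main(String[] args) throws IOException {
    byte[] plan = "pretend this is a Kryo-serialized plan".getBytes(StandardCharsets.UTF_8);
    String wire = encode(plan);
    System.out.println(new String(decode(wire), StandardCharsets.UTF_8));
  }
}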
Use of java.util.zip.InflaterInputStream in project hive by apache.
The class InputJobInfo, method readObject.
/**
 * Deserialize this object, decompressing the partition information, which can
 * exceed the permitted jobConf size.
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
  ois.defaultReadObject();
  // The next object in the stream is a byte array holding the compressed partition information.
  ObjectInputStream pis = new ObjectInputStream(new ByteArrayInputStream((byte[]) ois.readObject()));
  ObjectInputStream partInfoReader = new ObjectInputStream(new InflaterInputStream(pis));
  partitions = (List<PartInfo>) partInfoReader.readObject();
  if (partitions != null) {
    for (PartInfo partInfo : partitions) {
      if (partInfo.getTableInfo() == null) {
        partInfo.setTableInfo(this.tableInfo);
      }
    }
  }
  // Close only the reader that wraps the decompression stream; the outer stream
  // is owned by the serialization machinery.
  partInfoReader.close();
}
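The layering in readObject only makes sense against a writer that nests its streams the same way. The project's writeObject is not shown on this page, so the following is an inferred mirror, assuming an outer object stream carrying a byte array whose inflated content is itself an object stream. PartitionedInfo and its field are hypothetical stand-ins; only the stream layering is the point.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

// Sketch only: an inferred writeObject counterpart, not copied from the Hive
// source. The partition list is transient so the default mechanism does not
// serialize it uncompressed as well.
public class PartitionedInfo implements Serializable {
  private static final long serialVersionUID = 1L;
  private transient List<String> partitions = new ArrayList<>();

  private void writeObject(ObjectOutputStream oos) throws IOException {
    oos.defaultWriteObject();
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    ObjectOutputStream pos = new ObjectOutputStream(buffer);        // mirrors 'pis'
    ObjectOutputStream partInfoWriter =
        new ObjectOutputStream(new DeflaterOutputStream(pos));      // mirrors 'partInfoReader'
    partInfoWriter.writeObject(partitions);
    partInfoWriter.close();  // finishes the deflate stream and flushes 'pos'
    oos.writeObject(buffer.toByteArray());
  }

  @SuppressWarnings("unchecked")
  private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
    ois.defaultReadObject();
    ObjectInputStream pis = new ObjectInputStream(
        new ByteArrayInputStream((byte[]) ois.readObject()));
    ObjectInputStream partInfoReader = new ObjectInputStream(new InflaterInputStream(pis));
    partitions = (List<String>) partInfoReader.readObject();
    partInfoReader.close();
  }

  public static void main(String[] args) throws Exception {
    PartitionedInfo info = new PartitionedInfo();
    info.partitions.add("part-0");
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(sink)) {
      oos.writeObject(info);
    }
    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(sink.toByteArray()))) {
      PartitionedInfo copy = (PartitionedInfo) ois.readObject();
      System.out.println(copy.partitions);  // prints [part-0]
    }
  }
}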
Use of java.util.zip.InflaterInputStream in project graylog2-server by Graylog2.
The class BeatsFrameDecoder, method processCompressedFrame.
/**
 * @see <a href="https://github.com/logstash-plugins/logstash-input-beats/blob/master/PROTOCOL.md#compressed-frame-type">'compressed' frame type</a>
 */
private Collection<ByteBuf> processCompressedFrame(Channel channel, ByteBuf channelBuffer) throws Exception {
  final long payloadLength = channelBuffer.readUnsignedInt();
  final byte[] data = new byte[(int) payloadLength];
  channelBuffer.readBytes(data);
  try (final ByteArrayInputStream dataStream = new ByteArrayInputStream(data);
       final InputStream in = new InflaterInputStream(dataStream)) {
    final ByteBuf buffer = Unpooled.wrappedBuffer(ByteStreams.toByteArray(in));
    return processCompressedDataFrames(channel, buffer);
  }
}
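The payload of a Beats 'compressed' frame is an ordinary zlib stream, which is why the decoder can use InflaterInputStream with its default Inflater. The sketch below reproduces both directions of that transform with the lower-level Deflater/Inflater API; the sample payload is made up.

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

// A minimal sketch of what the decoder's InflaterInputStream does, using the
// lower-level Inflater API directly; default mode expects the zlib header.
public final class ZlibRoundTrip {
  public static void main(String[] args) throws DataFormatException {
    byte[] original = "inner frames go here".getBytes(StandardCharsets.UTF_8);
    byte[] chunk = new byte[256];

    // Compress (what the Beats client does before sending the frame).
    Deflater deflater = new Deflater();
    deflater.setInput(original);
    deflater.finish();
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    while (!deflater.finished()) {
      compressed.write(chunk, 0, deflater.deflate(chunk));
    }
    deflater.end();

    // Inflate (what processCompressedFrame does via InflaterInputStream).
    Inflater inflater = new Inflater();
    inflater.setInput(compressed.toByteArray());
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    while (!inflater.finished()) {
      restored.write(chunk, 0, inflater.inflate(chunk));
    }
    inflater.end();

    System.out.println(new String(restored.toByteArray(), StandardCharsets.UTF_8));
  }
}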
Use of java.util.zip.InflaterInputStream in project Notes by MiCode.
The class GTaskClient, method getResponseContent.
private String getResponseContent(HttpEntity entity) throws IOException {
  String contentEncoding = null;
  if (entity.getContentEncoding() != null) {
    contentEncoding = entity.getContentEncoding().getValue();
    Log.d(TAG, "encoding: " + contentEncoding);
  }
  InputStream input = entity.getContent();
  if (contentEncoding != null && contentEncoding.equalsIgnoreCase("gzip")) {
    input = new GZIPInputStream(input);
  } else if (contentEncoding != null && contentEncoding.equalsIgnoreCase("deflate")) {
    // Use a "nowrap" Inflater: many servers send raw deflate data without the
    // zlib header and checksum.
    Inflater inflater = new Inflater(true);
    input = new InflaterInputStream(input, inflater);
  }
  try {
    InputStreamReader isr = new InputStreamReader(input);
    BufferedReader br = new BufferedReader(isr);
    StringBuilder sb = new StringBuilder();
    String line;
    while ((line = br.readLine()) != null) {
      sb.append(line);
    }
    return sb.toString();
  } finally {
    input.close();
  }
}
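The detail worth noting here is new Inflater(true): many HTTP servers send Content-Encoding: deflate bodies as raw deflate data without the zlib header, and a default Inflater would reject such input with an "incorrect header check" ZipException. A small self-contained demonstration, with made-up body text:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;

// Shows the raw (header-less) deflate case that new Inflater(true) handles.
public final class NowrapDemo {
  public static void main(String[] args) throws IOException {
    byte[] body = "hello deflate".getBytes(StandardCharsets.UTF_8);

    // Produce raw deflate data with no zlib header, as some servers do.
    ByteArrayOutputStream raw = new ByteArrayOutputStream();
    try (DeflaterOutputStream out =
        new DeflaterOutputStream(raw, new Deflater(Deflater.DEFAULT_COMPRESSION, true))) {
      out.write(body);
    }

    // A nowrap Inflater reads it back; a default Inflater would fail here.
    try (InputStream in = new InflaterInputStream(
        new ByteArrayInputStream(raw.toByteArray()), new Inflater(true))) {
      ByteArrayOutputStream restored = new ByteArrayOutputStream();
      byte[] buf = new byte[1024];
      for (int n; (n = in.read(buf)) != -1; ) {
        restored.write(buf, 0, n);
      }
      System.out.println(restored.toString("UTF-8"));
    }
  }
}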
Use of java.util.zip.InflaterInputStream in project feign by OpenFeign.
The class RecordedRequestAssert, method hasDeflatedBody.
public RecordedRequestAssert hasDeflatedBody(byte[] expectedUncompressed) {
  isNotNull();
  byte[] compressedBody = actual.getBody().readByteArray();
  byte[] uncompressedBody;
  try {
    uncompressedBody = Util.toByteArray(new InflaterInputStream(new ByteArrayInputStream(compressedBody)));
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  arrays.assertContains(info, uncompressedBody, expectedUncompressed);
  return this;
}
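For context, here is the producing side of what hasDeflatedBody verifies: a request body written through DeflaterOutputStream (zlib framing) must inflate back to the original bytes. Util.toByteArray in the snippet is feign's internal helper; the sketch below uses a plain read loop instead and is otherwise a made-up, dependency-free check.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

// Round-trips a deflated request body the way the assertion does.
public final class DeflatedBodyCheck {
  public static void main(String[] args) throws IOException {
    byte[] original = "{\"user\":\"feign\"}".getBytes(StandardCharsets.UTF_8);

    // What a client does when sending a deflate-compressed body.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (DeflaterOutputStream out = new DeflaterOutputStream(compressed)) {
      out.write(original);
    }

    // What hasDeflatedBody() does with the recorded request body.
    try (InputStream in = new InflaterInputStream(
        new ByteArrayInputStream(compressed.toByteArray()))) {
      ByteArrayOutputStream restored = new ByteArrayOutputStream();
      byte[] buf = new byte[1024];
      for (int n; (n = in.read(buf)) != -1; ) {
        restored.write(buf, 0, n);
      }
      if (!Arrays.equals(original, restored.toByteArray())) {
        throw new AssertionError("round trip failed");
      }
      System.out.println("deflated body matches");
    }
  }
}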