Example 81 with InflaterInputStream

use of java.util.zip.InflaterInputStream in project hive by apache.

the class Utilities method getBaseWork.

/**
 * Returns the Map or Reduce plan.
 * Side effect: the BaseWork returned is also placed in the gWorkMap.
 * @param conf the configuration to read the plan from
 * @param name the name of the plan to load, e.g. the map or reduce plan name
 * @return the BaseWork matching the supplied name, or null if no plan can be found
 * @throws RuntimeException if the configuration is invalid or the plan cannot be loaded
 */
private static BaseWork getBaseWork(Configuration conf, String name) {
    Path path = getPlanPath(conf, name);
    LOG.debug("PLAN PATH = {}", path);
    if (path == null) {
        // Map/reduce plan may not be generated
        return null;
    }
    BaseWork gWork = gWorkMap.get(conf).get(path);
    if (gWork != null) {
        LOG.debug("Found plan in cache for name: {}", name);
        return gWork;
    }
    InputStream in = null;
    Kryo kryo = SerializationUtilities.borrowKryo();
    try {
        String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
        if (engine.equals("spark")) {
            // TODO: add the jar to the current thread's context classloader, since this may be
            // invoked by the Spark driver inside its own threads; this should become unnecessary
            // once SPARK-5377 is resolved.
            String addedJars = conf.get(HIVE_ADDED_JARS);
            if (StringUtils.isNotEmpty(addedJars)) {
                AddToClassPathAction addAction = new AddToClassPathAction(Thread.currentThread().getContextClassLoader(), Arrays.asList(addedJars.split(";")));
                ClassLoader newLoader = AccessController.doPrivileged(addAction);
                Thread.currentThread().setContextClassLoader(newLoader);
                kryo.setClassLoader(newLoader);
            }
        }
        Path localPath = path;
        LOG.debug("local path = {}", localPath);
        final long serializedSize;
        final String planMode;
        if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
            String planStringPath = path.toUri().getPath();
            LOG.debug("Loading plan from string: {}", planStringPath);
            String planString = conf.getRaw(planStringPath);
            if (planString == null) {
                LOG.info("Could not find plan string in conf");
                return null;
            }
            serializedSize = planString.length();
            planMode = "RPC";
            byte[] planBytes = Base64.getDecoder().decode(planString);
            in = new ByteArrayInputStream(planBytes);
            in = new InflaterInputStream(in);
        } else {
            LOG.debug("Open file to read in plan: {}", localPath);
            FileSystem fs = localPath.getFileSystem(conf);
            in = fs.open(localPath);
            serializedSize = fs.getFileStatus(localPath).getLen();
            planMode = "FILE";
        }
        if (MAP_PLAN_NAME.equals(name)) {
            if (ExecMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
            } else if (MergeFileMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class);
            } else if (ColumnTruncateMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class);
            } else {
                throw new RuntimeException("unable to determine work from configuration ." + MAPRED_MAPPER_CLASS + " was " + conf.get(MAPRED_MAPPER_CLASS));
            }
        } else if (REDUCE_PLAN_NAME.equals(name)) {
            if (ExecReducer.class.getName().equals(conf.get(MAPRED_REDUCER_CLASS))) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
            } else {
                throw new RuntimeException("unable to determine work from configuration ." + MAPRED_REDUCER_CLASS + " was " + conf.get(MAPRED_REDUCER_CLASS));
            }
        } else if (name.contains(MERGE_PLAN_NAME)) {
            if (name.startsWith(MAPNAME)) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
            } else if (name.startsWith(REDUCENAME)) {
                gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
            } else {
                throw new RuntimeException("Unknown work type: " + name);
            }
        }
        if (gWork == null) {
            // None of the name branches above matched; fail with a clear message
            // rather than an NPE on the log call below.
            throw new RuntimeException("Unknown plan name: " + name);
        }
        LOG.info("Deserialized plan (via {}) - name: {} size: {}", planMode, gWork.getName(), humanReadableByteCount(serializedSize));
        gWorkMap.get(conf).put(path, gWork);
        return gWork;
    } catch (FileNotFoundException fnf) {
        // Expected in some cases, e.g. when there is no reduce work.
        LOG.debug("No plan file found: {}", path, fnf);
        return null;
    } catch (Exception e) {
        String msg = "Failed to load plan: " + path;
        LOG.error(msg, e);
        throw new RuntimeException(msg, e);
    } finally {
        SerializationUtilities.releaseKryo(kryo);
        IOUtils.closeStream(in);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) ByteArrayInputStream(java.io.ByteArrayInputStream) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) InflaterInputStream(java.util.zip.InflaterInputStream) InputStream(java.io.InputStream) FileNotFoundException(java.io.FileNotFoundException) SQLFeatureNotSupportedException(java.sql.SQLFeatureNotSupportedException) SQLTransientException(java.sql.SQLTransientException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) URISyntaxException(java.net.URISyntaxException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SQLException(java.sql.SQLException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) EOFException(java.io.EOFException) MapWork(org.apache.hadoop.hive.ql.plan.MapWork) FileSystem(org.apache.hadoop.fs.FileSystem) URLClassLoader(java.net.URLClassLoader) ColumnTruncateMapper(org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateMapper) ColumnTruncateWork(org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork) BaseWork(org.apache.hadoop.hive.ql.plan.BaseWork) ExecMapper(org.apache.hadoop.hive.ql.exec.mr.ExecMapper) Kryo(com.esotericsoftware.kryo.Kryo)
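
For context on the "RPC" branch above: the plan string read from the configuration is expected to be a deflated, Base64-encoded byte stream, which getBaseWork reverses with Base64.getDecoder() plus InflaterInputStream. A minimal sketch of that round trip, assuming plain zlib deflate (the class and method names below are illustrative, not Hive API):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Base64;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

final class PlanStringCodec {

    // Deflate the serialized plan and encode it as Base64, producing the
    // kind of string the "RPC" branch reads out of the configuration.
    static String encode(byte[] planBytes) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DeflaterOutputStream out = new DeflaterOutputStream(buf)) {
            out.write(planBytes);
        }
        return Base64.getEncoder().encodeToString(buf.toByteArray());
    }

    // The same two steps getBaseWork performs: Base64-decode, then wrap
    // the compressed bytes in an InflaterInputStream for deserialization.
    static InputStream decode(String planString) {
        byte[] compressed = Base64.getDecoder().decode(planString);
        return new InflaterInputStream(new ByteArrayInputStream(compressed));
    }
}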

Example 82 with InflaterInputStream

use of java.util.zip.InflaterInputStream in project hive by apache.

the class InputJobInfo method readObject.

/**
 * Deserialize this object, decompressing the partition list, which can exceed the
 * allowed jobConf size.
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
    ois.defaultReadObject();
    // The next object in the stream is a compressed byte array holding the partition information
    ObjectInputStream pis = new ObjectInputStream(new ByteArrayInputStream((byte[]) ois.readObject()));
    ObjectInputStream partInfoReader = new ObjectInputStream(new InflaterInputStream(pis));
    partitions = (List<PartInfo>) partInfoReader.readObject();
    if (partitions != null) {
        for (PartInfo partInfo : partitions) {
            if (partInfo.getTableInfo() == null) {
                partInfo.setTableInfo(this.tableInfo);
            }
        }
    }
    // Close only the reader used for the decompression byte stream
    partInfoReader.close();
}
Also used : ByteArrayInputStream(java.io.ByteArrayInputStream) InflaterInputStream(java.util.zip.InflaterInputStream) ObjectInputStream(java.io.ObjectInputStream)
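
The read path above implies a write path that serializes the partition list through a DeflaterOutputStream before writing it as a byte array. A simplified sketch of that deflate-on-write half of the pattern, not the exact Hive method (class and method names are placeholders):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.List;
import java.util.zip.DeflaterOutputStream;

final class PartitionCompressor {

    // Serialize a list (whose elements must be Serializable) through a
    // DeflaterOutputStream so the resulting byte array stays small enough
    // for the jobConf size limit. Closing the ObjectOutputStream also
    // finishes the deflater, so the array is complete afterwards.
    static byte[] compress(List<?> partitions) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ObjectOutputStream out =
                 new ObjectOutputStream(new DeflaterOutputStream(buf))) {
            out.writeObject(partitions);
        }
        return buf.toByteArray();
    }
}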

Example 83 with InflaterInputStream

use of java.util.zip.InflaterInputStream in project graylog2-server by Graylog2.

the class BeatsFrameDecoder method processCompressedFrame.

/**
 * @see <a href="https://github.com/logstash-plugins/logstash-input-beats/blob/master/PROTOCOL.md#compressed-frame-type">'compressed' frame type</a>
 */
private Collection<ByteBuf> processCompressedFrame(Channel channel, ByteBuf channelBuffer) throws Exception {
    final long payloadLength = channelBuffer.readUnsignedInt();
    final byte[] data = new byte[(int) payloadLength];
    channelBuffer.readBytes(data);
    try (final ByteArrayInputStream dataStream = new ByteArrayInputStream(data);
        final InputStream in = new InflaterInputStream(dataStream)) {
        final ByteBuf buffer = Unpooled.wrappedBuffer(ByteStreams.toByteArray(in));
        return processCompressedDataFrames(channel, buffer);
    }
}
Also used : ByteArrayInputStream(java.io.ByteArrayInputStream) InflaterInputStream(java.util.zip.InflaterInputStream) InputStream(java.io.InputStream) ByteBuf(io.netty.buffer.ByteBuf)
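
For reference, a sender builds such a frame by deflating the already-encoded inner frames and prefixing the payload with its length. The sketch below assumes the protocol version and frame-type bytes have already been written, since the decoder above consumes them before reading the length; the class name is illustrative:

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;

final class CompressedFrameWriter {

    // Deflate the already-encoded inner frames and prefix the payload
    // with its length, matching the readUnsignedInt() in the decoder.
    static ByteBuf wrap(byte[] innerFrames) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DeflaterOutputStream deflater = new DeflaterOutputStream(buf)) {
            deflater.write(innerFrames);
        }
        byte[] payload = buf.toByteArray();
        ByteBuf frame = Unpooled.buffer(4 + payload.length);
        frame.writeInt(payload.length);
        frame.writeBytes(payload);
        return frame;
    }
}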

Example 84 with InflaterInputStream

use of java.util.zip.InflaterInputStream in project Notes by MiCode.

the class GTaskClient method getResponseContent.

private String getResponseContent(HttpEntity entity) throws IOException {
    String contentEncoding = null;
    if (entity.getContentEncoding() != null) {
        contentEncoding = entity.getContentEncoding().getValue();
        Log.d(TAG, "encoding: " + contentEncoding);
    }
    InputStream input = entity.getContent();
    if (contentEncoding != null && contentEncoding.equalsIgnoreCase("gzip")) {
        input = new GZIPInputStream(entity.getContent());
    } else if (contentEncoding != null && contentEncoding.equalsIgnoreCase("deflate")) {
        Inflater inflater = new Inflater(true);
        input = new InflaterInputStream(entity.getContent(), inflater);
    }
    try {
        InputStreamReader isr = new InputStreamReader(input);
        BufferedReader br = new BufferedReader(isr);
        StringBuilder sb = new StringBuilder();
        // Note: BufferedReader#readLine() strips line terminators, so the
        // response body is returned with its newlines removed.
        while (true) {
            String buff = br.readLine();
            if (buff == null) {
                return sb.toString();
            }
            sb.append(buff);
        }
    } finally {
        input.close();
    }
}
Also used : GZIPInputStream(java.util.zip.GZIPInputStream) InputStreamReader(java.io.InputStreamReader) InflaterInputStream(java.util.zip.InflaterInputStream) InputStream(java.io.InputStream) BufferedReader(java.io.BufferedReader) Inflater(java.util.zip.Inflater)
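
One caveat worth noting: new Inflater(true) expects raw deflate data (RFC 1951), but some servers label zlib-wrapped data (RFC 1950) as "deflate" too. A common defensive sketch, not part of the Notes code, peeks at the first byte to pick the right mode, since a zlib stream typically starts with 0x78:

import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;

final class DeflateStreams {

    // Peek at the first byte without consuming it: zlib-wrapped streams
    // usually start with a CMF byte of 0x78; raw deflate streams do not.
    static InputStream open(InputStream body) throws IOException {
        PushbackInputStream peekable = new PushbackInputStream(body, 1);
        int first = peekable.read();
        if (first == -1) {
            return peekable; // empty body, nothing to inflate
        }
        peekable.unread(first);
        return first == 0x78
            ? new InflaterInputStream(peekable)
            : new InflaterInputStream(peekable, new Inflater(true));
    }
}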

Example 85 with InflaterInputStream

use of java.util.zip.InflaterInputStream in project feign by OpenFeign.

the class RecordedRequestAssert method hasDeflatedBody.

public RecordedRequestAssert hasDeflatedBody(byte[] expectedUncompressed) {
    isNotNull();
    byte[] compressedBody = actual.getBody().readByteArray();
    byte[] uncompressedBody;
    try {
        uncompressedBody = Util.toByteArray(new InflaterInputStream(new ByteArrayInputStream(compressedBody)));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    arrays.assertContains(info, uncompressedBody, expectedUncompressed);
    return this;
}
Also used : ByteArrayInputStream(java.io.ByteArrayInputStream) InflaterInputStream(java.util.zip.InflaterInputStream) IOException(java.io.IOException)
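
The client-side counterpart that produces such a body is a plain zlib deflate of the request bytes. A minimal sketch using only the JDK (the helper name is illustrative, not feign API):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;

final class DeflateBodies {

    // Deflate the uncompressed bytes; InflaterInputStream (as used in the
    // assertion above) reverses exactly this transformation.
    static byte[] deflate(byte[] uncompressed) {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DeflaterOutputStream out = new DeflaterOutputStream(buf)) {
            out.write(uncompressed);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return buf.toByteArray();
    }
}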

Aggregations

InflaterInputStream (java.util.zip.InflaterInputStream): 200
ByteArrayInputStream (java.io.ByteArrayInputStream): 113
InputStream (java.io.InputStream): 100
IOException (java.io.IOException): 74
Inflater (java.util.zip.Inflater): 66
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 50
GZIPInputStream (java.util.zip.GZIPInputStream): 39
DataInputStream (java.io.DataInputStream): 33
FileInputStream (java.io.FileInputStream): 27
BufferedInputStream (java.io.BufferedInputStream): 24
InputStreamReader (java.io.InputStreamReader): 13
DeflaterOutputStream (java.util.zip.DeflaterOutputStream): 13
HttpURLConnection (java.net.HttpURLConnection): 12
URL (java.net.URL): 11
File (java.io.File): 10
BufferedReader (java.io.BufferedReader): 9
OutputStream (java.io.OutputStream): 9
Point (java.awt.Point): 7
EOFException (java.io.EOFException): 7
URLConnection (java.net.URLConnection): 7