Example 46 with Kryo

use of com.esotericsoftware.kryo.Kryo in project apex-malhar by apache.

the class FileEnrichmentTest method testEnrichmentOperator.

@Test
public void testEnrichmentOperator() throws IOException, InterruptedException {
    URL origUrl = this.getClass().getResource("/productmapping.txt");
    URL fileUrl = new URL(this.getClass().getResource("/").toString() + "productmapping1.txt");
    FileUtils.deleteQuietly(new File(fileUrl.getPath()));
    FileUtils.copyFile(new File(origUrl.getPath()), new File(fileUrl.getPath()));
    MapEnricher oper = new MapEnricher();
    FSLoader store = new JsonFSLoader();
    store.setFileName(fileUrl.toString());
    oper.setLookupFields(Arrays.asList("productId"));
    oper.setIncludeFields(Arrays.asList("productCategory"));
    oper.setStore(store);
    oper.setup(null);
    /* The file contains 6 entries, but one entry is a duplicate,
     * so the cache should contain only 5 entries after scanning the input file.
     */
    // Assert.assertEquals("Number of mappings ", 7, oper.cache.size());
    CollectorTestSink<Map<String, Object>> sink = new CollectorTestSink<>();
    @SuppressWarnings({ "unchecked", "rawtypes" }) CollectorTestSink<Object> tmp = (CollectorTestSink) sink;
    oper.output.setSink(tmp);
    oper.activate(null);
    oper.beginWindow(0);
    Map<String, Object> tuple = Maps.newHashMap();
    tuple.put("productId", 3);
    tuple.put("channelId", 4);
    tuple.put("amount", 10.0);
    Kryo kryo = new Kryo();
    oper.input.process(kryo.copy(tuple));
    oper.endWindow();
    oper.deactivate();
    /* Number of tuples emitted */
    Assert.assertEquals("Number of tuple emitted ", 1, sink.collectedTuples.size());
    Map<String, Object> emitted = sink.collectedTuples.iterator().next();
    /* The fields present in the original event are kept as-is */
    Assert.assertEquals("Number of fields in emitted tuple", 4, emitted.size());
    Assert.assertEquals("value of productId is 3", tuple.get("productId"), emitted.get("productId"));
    Assert.assertEquals("value of channelId is 4", tuple.get("channelId"), emitted.get("channelId"));
    Assert.assertEquals("value of amount is 10.0", tuple.get("amount"), emitted.get("amount"));
    /* Check if productCategory is added to the event */
    Assert.assertTrue("productCategory is part of tuple", emitted.containsKey("productCategory"));
    Assert.assertEquals("value of productCategory is 5", 5, emitted.get("productCategory"));
    Assert.assertTrue(emitted.get("productCategory") instanceof Integer);
}
Also used : URL(java.net.URL) File(java.io.File) Map(java.util.Map) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Kryo(com.esotericsoftware.kryo.Kryo) Test(org.junit.Test)
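
The distinctive Kryo usage here is kryo.copy(tuple), which deep-copies the input map so the operator cannot mutate the test's own reference. A minimal standalone sketch of the same technique follows; the class name and values are illustrative rather than taken from the test, and it assumes Kryo's default configuration in the 2.x-4.x line (no registration required).

import java.util.HashMap;
import java.util.Map;

import com.esotericsoftware.kryo.Kryo;

public class KryoCopySketch {

    public static void main(String[] args) {
        Map<String, Object> original = new HashMap<>();
        original.put("productId", 3);
        original.put("amount", 10.0);

        Kryo kryo = new Kryo();
        // copy() performs a deep copy through Kryo's serializers,
        // so mutating the copy leaves the original untouched.
        Map<String, Object> copy = kryo.copy(original);
        copy.put("amount", 99.0);

        System.out.println(original.get("amount")); // prints 10.0
        System.out.println(copy.get("amount"));     // prints 99.0
    }
}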

Example 47 with Kryo

use of com.esotericsoftware.kryo.Kryo in project hive by apache.

the class Utilities method getBaseWork.

/**
 * Returns the Map or Reduce plan.
 * Side effect: the BaseWork returned is also placed in the gWorkMap.
 * @param conf the job configuration
 * @param name the plan name, e.g. MAP_PLAN_NAME or REDUCE_PLAN_NAME
 * @return the BaseWork corresponding to the supplied name, or null if no plan is found
 * @throws RuntimeException if the configuration is invalid or the plan cannot be loaded
 */
private static BaseWork getBaseWork(Configuration conf, String name) {
    Path path = null;
    InputStream in = null;
    Kryo kryo = SerializationUtilities.borrowKryo();
    try {
        String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
        if (engine.equals("spark")) {
            // TODO Add jar into current thread context classloader as it may be invoked by Spark driver inside
            // threads, should be unnecessary while SPARK-5377 is resolved.
            String addedJars = conf.get(HIVE_ADDED_JARS);
            if (StringUtils.isNotEmpty(addedJars)) {
                ClassLoader loader = Thread.currentThread().getContextClassLoader();
                ClassLoader newLoader = addToClassPath(loader, addedJars.split(";"));
                Thread.currentThread().setContextClassLoader(newLoader);
                kryo.setClassLoader(newLoader);
            }
        }
        path = getPlanPath(conf, name);
        LOG.info("PLAN PATH = {}", path);
        if (path == null) {
            // Map/reduce plan may not be generated
            return null;
        }
        BaseWork gWork = gWorkMap.get(conf).get(path);
        if (gWork == null) {
            Path localPath = path;
            LOG.debug("local path = {}", localPath);
            final long serializedSize;
            final String planMode;
            if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
                String planStringPath = path.toUri().getPath();
                LOG.debug("Loading plan from string: {}", planStringPath);
                String planString = conf.getRaw(planStringPath);
                if (planString == null) {
                    LOG.info("Could not find plan string in conf");
                    return null;
                }
                serializedSize = planString.length();
                planMode = "RPC";
                byte[] planBytes = Base64.decodeBase64(planString);
                in = new ByteArrayInputStream(planBytes);
                in = new InflaterInputStream(in);
            } else {
                LOG.debug("Open file to read in plan: {}", localPath);
                FileSystem fs = localPath.getFileSystem(conf);
                in = fs.open(localPath);
                serializedSize = fs.getFileStatus(localPath).getLen();
                planMode = "FILE";
            }
            if (MAP_PLAN_NAME.equals(name)) {
                if (ExecMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
                } else if (MergeFileMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class);
                } else if (ColumnTruncateMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class);
                } else {
                    throw new RuntimeException("unable to determine work from configuration. " + MAPRED_MAPPER_CLASS + " was " + conf.get(MAPRED_MAPPER_CLASS));
                }
            } else if (REDUCE_PLAN_NAME.equals(name)) {
                if (ExecReducer.class.getName().equals(conf.get(MAPRED_REDUCER_CLASS))) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
                } else {
                    throw new RuntimeException("unable to determine work from configuration. " + MAPRED_REDUCER_CLASS + " was " + conf.get(MAPRED_REDUCER_CLASS));
                }
            } else if (name.contains(MERGE_PLAN_NAME)) {
                if (name.startsWith(MAPNAME)) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
                } else if (name.startsWith(REDUCENAME)) {
                    gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
                } else {
                    throw new RuntimeException("Unknown work type: " + name);
                }
            }
            LOG.info("Deserialized plan (via {}) - name: {} size: {}", planMode, gWork.getName(), humanReadableByteCount(serializedSize));
            gWorkMap.get(conf).put(path, gWork);
        } else {
            LOG.debug("Found plan in cache for name: {}", name);
        }
        return gWork;
    } catch (FileNotFoundException fnf) {
        // This happens, e.g., when there is no reduce work.
        LOG.debug("No plan file found: {}", path, fnf);
        return null;
    } catch (Exception e) {
        String msg = "Failed to load plan: " + path;
        LOG.error(msg, e);
        throw new RuntimeException(msg, e);
    } finally {
        SerializationUtilities.releaseKryo(kryo);
        IOUtils.closeStream(in);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) InflaterInputStream(java.util.zip.InflaterInputStream) InputStream(java.io.InputStream) InflaterInputStream(java.util.zip.InflaterInputStream) FileNotFoundException(java.io.FileNotFoundException) SQLFeatureNotSupportedException(java.sql.SQLFeatureNotSupportedException) SQLTransientException(java.sql.SQLTransientException) SQLException(java.sql.SQLException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) EOFException(java.io.EOFException) FileNotFoundException(java.io.FileNotFoundException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) MapWork(org.apache.hadoop.hive.ql.plan.MapWork) ByteArrayInputStream(java.io.ByteArrayInputStream) FileSystem(org.apache.hadoop.fs.FileSystem) URLClassLoader(java.net.URLClassLoader) ColumnTruncateMapper(org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateMapper) ColumnTruncateWork(org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork) BaseWork(org.apache.hadoop.hive.ql.plan.BaseWork) ExecMapper(org.apache.hadoop.hive.ql.exec.mr.ExecMapper) Kryo(com.esotericsoftware.kryo.Kryo)
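
The pattern worth noting in Example 47 is that every use of Kryo is bracketed by SerializationUtilities.borrowKryo() and a releaseKryo() call in the finally block, so a configured instance is reused across calls instead of being rebuilt each time. The sketch below shows one way such a borrow/release pool can be written; it is an illustrative simplification, not Hive's actual SerializationUtilities, which also installs Hive-specific serializers on each instance.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import com.esotericsoftware.kryo.Kryo;

public class SimpleKryoPool {

    // Bounded queue of idle instances; Kryo objects are not thread-safe,
    // so each borrower gets exclusive use of one instance.
    private final BlockingQueue<Kryo> pool = new ArrayBlockingQueue<>(8);

    public Kryo borrowKryo() {
        Kryo kryo = pool.poll();
        // Fall back to a fresh instance when the pool is empty.
        return kryo != null ? kryo : new Kryo();
    }

    public void releaseKryo(Kryo kryo) {
        // Restore the classloader before returning the instance, since a
        // borrower may have changed it (as getBaseWork does for Spark).
        kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
        pool.offer(kryo);
    }
}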

Example 48 with Kryo

use of com.esotericsoftware.kryo.Kryo in project jersey by jersey.

the class KryoMessageBodyProvider method writeTo.

@Override
public void writeTo(final Object object, final Class<?> type, final Type genericType, final Annotation[] annotations, final MediaType mediaType, final MultivaluedMap<String, Object> httpHeaders, final OutputStream entityStream) throws IOException, WebApplicationException {
    final Output output = new Output(entityStream);
    kryoPool.run(new KryoCallback() {

        public Object execute(Kryo kryo) {
            kryo.writeObject(output, object);
            return null;
        }
    });
    output.flush();
}
Also used : Output(com.esotericsoftware.kryo.io.Output) KryoCallback(com.esotericsoftware.kryo.pool.KryoCallback) Kryo(com.esotericsoftware.kryo.Kryo)
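
writeTo delegates to kryoPool.run(...), which borrows an instance, executes the callback, and returns the instance to the pool afterwards. The sketch below shows how such a pool is typically built and adds a symmetric read-side helper; it assumes Kryo 3.x/4.x, where KryoPool, KryoFactory, and KryoCallback live in com.esotericsoftware.kryo.pool (the package was removed in Kryo 5), and the readFrom helper is illustrative rather than the provider's actual method.

import java.io.InputStream;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.pool.KryoCallback;
import com.esotericsoftware.kryo.pool.KryoFactory;
import com.esotericsoftware.kryo.pool.KryoPool;

public class KryoPoolSketch {

    // softReferences() lets pooled instances be reclaimed under memory pressure.
    private static final KryoPool POOL = new KryoPool.Builder(new KryoFactory() {

        @Override
        public Kryo create() {
            return new Kryo();
        }
    }).softReferences().build();

    // Read-side counterpart of writeTo: run() borrows a Kryo instance,
    // executes the callback, and returns the instance to the pool.
    public static <T> T readFrom(final InputStream entityStream, final Class<T> type) {
        final Input input = new Input(entityStream);
        return POOL.run(new KryoCallback<T>() {

            @Override
            public T execute(Kryo kryo) {
                return kryo.readObject(input, type);
            }
        });
    }
}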

Example 49 with Kryo

use of com.esotericsoftware.kryo.Kryo in project hive by apache.

the class SerializationUtilities method serializeObjectToKryo.

private static byte[] serializeObjectToKryo(Serializable object) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Output output = new Output(baos);
    Kryo kryo = borrowKryo();
    try {
        kryo.writeObject(output, object);
    } finally {
        releaseKryo(kryo);
    }
    output.close();
    return baos.toByteArray();
}
Also used : Output(com.esotericsoftware.kryo.io.Output) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Kryo(com.esotericsoftware.kryo.Kryo)

Example 50 with Kryo

use of com.esotericsoftware.kryo.Kryo in project hive by apache.

the class SerializationUtilities method deserializeObjectFromKryo.

private static <T extends Serializable> T deserializeObjectFromKryo(byte[] bytes, Class<T> clazz) {
    Input inp = new Input(new ByteArrayInputStream(bytes));
    Kryo kryo = borrowKryo();
    T func = null;
    try {
        func = kryo.readObject(inp, clazz);
    } finally {
        releaseKryo(kryo);
    }
    inp.close();
    return func;
}
Also used : Input(com.esotericsoftware.kryo.io.Input) ByteArrayInputStream(java.io.ByteArrayInputStream) Kryo(com.esotericsoftware.kryo.Kryo)
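
Examples 49 and 50 are inverses: writeObject into an Output backed by a byte array, then readObject from an Input over the same bytes, round-trips the object. The sketch below demonstrates that round trip end to end; it uses a plain new Kryo() in place of Hive's internal borrowKryo()/releaseKryo() pool, so it is a simplified stand-in rather than the class's real code path.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Serializable;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;

public class KryoRoundTripSketch {

    static byte[] serialize(Kryo kryo, Serializable object) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Output output = new Output(baos);
        kryo.writeObject(output, object);
        // close() flushes Output's internal buffer into the stream.
        output.close();
        return baos.toByteArray();
    }

    static <T extends Serializable> T deserialize(Kryo kryo, byte[] bytes, Class<T> clazz) {
        Input input = new Input(new ByteArrayInputStream(bytes));
        T result = kryo.readObject(input, clazz);
        input.close();
        return result;
    }

    public static void main(String[] args) {
        Kryo kryo = new Kryo();
        byte[] bytes = serialize(kryo, "hello kryo");
        String back = deserialize(kryo, bytes, String.class);
        System.out.println(back); // prints: hello kryo
    }
}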

Aggregations

Kryo (com.esotericsoftware.kryo.Kryo): 94
Input (com.esotericsoftware.kryo.io.Input): 37
Output (com.esotericsoftware.kryo.io.Output): 34
Test (org.junit.Test): 26
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 21
ByteArrayInputStream (java.io.ByteArrayInputStream): 17
StdInstantiatorStrategy (org.objenesis.strategy.StdInstantiatorStrategy): 14
File (java.io.File): 10
CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink): 10
List (java.util.List): 9
Map (java.util.Map): 8
Test (org.testng.annotations.Test): 8
ArrayList (java.util.ArrayList): 7
Path (org.apache.hadoop.fs.Path): 7
BigIntegerSerializer (com.esotericsoftware.kryo.serializers.DefaultSerializers.BigIntegerSerializer): 5
FileNotFoundException (java.io.FileNotFoundException): 5
IOException (java.io.IOException): 5
BaseTest (org.broadinstitute.hellbender.utils.test.BaseTest): 5
DefaultPartition (com.datatorrent.api.DefaultPartition): 4
CountDownLatch (java.util.concurrent.CountDownLatch): 4