Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class TestGenericUDFOPMultiply, method testFloatTimesFloat.
@Test
public void testFloatTimesFloat() throws HiveException {
    GenericUDFOPMultiply udf = new GenericUDFOPMultiply();
    FloatWritable f1 = new FloatWritable(4.5f);
    FloatWritable f2 = new FloatWritable(0.0f);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
        PrimitiveObjectInspectorFactory.writableFloatObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(f1), new DeferredJavaObject(f2) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo());
    FloatWritable res = (FloatWritable) udf.evaluate(args);
    // EPSILON is the float-comparison tolerance defined in the shared test base class.
    Assert.assertEquals(0.0, res.get(), EPSILON);
}
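For readers unfamiliar with the Writable contract that the tests exercise, a minimal, self-contained sketch of a FloatWritable serialization round-trip: serialize with write(DataOutput), deserialize with readFields(DataInput). The class name and stream setup below are illustrative, not taken from the test above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.FloatWritable;

public class FloatWritableRoundTrip {
    public static void main(String[] args) throws Exception {
        FloatWritable out = new FloatWritable(4.5f);
        // write() emits the value as a single 4-byte IEEE 754 float.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        out.write(new DataOutputStream(bytes));
        // readFields() repopulates a fresh instance from the same bytes.
        FloatWritable in = new FloatWritable();
        in.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(in.get()); // prints 4.5
    }
}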
Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class TestGenericUDFOPPositive, method testFloat.
@Test
public void testFloat() throws HiveException {
    GenericUDFOPPositive udf = new GenericUDFOPPositive();
    FloatWritable input = new FloatWritable(323.4747f);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableFloatObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.floatTypeInfo, oi.getTypeInfo());
    FloatWritable res = (FloatWritable) udf.evaluate(args);
    Assert.assertEquals(323.4747, res.get(), EPSILON);
}
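The EPSILON delta in the final assertion matters because res.get() returns a float that assertEquals widens to double, and the widened value is not bit-identical to the double literal 323.4747. A short illustration of why an exact comparison would fail:

float f = 323.4747f;                 // nearest representable float to 323.4747
double d = f;                        // widening preserves the float's exact value
System.out.println(d == 323.4747);   // false: a float keeps only ~7 decimal digits
System.out.println(Math.abs(d - 323.4747) < 1e-4); // true under a small tolerance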
Use of org.apache.hadoop.io.FloatWritable in project hive by apache.
From the class TestObjectInspectorConverters, method convertFloat.
private void convertFloat() {
    Converter floatConverter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    // int input is boxed as FloatWritable; null passes through unchanged.
    assertEquals("FloatConverter", new FloatWritable(0), floatConverter.convert(Integer.valueOf(0)));
    assertEquals("FloatConverter", new FloatWritable(1), floatConverter.convert(Integer.valueOf(1)));
    assertEquals("FloatConverter", null, floatConverter.convert(null));
}
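The same factory call covers other primitive pairs. A hedged sketch, assuming the standard primitive-to-primitive conversions apply (Double input, writable float output); the value 1.5 is illustrative:

Converter doubleToFloat = ObjectInspectorConverters.getConverter(
    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
    PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
// Expected to box the narrowed value as a FloatWritable, i.e. new FloatWritable(1.5f).
FloatWritable fw = (FloatWritable) doubleToFloat.convert(Double.valueOf(1.5d));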
Use of org.apache.hadoop.io.FloatWritable in project Cloud9 by lintool.
From the class Int2FloatMap, method createIfMissing.
public final void createIfMissing(int k) {
    Integer ki = Integer.valueOf(k); // boxed key; avoids the deprecated Integer(int) constructor
    if (data.get(ki) == null) {
        // No entry yet: insert a FloatWritable initialized to 0.0f.
        data.put(ki, new FloatWritable());
    }
}
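Since Java 8, the same create-if-missing behavior is usually written with Map.computeIfAbsent, which needs a single lookup instead of get-then-put. A minimal sketch (java.util.HashMap / java.util.Map); the data map here stands in for Int2FloatMap's internal storage, whose declaration is not shown above:

Map<Integer, FloatWritable> data = new HashMap<>();
// Inserts a zero-valued FloatWritable only when the key is absent.
data.computeIfAbsent(42, key -> new FloatWritable());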
Use of org.apache.hadoop.io.FloatWritable in project Cloud9 by lintool.
From the class BigramRelativeFrequencyIT, method testBigramRelativeFrequencyJson.
@Test
public void testBigramRelativeFrequencyJson() throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    assertTrue(fs.exists(collectionPath));
    String[] args = new String[] {
        "hadoop --config src/test/resources/hadoop-local-conf/ jar",
        IntegrationUtils.getJar("target", "cloud9"),
        edu.umd.cloud9.example.bigram.BigramRelativeFrequencyJson.class.getCanonicalName(),
        "-input", collectionPath.toString(),
        "-output", tmpPrefix + "-json",
        "-numReducers", "1" };
    IntegrationUtils.exec(Joiner.on(" ").join(args));
    // Spot-check the single reducer's output at known record positions.
    SequenceFile.Reader reader = new SequenceFile.Reader(conf,
        SequenceFile.Reader.file(new Path(tmpPrefix + "-json/part-r-00000")));
    BigramRelativeFrequencyJson.MyTuple json = new BigramRelativeFrequencyJson.MyTuple();
    FloatWritable f = new FloatWritable();
    reader.next(json, f);
    assertEquals("&c", json.getJsonObject().get("Left").getAsString());
    assertEquals("*", json.getJsonObject().get("Right").getAsString());
    assertEquals(17f, f.get(), 10e-6);
    // Skip ahead 100 records.
    for (int i = 0; i < 100; i++) {
        reader.next(json, f);
    }
    assertEquals("'dear", json.getJsonObject().get("Left").getAsString());
    assertEquals("*", json.getJsonObject().get("Right").getAsString());
    assertEquals(2f, f.get(), 10e-6);
    reader.next(json, f);
    assertEquals("'dear", json.getJsonObject().get("Left").getAsString());
    assertEquals("lord", json.getJsonObject().get("Right").getAsString());
    assertEquals(1f, f.get(), 10e-6);
    reader.close();
}
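When record positions in the output are not known ahead of time, the usual alternative to fixed-offset reads is a full scan of the SequenceFile. A minimal sketch using the standard Hadoop reader API (plus org.apache.hadoop.io.Writable and org.apache.hadoop.util.ReflectionUtils); the path reuses the test's tmpPrefix:

SequenceFile.Reader reader = new SequenceFile.Reader(conf,
    SequenceFile.Reader.file(new Path(tmpPrefix + "-json/part-r-00000")));
// Instantiate a key of whatever type the file declares; the values are FloatWritable.
Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
FloatWritable value = new FloatWritable();
while (reader.next(key, value)) { // next() returns false at end of file
    System.out.println(key + "\t" + value.get());
}
reader.close();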