Use of org.apache.hadoop.io.DoubleWritable in project presto by prestodb.
From the class TestObjectEncoders, method testPrimitiveObjectEncoders:
@Test
public void testPrimitiveObjectEncoders()
{
    ObjectInspector inspector;
    ObjectEncoder encoder;

    // Integral Hive writables all encode to the Java Long stack type.
    inspector = writableLongObjectInspector;
    encoder = createEncoder(BIGINT, inspector);
    assertTrue(encoder.encode(new LongWritable(123456L)) instanceof Long);

    inspector = writableIntObjectInspector;
    encoder = createEncoder(INTEGER, inspector);
    assertTrue(encoder.encode(new IntWritable(12345)) instanceof Long);

    inspector = writableShortObjectInspector;
    encoder = createEncoder(SMALLINT, inspector);
    assertTrue(encoder.encode(new ShortWritable((short) 1234)) instanceof Long);

    inspector = writableByteObjectInspector;
    encoder = createEncoder(TINYINT, inspector);
    assertTrue(encoder.encode(new ByteWritable((byte) 123)) instanceof Long);

    inspector = writableBooleanObjectInspector;
    encoder = createEncoder(BOOLEAN, inspector);
    assertTrue(encoder.encode(new BooleanWritable(true)) instanceof Boolean);

    inspector = writableDoubleObjectInspector;
    encoder = createEncoder(DOUBLE, inspector);
    assertTrue(encoder.encode(new DoubleWritable(0.1)) instanceof Double);

    inspector = writableDateObjectInspector;
    encoder = createEncoder(DATE, inspector);
    assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);

    // Short decimals (precision <= 18) fit in a Long; wider ones need a Slice.
    inspector = writableHiveDecimalObjectInspector;
    encoder = createEncoder(createDecimalType(11, 10), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.2345678910")) instanceof Long);

    encoder = createEncoder(createDecimalType(34, 33), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.281734081274028174012432412423134")) instanceof Slice);
}
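The assertions above only check the encoded stack types. For context, the Writable values fed into these encoders follow Hadoop's standard serialization contract; here is a minimal, self-contained round trip through that contract (the class name is ours for illustration, everything else is the stock org.apache.hadoop.io.DoubleWritable API):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        DoubleWritable original = new DoubleWritable(0.1);

        // Writables serialize themselves to a DataOutput...
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // ...and repopulate a (reusable) instance from a DataInput.
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get()); // prints 0.1
    }
}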
Use of org.apache.hadoop.io.DoubleWritable in project presto by prestodb.
From the class RcFileTester, method decodeRecordReaderValue:
private static Object decodeRecordReaderValue(Type type, Object actualValue)
{
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    }
    else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    }
    else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    }
    else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    }
    else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    }
    else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    }
    else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    }
    else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    }
    else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    }
    else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    }
    else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    }
    else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), UTC_KEY);
        }
        else {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L));
        }
    }
    else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
    }
    else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
    }
    else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
    }
    else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
    }
    else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
    }
    else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    }
    return actualValue;
}
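The long instanceof chain reduces each Hive writable to a plain Java value. As a stand-alone illustration of that unwrap pattern, the numeric cases boil down to the sketch below; the helper name is hypothetical and not part of RcFileTester:

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;

public final class WritableUnwrap {
    // Hypothetical helper: returns the boxed Java value for the numeric
    // writables handled above, or the input unchanged otherwise.
    static Object unwrapNumeric(Object value) {
        if (value instanceof DoubleWritable) {
            return ((DoubleWritable) value).get();   // boxes to Double
        }
        if (value instanceof FloatWritable) {
            return ((FloatWritable) value).get();    // boxes to Float
        }
        if (value instanceof IntWritable) {
            return ((IntWritable) value).get();      // boxes to Integer
        }
        if (value instanceof LongWritable) {
            return ((LongWritable) value).get();     // boxes to Long
        }
        return value;
    }
}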
Use of org.apache.hadoop.io.DoubleWritable in project shifu by ShifuML.
From the class FeatureImportanceReducer, method cleanup:
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    List<FeatureScore> featureScores = new ArrayList<FeatureImportanceReducer.FeatureScore>();
    for (Entry<Integer, Double> entry : variableStatsMap.entrySet()) {
        featureScores.add(new FeatureScore(entry.getKey(), entry.getValue()));
    }
    Collections.sort(featureScores, new Comparator<FeatureScore>() {
        @Override
        public int compare(FeatureScore fs1, FeatureScore fs2) {
            if (fs1.binAvgScore < fs2.binAvgScore) {
                return 1;
            }
            if (fs1.binAvgScore > fs2.binAvgScore) {
                return -1;
            }
            return 0;
        }
    });
    for (FeatureScore featureScore : featureScores) {
        context.write(new IntWritable(featureScore.columnNum), new DoubleWritable(featureScore.binAvgScore));
    }
}
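The anonymous Comparator sorts descending by binAvgScore, so the highest-scoring features are written first. On Java 8+ the same ordering can be expressed in one line; a sketch, using a stand-in FeatureScore class since the reducer's inner class is not shown in full:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class DescendingScoreSort {
    // Stand-in for FeatureImportanceReducer.FeatureScore.
    static class FeatureScore {
        final int columnNum;
        final double binAvgScore;

        FeatureScore(int columnNum, double binAvgScore) {
            this.columnNum = columnNum;
            this.binAvgScore = binAvgScore;
        }
    }

    public static void main(String[] args) {
        List<FeatureScore> scores = new ArrayList<>();
        scores.add(new FeatureScore(7, 0.42));
        scores.add(new FeatureScore(3, 0.87));

        // Same ordering as the anonymous Comparator: highest score first.
        scores.sort(Comparator.comparingDouble((FeatureScore fs) -> fs.binAvgScore).reversed());

        for (FeatureScore fs : scores) {
            System.out.println(fs.columnNum + "\t" + fs.binAvgScore);
        }
    }
}

The anonymous-class form in the original keeps the code compatible with older Java; the comparator one-liner trades that for brevity.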
Use of org.apache.hadoop.io.DoubleWritable in project shifu by ShifuML.
From the class FeatureImportanceMapper, method setup:
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    loadConfigFiles(context);
    loadTagWeightNum();
    this.dataPurifier = new DataPurifier(this.modelConfig, false);
    this.outputKey = new IntWritable();
    this.outputValue = new DoubleWritable();
    this.tags = new HashSet<String>(modelConfig.getFlattenTags());
    this.headers = CommonUtils.getFinalHeaders(modelConfig);
    this.initFeatureStats();
}
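setup() allocates outputKey and outputValue once because context.write() serializes them immediately, so a single mutable pair can be recycled for every record instead of allocating per call. A hypothetical map() in the same spirit; the input format and parsing here are assumptions for illustration, not Shifu's actual logic:

import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper reusing one key/value pair across all records,
// assuming tab-separated "columnNum<TAB>score" input lines.
public class ColumnScoreMapper extends Mapper<LongWritable, Text, IntWritable, DoubleWritable> {
    private final IntWritable outputKey = new IntWritable();
    private final DoubleWritable outputValue = new DoubleWritable();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] fields = value.toString().split("\t");
        outputKey.set(Integer.parseInt(fields[0]));
        outputValue.set(Double.parseDouble(fields[1]));
        // write() serializes the pair right away, so mutating it on the
        // next call does not corrupt previously emitted records.
        context.write(outputKey, outputValue);
    }
}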
Use of org.apache.hadoop.io.DoubleWritable in project goldenorb by jzachr.
From the class SampleDoubleMessageTest, method startServer:
/**
 * Starts the RPC server and, on first use, connects a client proxy to it.
 */
@SuppressWarnings("unchecked")
@Before
public void startServer() throws IOException {
    server = new RPCServer<DoubleMessage, DoubleWritable>(SERVER_PORT);
    server.start();
    Configuration conf = new Configuration();
    InetSocketAddress addr = new InetSocketAddress("localhost", SERVER_PORT);
    if (client == null) {
        client = (RPCProtocol<DoubleMessage, DoubleWritable>) RPC.waitForProxy(RPCProtocol.class, RPCProtocol.versionID, addr, conf);
    }
}
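A matching teardown keeps the port free between tests. This is a sketch under the assumption that GoldenOrb's RPCServer exposes a stop() method (Hadoop's underlying ipc.Server does); RPC.stopProxy is the stock Hadoop call for releasing the client side:

// Hedged sketch of the @After counterpart; assumes server.stop() exists.
@After
public void stopServer() {
    if (client != null) {
        RPC.stopProxy(client);
        client = null;
    }
    server.stop();
}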