use of org.apache.hadoop.io.FloatWritable in project flink by apache.
the class HiveShimV100 method javaToWritable.
Optional<Writable> javaToWritable(@Nonnull Object value) {
    Writable writable = null;
    // in case value is already a Writable
    if (value instanceof Writable) {
        writable = (Writable) value;
    } else if (value instanceof Boolean) {
        writable = new BooleanWritable((Boolean) value);
    } else if (value instanceof Byte) {
        writable = new ByteWritable((Byte) value);
    } else if (value instanceof Short) {
        writable = new ShortWritable((Short) value);
    } else if (value instanceof Integer) {
        writable = new IntWritable((Integer) value);
    } else if (value instanceof Long) {
        writable = new LongWritable((Long) value);
    } else if (value instanceof Float) {
        writable = new FloatWritable((Float) value);
    } else if (value instanceof Double) {
        writable = new DoubleWritable((Double) value);
    } else if (value instanceof String) {
        writable = new Text((String) value);
    } else if (value instanceof HiveChar) {
        writable = new HiveCharWritable((HiveChar) value);
    } else if (value instanceof HiveVarchar) {
        writable = new HiveVarcharWritable((HiveVarchar) value);
    } else if (value instanceof HiveDecimal) {
        writable = new HiveDecimalWritable((HiveDecimal) value);
    } else if (value instanceof Date) {
        writable = new DateWritable((Date) value);
    } else if (value instanceof Timestamp) {
        writable = new TimestampWritable((Timestamp) value);
    } else if (value instanceof BigDecimal) {
        HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) value);
        writable = new HiveDecimalWritable(hiveDecimal);
    } else if (value instanceof byte[]) {
        writable = new BytesWritable((byte[]) value);
    }
    return Optional.ofNullable(writable);
}
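The method returns an empty Optional for any unmapped type, so callers decide how to handle unsupported values. A minimal calling sketch (hypothetical: `shim` stands in for a HiveShimV100 instance, and the exception is illustrative, not Flink's actual error handling):

// "shim" is a hypothetical HiveShimV100 instance; the exception is illustrative.
// A Float input yields an org.apache.hadoop.io.FloatWritable.
Object javaValue = 3.14F;
Writable w = shim.javaToWritable(javaValue)
    .orElseThrow(() -> new IllegalArgumentException(
        "unsupported type: " + javaValue.getClass().getName()));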
use of org.apache.hadoop.io.FloatWritable in project goldenorb by jzachr.
the class SampleFloatMessageTest method startServer.
/**
 * Starts an RPCServer for FloatMessage/FloatWritable traffic and connects a
 * client proxy to it before each test.
 *
 * @throws IOException if the server cannot start or the client proxy cannot connect
 */
@SuppressWarnings("unchecked")
@Before
public void startServer() throws IOException {
    server = new RPCServer<FloatMessage, FloatWritable>(SERVER_PORT);
    server.start();
    Configuration conf = new Configuration();
    InetSocketAddress addr = new InetSocketAddress("localhost", SERVER_PORT);
    if (client == null) {
        client = (RPCProtocol<FloatMessage, FloatWritable>) RPC.waitForProxy(
            RPCProtocol.class, RPCProtocol.versionID, addr, conf);
    }
}
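Both type parameters here are Writables. As a standalone illustration of the contract the RPC layer relies on, a FloatWritable round-trips through Hadoop's serialization buffers like this (a sketch using only stock org.apache.hadoop.io classes, independent of goldenorb):

// Serialize a FloatWritable to an in-memory buffer, then read it back.
DataOutputBuffer out = new DataOutputBuffer();
new FloatWritable(3.14159F).write(out);
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
FloatWritable result = new FloatWritable();
result.readFields(in);
// result.get() now returns 3.14159F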
use of org.apache.hadoop.io.FloatWritable in project goldenorb by jzachr.
the class CheckPointDataTest method testCheckpointInput.
/**
 * Tests the CheckPointDataInput class by reading several different types of Writables from the checkpoint.
 * Asserts that the Writables read back from HDFS have the same values and types as the ones that were written.
 *
 * @throws Exception
 */
@Test
public void testCheckpointInput() throws Exception {
    int superStep = 0;
    int partition = 0;
    OrbConfiguration orbConf = new OrbConfiguration();
    orbConf.set("fs.default.name", "hdfs://localhost:" + cluster.getNameNodePort());
    orbConf.setJobNumber("0");
    orbConf.setFileOutputPath("test");
    CheckPointDataInput checkpointInput = new CheckPointDataInput(orbConf, superStep, partition);
    // Data is read on a FIFO basis
    IntWritable intInput = new IntWritable();
    intInput.readFields(checkpointInput);
    LongWritable longInput = new LongWritable();
    longInput.readFields(checkpointInput);
    Text textInput = new Text();
    textInput.readFields(checkpointInput);
    FloatWritable floatInput = new FloatWritable();
    floatInput.readFields(checkpointInput);
    checkpointInput.close();
    assertThat(checkpointInput, notNullValue());
    assertEquals(4, intInput.get());
    assertEquals(9223372036854775807L, longInput.get());
    assertEquals("test", textInput.toString());
    assertEquals(3.14159F, floatInput.get(), 0.0F);
}
use of org.apache.hadoop.io.FloatWritable in project goldenorb by jzachr.
the class CheckPointDataTest method testCheckpointOutput.
/**
 * Tests the CheckPointDataOutput class by writing several different types of Writables to the checkpoint.
 *
 * @throws Exception
 */
@Test
public void testCheckpointOutput() throws Exception {
    int superStep = 0;
    int partition = 0;
    OrbConfiguration orbConf = new OrbConfiguration();
    orbConf.set("fs.default.name", "hdfs://localhost:" + cluster.getNameNodePort());
    orbConf.setJobNumber("0");
    orbConf.setFileOutputPath("test");
    CheckPointDataOutput checkpointOutput = new CheckPointDataOutput(orbConf, superStep, partition);
    IntWritable intOutput = new IntWritable(4);
    intOutput.write(checkpointOutput);
    LongWritable longOutput = new LongWritable(9223372036854775807L);
    longOutput.write(checkpointOutput);
    Text textOutput = new Text("test");
    textOutput.write(checkpointOutput);
    FloatWritable floatOutput = new FloatWritable(3.14159F);
    floatOutput.write(checkpointOutput);
    checkpointOutput.close();
    assertThat(checkpointOutput, notNullValue());
}
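The two tests are mirror images: reads must happen in exactly the order the values were written. Condensed into a single round trip (a sketch built only from the calls shown above):

// Write one value, then read it back in the same (FIFO) order.
CheckPointDataOutput out = new CheckPointDataOutput(orbConf, superStep, partition);
new FloatWritable(3.14159F).write(out);
out.close();
CheckPointDataInput in = new CheckPointDataInput(orbConf, superStep, partition);
FloatWritable f = new FloatWritable();
f.readFields(in);
in.close();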
use of org.apache.hadoop.io.FloatWritable in project Cloud9 by lintool.
the class AnalyzeBigramRelativeFrequency method main.
@SuppressWarnings({ "static-access" })
public static void main(String[] args) {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg()
        .withDescription("input path").create(INPUT));
    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }
    if (!cmdline.hasOption(INPUT)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(AnalyzeBigramRelativeFrequency.class.getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        System.exit(-1);
    }
    String inputPath = cmdline.getOptionValue(INPUT);
    System.out.println("input path: " + inputPath);
    List<PairOfWritables<PairOfStrings, FloatWritable>> pairs =
        SequenceFileUtils.readDirectory(new Path(inputPath));
    List<PairOfWritables<PairOfStrings, FloatWritable>> list1 = Lists.newArrayList();
    List<PairOfWritables<PairOfStrings, FloatWritable>> list2 = Lists.newArrayList();
    for (PairOfWritables<PairOfStrings, FloatWritable> p : pairs) {
        PairOfStrings bigram = p.getLeftElement();
        if (bigram.getLeftElement().equals("light")) {
            list1.add(p);
        }
        if (bigram.getLeftElement().equals("contain")) {
            list2.add(p);
        }
    }
    Collections.sort(list1, new Comparator<PairOfWritables<PairOfStrings, FloatWritable>>() {
        public int compare(PairOfWritables<PairOfStrings, FloatWritable> e1,
            PairOfWritables<PairOfStrings, FloatWritable> e2) {
            if (e1.getRightElement().compareTo(e2.getRightElement()) == 0) {
                return e1.getLeftElement().compareTo(e2.getLeftElement());
            }
            return e2.getRightElement().compareTo(e1.getRightElement());
        }
    });
    Iterator<PairOfWritables<PairOfStrings, FloatWritable>> iter1 = Iterators.limit(list1.iterator(), 10);
    while (iter1.hasNext()) {
        PairOfWritables<PairOfStrings, FloatWritable> p = iter1.next();
        PairOfStrings bigram = p.getLeftElement();
        System.out.println(bigram + "\t" + p.getRightElement());
    }
    Collections.sort(list2, new Comparator<PairOfWritables<PairOfStrings, FloatWritable>>() {
        public int compare(PairOfWritables<PairOfStrings, FloatWritable> e1,
            PairOfWritables<PairOfStrings, FloatWritable> e2) {
            if (e1.getRightElement().compareTo(e2.getRightElement()) == 0) {
                return e1.getLeftElement().compareTo(e2.getLeftElement());
            }
            return e2.getRightElement().compareTo(e1.getRightElement());
        }
    });
    Iterator<PairOfWritables<PairOfStrings, FloatWritable>> iter2 = Iterators.limit(list2.iterator(), 10);
    while (iter2.hasNext()) {
        PairOfWritables<PairOfStrings, FloatWritable> p = iter2.next();
        PairOfStrings bigram = p.getLeftElement();
        System.out.println(bigram + "\t" + p.getRightElement());
    }
}
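The two anonymous comparators above are identical: descending by relative frequency, with ties broken ascending by bigram. A single shared comparator would remove the duplication (a sketch assuming Java 8+; not part of the original Cloud9 code):

// Sketch (Java 8+, not in the original source): one comparator for both sorts,
// sorting by frequency descending and by bigram ascending on ties.
Comparator<PairOfWritables<PairOfStrings, FloatWritable>> byFrequencyDesc = (e1, e2) -> {
    int cmp = e2.getRightElement().compareTo(e1.getRightElement());
    return cmp != 0 ? cmp : e1.getLeftElement().compareTo(e2.getLeftElement());
};
Collections.sort(list1, byFrequencyDesc);
Collections.sort(list2, byFrequencyDesc);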