Example usage of org.apache.hadoop.typedbytes.TypedBytesWritable in the Apache Hadoop project: the TypedBytesOutputReader class, initialize method.
@Override
public void initialize(PipeMapRed pipeMapRed) throws IOException {
  super.initialize(pipeMapRed);
  // Reusable holders that readKeyValue() will refill on each record.
  key = new TypedBytesWritable();
  value = new TypedBytesWritable();
  // Wrap the streaming process's output stream in a typed-bytes decoder.
  clientIn = pipeMapRed.getClientInput();
  in = new TypedBytesInput(clientIn);
}
Example usage of org.apache.hadoop.typedbytes.TypedBytesWritable in the Apache Hadoop project: the LoadTypedBytes class, run method.
/**
 * The main driver for <code>LoadTypedBytes</code>.
 *
 * Reads a stream of typed-bytes key/value pairs from standard input and
 * appends them to a new {@link SequenceFile} at the path given in
 * {@code args[0]}. The destination path must not already exist.
 *
 * @param args args[0] is the destination sequence-file path
 * @return 0 on success, 1 if no arguments were given, -1 if the path exists
 * @throws Exception if reading stdin or writing the sequence file fails
 */
public int run(String[] args) throws Exception {
  if (args.length == 0) {
    System.err.println("Too few arguments!");
    printUsage();
    return 1;
  }
  Path path = new Path(args[0]);
  FileSystem fs = path.getFileSystem(getConf());
  if (fs.exists(path)) {
    System.err.println("given path exists already!");
    return -1;
  }
  TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(System.in));
  // Use getConf() consistently (same Configuration the FileSystem was
  // resolved with above) rather than mixing it with the conf field.
  SequenceFile.Writer writer = SequenceFile.createWriter(fs, getConf(), path,
      TypedBytesWritable.class, TypedBytesWritable.class);
  try {
    TypedBytesWritable key = new TypedBytesWritable();
    TypedBytesWritable value = new TypedBytesWritable();
    byte[] rawKey = tbinput.readRaw();
    while (rawKey != null) {
      byte[] rawValue = tbinput.readRaw();
      // A key with no matching value means the input stream was truncated;
      // fail with a clear message instead of an NPE in value.set().
      if (rawValue == null) {
        throw new IOException("Truncated typed-bytes input: key without value");
      }
      key.set(rawKey, 0, rawKey.length);
      value.set(rawValue, 0, rawValue.length);
      writer.append(key, value);
      rawKey = tbinput.readRaw();
    }
  } finally {
    writer.close();
  }
  return 0;
}
Example usage of org.apache.hadoop.typedbytes.TypedBytesWritable in the Apache Hadoop project: the TestLoadTypedBytes class, testLoading method.
/**
 * End-to-end check: writes 100 typed-bytes (Long key, String value) pairs
 * to a stream, feeds that stream to {@link LoadTypedBytes} via a redirected
 * System.in, and verifies the resulting sequence file on a mini DFS cluster.
 */
@Test
public void testLoading() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
  FileSystem fs = cluster.getFileSystem();
  // Build the typed-bytes input entirely in memory.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  TypedBytesOutput tboutput = new TypedBytesOutput(new DataOutputStream(out));
  for (int i = 0; i < 100; i++) {
    // key (Long.valueOf over the deprecated Long(long) constructor)
    tboutput.write(Long.valueOf(i));
    // value
    tboutput.write("" + (10 * i));
  }
  // LoadTypedBytes reads from System.in, so redirect it for the duration
  // of the test and restore the original stream in the finally block.
  InputStream isBackup = System.in;
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  System.setIn(in);
  LoadTypedBytes loadtb = new LoadTypedBytes(conf);
  try {
    Path root = new Path("/typedbytestest");
    assertTrue(fs.mkdirs(root));
    assertTrue(fs.exists(root));
    String[] args = new String[1];
    args[0] = "/typedbytestest/test.seq";
    int ret = loadtb.run(args);
    assertEquals("Return value != 0.", 0, ret);
    Path file = new Path(root, "test.seq");
    assertTrue(fs.exists(file));
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
    try {
      int counter = 0;
      TypedBytesWritable key = new TypedBytesWritable();
      TypedBytesWritable value = new TypedBytesWritable();
      while (reader.next(key, value)) {
        assertEquals(Long.class, key.getValue().getClass());
        assertEquals(String.class, value.getValue().getClass());
        assertTrue("Invalid record.", Integer.parseInt(value.toString()) % 10 == 0);
        counter++;
      }
      assertEquals("Wrong number of records.", 100, counter);
    } finally {
      // Previously leaked: the reader was never closed.
      reader.close();
    }
  } finally {
    try {
      fs.close();
    } catch (Exception ignored) {
      // Best-effort close; a failure here must not mask a test failure.
    }
    System.setIn(isBackup);
    cluster.shutdown();
  }
}
Aggregations