Use of org.apache.hadoop.io.UTF8 in the elasticsearch-hadoop project by Elastic.
From the class WritableValueWriter, method write.
// Serializes a single Hadoop Writable into the Generator's output stream.
// Dispatches on the concrete Writable type; branch ORDER matters (e.g. the
// string types Text/UTF8 are checked before the numeric wrappers, and the
// catch-all "unknown type" handling must stay last). Returns Result.SUCCESFUL()
// on success, or propagates the first failing Result from a nested value.
// NOTE: "isSuccesful"/"SUCCESFUL"/"FAILED" are the existing (misspelled)
// Result API names and must not be changed here.
@Override
@SuppressWarnings({ "unchecked", "deprecation" })
public Result write(Writable writable, Generator generator) {
// Both a Java null and Hadoop's NullWritable singleton map to a null token.
if (writable == null || writable instanceof NullWritable) {
generator.writeNull();
} else if (writable instanceof Text) {
// Text already holds UTF-8 bytes; write them directly (no String copy).
Text text = (Text) writable;
generator.writeUTF8String(text.getBytes(), 0, text.getLength());
} else if (writable instanceof UTF8) {
// Legacy UTF8 type (deprecated in Hadoop) — same zero-copy byte write.
UTF8 utf8 = (UTF8) writable;
generator.writeUTF8String(utf8.getBytes(), 0, utf8.getLength());
} else if (WritableCompatUtil.isShortWritable(writable)) {
// ShortWritable handled reflectively via the compat util — presumably
// because the class is absent in some supported Hadoop versions (TODO confirm).
generator.writeNumber(WritableCompatUtil.unwrap(writable));
} else if (writable instanceof IntWritable) {
generator.writeNumber(((IntWritable) writable).get());
} else if (writable instanceof LongWritable) {
generator.writeNumber(((LongWritable) writable).get());
} else if (writable instanceof VLongWritable) {
generator.writeNumber(((VLongWritable) writable).get());
} else if (writable instanceof VIntWritable) {
generator.writeNumber(((VIntWritable) writable).get());
} else if (writable instanceof ByteWritable) {
generator.writeNumber(((ByteWritable) writable).get());
} else if (writable instanceof DoubleWritable) {
generator.writeNumber(((DoubleWritable) writable).get());
} else if (writable instanceof FloatWritable) {
generator.writeNumber(((FloatWritable) writable).get());
} else if (writable instanceof BooleanWritable) {
generator.writeBoolean(((BooleanWritable) writable).get());
} else if (writable instanceof BytesWritable) {
// Raw bytes are emitted as binary (typically base64 in JSON output).
// Use getLength(), not getBytes().length — the backing array may be padded.
BytesWritable bw = (BytesWritable) writable;
generator.writeBinary(bw.getBytes(), 0, bw.getLength());
} else if (writable instanceof MD5Hash) {
// MD5Hash has a stable hex toString(); serialize it as a plain string.
generator.writeString(writable.toString());
} else if (writable instanceof ArrayWritable) {
// Recurse into array elements; abort on the first nested failure so the
// caller sees which value could not be serialized.
generator.writeBeginArray();
for (Writable wrt : ((ArrayWritable) writable).get()) {
Result result = write(wrt, generator);
if (!result.isSuccesful()) {
return result;
}
}
generator.writeEndArray();
} else if (writable instanceof AbstractMapWritable) {
// MapWritable/SortedMapWritable both extend AbstractMapWritable and
// implement Map, so the unchecked cast is safe for known subclasses.
Map<Writable, Writable> map = (Map<Writable, Writable>) writable;
generator.writeBeginObject();
// ignore handling sets (which are just maps with null values)
for (Entry<Writable, Writable> entry : map.entrySet()) {
String fieldName = entry.getKey().toString();
// Honor include/exclude field filtering before emitting the entry.
if (shouldKeep(generator.getParentPath(), fieldName)) {
generator.writeFieldName(fieldName);
Result result = write(entry.getValue(), generator);
if (!result.isSuccesful()) {
return result;
}
}
}
generator.writeEndObject();
} else {
// Unrecognized Writable: either delegate to the extension hook or fail.
if (writeUnknownTypes) {
return handleUnknown(writable, generator);
}
return Result.FAILED(writable);
}
return Result.SUCCESFUL();
}
Use of org.apache.hadoop.io.UTF8 in the hadoop project by Apache.
From the class TestRPC, method testCallsInternal.
/**
 * Exercises the basic RPC round trip against an in-process server: starts a
 * server for {@code TestProtocol}, issues ping/echo/add/protobuf calls through
 * a client proxy, verifies the rpc and rpcDetailed metrics counters, checks
 * server-side exception propagation, and finally hammers the server with
 * {@code numThreads} concurrent {@link Transactions} runners over a small
 * socket send buffer to exercise large transfers.
 *
 * <p>Fix over the previous revision: several {@code assertEquals} calls passed
 * the arguments as (actual, expected); JUnit's contract is (expected, actual),
 * and the old order produced misleading failure messages.
 *
 * @param conf the Hadoop configuration used to build both server and proxy
 * @throws IOException if server construction or any RPC call fails
 */
private void testCallsInternal(Configuration conf) throws IOException {
    // Port 0 lets the OS pick a free port; the real address is queried below.
    Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
        .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0).build();
    TestProtocol proxy = null;
    try {
        server.start();
        InetSocketAddress addr = NetUtils.getConnectAddress(server);
        proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);
        proxy.ping();
        String stringResult = proxy.echo("foo");
        // JUnit convention: expected value first, actual second.
        assertEquals("foo", stringResult);
        stringResult = proxy.echo((String) null);
        assertEquals(null, stringResult);
        // Check rpcMetrics: 3 calls so far (ping + two echoes).
        MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
        assertCounter("RpcProcessingTimeNumOps", 3L, rb);
        assertCounterGt("SentBytes", 0L, rb);
        assertCounterGt("ReceivedBytes", 0L, rb);
        // Number of calls to echo method should be 2
        rb = getMetrics(server.rpcDetailedMetrics.name());
        assertCounter("EchoNumOps", 2L, rb);
        // Number of calls to ping method should be 1
        assertCounter("PingNumOps", 1L, rb);
        String[] stringResults = proxy.echo(new String[] { "foo", "bar" });
        assertTrue(Arrays.equals(stringResults, new String[] { "foo", "bar" }));
        stringResults = proxy.echo((String[]) null);
        assertTrue(Arrays.equals(stringResults, null));
        // Legacy UTF8 Writable must survive the round trip intact.
        UTF8 utf8Result = (UTF8) proxy.echo(new UTF8("hello world"));
        assertEquals(new UTF8("hello world"), utf8Result);
        utf8Result = (UTF8) proxy.echo((UTF8) null);
        assertEquals(null, utf8Result);
        int intResult = proxy.add(1, 2);
        assertEquals(3, intResult);
        intResult = proxy.add(new int[] { 1, 2 });
        assertEquals(3, intResult);
        // Test protobufs: result must be value-equal but a distinct instance
        // (proves the message was actually serialized and deserialized).
        EnumDescriptorProto sendProto = EnumDescriptorProto.newBuilder().setName("test").build();
        EnumDescriptorProto retProto = proxy.exchangeProto(sendProto);
        assertEquals(sendProto, retProto);
        assertNotSame(sendProto, retProto);
        // Server-side exceptions must surface on the client as IOException.
        boolean caught = false;
        try {
            proxy.error();
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Caught " + e);
            }
            caught = true;
        }
        assertTrue(caught);
        rb = getMetrics(server.rpcDetailedMetrics.name());
        assertCounter("IOExceptionNumOps", 1L, rb);
        proxy.testServerGet();
        // create multiple threads and make them do large data transfers
        System.out.println("Starting multi-threaded RPC test...");
        // Tiny send buffer forces the large payloads to be written in chunks.
        server.setSocketSendBufSize(1024);
        Thread[] threadId = new Thread[numThreads];
        for (int i = 0; i < numThreads; i++) {
            Transactions trans = new Transactions(proxy, datasize);
            threadId[i] = new Thread(trans, "TransactionThread-" + i);
            threadId[i].start();
        }
        // wait for all transactions to get over
        System.out.println("Waiting for all threads to finish RPCs...");
        for (int i = 0; i < numThreads; i++) {
            try {
                threadId[i].join();
            } catch (InterruptedException e) {
                // Deliberate retry of the same join; the interrupt status is
                // swallowed so the test keeps waiting for worker completion.
                i--;
            }
        }
    } finally {
        // Always tear down, even when an assertion or RPC call failed.
        server.stop();
        if (proxy != null)
            RPC.stopProxy(proxy);
    }
}
Aggregations