Search in sources:

Example 1 with UTF8

Use of org.apache.hadoop.io.UTF8 in the project elasticsearch-hadoop by Elastic.

From the class WritableValueWriter, the method write:

@Override
@SuppressWarnings({ "unchecked", "deprecation" })
public Result write(Writable writable, Generator generator) {
    // Serializes a Hadoop Writable into the JSON generator. Each guard below
    // maps one Writable flavor onto the matching generator call and returns
    // immediately; only the array/map branches can propagate a nested failure.
    if (writable == null || writable instanceof NullWritable) {
        generator.writeNull();
        return Result.SUCCESFUL();
    }
    if (writable instanceof Text) {
        Text txt = (Text) writable;
        // write the raw UTF-8 backing bytes directly, avoiding a String copy
        generator.writeUTF8String(txt.getBytes(), 0, txt.getLength());
        return Result.SUCCESFUL();
    }
    if (writable instanceof UTF8) {
        UTF8 legacyText = (UTF8) writable;
        generator.writeUTF8String(legacyText.getBytes(), 0, legacyText.getLength());
        return Result.SUCCESFUL();
    }
    if (WritableCompatUtil.isShortWritable(writable)) {
        generator.writeNumber(WritableCompatUtil.unwrap(writable));
        return Result.SUCCESFUL();
    }
    if (writable instanceof IntWritable) {
        generator.writeNumber(((IntWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof LongWritable) {
        generator.writeNumber(((LongWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof VLongWritable) {
        generator.writeNumber(((VLongWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof VIntWritable) {
        generator.writeNumber(((VIntWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof ByteWritable) {
        generator.writeNumber(((ByteWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof DoubleWritable) {
        generator.writeNumber(((DoubleWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof FloatWritable) {
        generator.writeNumber(((FloatWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof BooleanWritable) {
        generator.writeBoolean(((BooleanWritable) writable).get());
        return Result.SUCCESFUL();
    }
    if (writable instanceof BytesWritable) {
        BytesWritable raw = (BytesWritable) writable;
        generator.writeBinary(raw.getBytes(), 0, raw.getLength());
        return Result.SUCCESFUL();
    }
    if (writable instanceof MD5Hash) {
        // MD5 digests are emitted in their canonical hex string form
        generator.writeString(writable.toString());
        return Result.SUCCESFUL();
    }
    if (writable instanceof ArrayWritable) {
        generator.writeBeginArray();
        for (Writable element : ((ArrayWritable) writable).get()) {
            Result nested = write(element, generator);
            if (!nested.isSuccesful()) {
                // abort on the first element that fails to serialize
                return nested;
            }
        }
        generator.writeEndArray();
        return Result.SUCCESFUL();
    }
    if (writable instanceof AbstractMapWritable) {
        Map<Writable, Writable> entries = (Map<Writable, Writable>) writable;
        generator.writeBeginObject();
        // ignore handling sets (which are just maps with null values)
        for (Entry<Writable, Writable> entry : entries.entrySet()) {
            String fieldName = entry.getKey().toString();
            if (shouldKeep(generator.getParentPath(), fieldName)) {
                generator.writeFieldName(fieldName);
                Result nested = write(entry.getValue(), generator);
                if (!nested.isSuccesful()) {
                    return nested;
                }
            }
        }
        generator.writeEndObject();
        return Result.SUCCESFUL();
    }
    // unrecognized Writable: delegate to the fallback handler if allowed
    return writeUnknownTypes ? handleUnknown(writable, generator) : Result.FAILED(writable);
}
Also used : VIntWritable(org.apache.hadoop.io.VIntWritable) NullWritable(org.apache.hadoop.io.NullWritable) AbstractMapWritable(org.apache.hadoop.io.AbstractMapWritable) VLongWritable(org.apache.hadoop.io.VLongWritable) Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) VIntWritable(org.apache.hadoop.io.VIntWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) IntWritable(org.apache.hadoop.io.IntWritable) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.io.DoubleWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) NullWritable(org.apache.hadoop.io.NullWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Entry(java.util.Map.Entry) ArrayWritable(org.apache.hadoop.io.ArrayWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) UTF8(org.apache.hadoop.io.UTF8) VLongWritable(org.apache.hadoop.io.VLongWritable) AbstractMapWritable(org.apache.hadoop.io.AbstractMapWritable) MD5Hash(org.apache.hadoop.io.MD5Hash) VLongWritable(org.apache.hadoop.io.VLongWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) Map(java.util.Map) VIntWritable(org.apache.hadoop.io.VIntWritable) IntWritable(org.apache.hadoop.io.IntWritable)

Example 2 with UTF8

Use of org.apache.hadoop.io.UTF8 in the project hadoop by Apache.

From the class TestRPC, the method testCallsInternal:

/**
 * End-to-end RPC smoke test: starts a server, exercises echo/add/proto/error
 * calls through a proxy, checks the RPC metrics counters, then hammers the
 * server with concurrent large transfers before shutting everything down.
 *
 * Fixes over the previous revision:
 *  - assertEquals arguments follow the JUnit (expected, actual) order so
 *    failure messages report the values correctly;
 *  - the join-retry loop no longer swallows InterruptedException silently:
 *    the interrupt status is restored once all threads have been joined.
 */
private void testCallsInternal(Configuration conf) throws IOException {
    Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class).setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0).build();
    TestProtocol proxy = null;
    try {
        server.start();
        InetSocketAddress addr = NetUtils.getConnectAddress(server);
        proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);
        proxy.ping();
        String stringResult = proxy.echo("foo");
        assertEquals("foo", stringResult);
        stringResult = proxy.echo((String) null);
        assertEquals(null, stringResult);
        // Check rpcMetrics
        MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
        assertCounter("RpcProcessingTimeNumOps", 3L, rb);
        assertCounterGt("SentBytes", 0L, rb);
        assertCounterGt("ReceivedBytes", 0L, rb);
        // Number of calls to echo method should be 2
        rb = getMetrics(server.rpcDetailedMetrics.name());
        assertCounter("EchoNumOps", 2L, rb);
        // Number of calls to ping method should be 1
        assertCounter("PingNumOps", 1L, rb);
        String[] stringResults = proxy.echo(new String[] { "foo", "bar" });
        assertTrue(Arrays.equals(stringResults, new String[] { "foo", "bar" }));
        stringResults = proxy.echo((String[]) null);
        assertTrue(Arrays.equals(stringResults, null));
        // legacy UTF8 Writables must round-trip through the wire format
        UTF8 utf8Result = (UTF8) proxy.echo(new UTF8("hello world"));
        assertEquals(new UTF8("hello world"), utf8Result);
        utf8Result = (UTF8) proxy.echo((UTF8) null);
        assertEquals(null, utf8Result);
        int intResult = proxy.add(1, 2);
        assertEquals(3, intResult);
        intResult = proxy.add(new int[] { 1, 2 });
        assertEquals(3, intResult);
        // Test protobufs
        EnumDescriptorProto sendProto = EnumDescriptorProto.newBuilder().setName("test").build();
        EnumDescriptorProto retProto = proxy.exchangeProto(sendProto);
        assertEquals(sendProto, retProto);
        // the returned message must be a distinct deserialized instance
        assertNotSame(sendProto, retProto);
        boolean caught = false;
        try {
            proxy.error();
        } catch (IOException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Caught " + e);
            }
            caught = true;
        }
        assertTrue(caught);
        rb = getMetrics(server.rpcDetailedMetrics.name());
        assertCounter("IOExceptionNumOps", 1L, rb);
        proxy.testServerGet();
        // create multiple threads and make them do large data transfers
        System.out.println("Starting multi-threaded RPC test...");
        server.setSocketSendBufSize(1024);
        Thread[] threadId = new Thread[numThreads];
        for (int i = 0; i < numThreads; i++) {
            Transactions trans = new Transactions(proxy, datasize);
            threadId[i] = new Thread(trans, "TransactionThread-" + i);
            threadId[i].start();
        }
        // wait for all transactions to get over
        System.out.println("Waiting for all threads to finish RPCs...");
        boolean interrupted = false;
        for (int i = 0; i < numThreads; i++) {
            try {
                threadId[i].join();
            } catch (InterruptedException e) {
                // keep waiting for this thread, but remember the interrupt;
                // re-interrupting here would make join() throw immediately
                interrupted = true;
                i--;
            }
        }
        if (interrupted) {
            // restore the interrupt status swallowed during the joins
            Thread.currentThread().interrupt();
        }
    } finally {
        server.stop();
        if (proxy != null)
            RPC.stopProxy(proxy);
    }
}
Also used : EnumDescriptorProto(com.google.protobuf.DescriptorProtos.EnumDescriptorProto) InetSocketAddress(java.net.InetSocketAddress) IOException(java.io.IOException) UTF8(org.apache.hadoop.io.UTF8) MetricsRecordBuilder(org.apache.hadoop.metrics2.MetricsRecordBuilder)

Aggregations

UTF8 (org.apache.hadoop.io.UTF8)2 EnumDescriptorProto (com.google.protobuf.DescriptorProtos.EnumDescriptorProto)1 IOException (java.io.IOException)1 InetSocketAddress (java.net.InetSocketAddress)1 Map (java.util.Map)1 Entry (java.util.Map.Entry)1 AbstractMapWritable (org.apache.hadoop.io.AbstractMapWritable)1 ArrayWritable (org.apache.hadoop.io.ArrayWritable)1 BooleanWritable (org.apache.hadoop.io.BooleanWritable)1 ByteWritable (org.apache.hadoop.io.ByteWritable)1 BytesWritable (org.apache.hadoop.io.BytesWritable)1 DoubleWritable (org.apache.hadoop.io.DoubleWritable)1 FloatWritable (org.apache.hadoop.io.FloatWritable)1 IntWritable (org.apache.hadoop.io.IntWritable)1 LongWritable (org.apache.hadoop.io.LongWritable)1 MD5Hash (org.apache.hadoop.io.MD5Hash)1 NullWritable (org.apache.hadoop.io.NullWritable)1 Text (org.apache.hadoop.io.Text)1 VIntWritable (org.apache.hadoop.io.VIntWritable)1 VLongWritable (org.apache.hadoop.io.VLongWritable)1