Use of java.io.DataInput in project druid (by druid-io): class StringArrayWritable, method fromBytes.
/**
 * Deserializes an {@link InputRow} from the byte layout produced by the matching
 * serializer: a long timestamp, then a vint-counted list of dimensions (name plus
 * string-array values), then a vint-counted list of metrics whose on-wire encoding
 * depends on the aggregator type resolved via {@code getType}.
 *
 * @param data serialized row bytes
 * @param aggs aggregator factories used to resolve each metric's type
 * @return the reconstructed {@link MapBasedInputRow}
 * @throws RuntimeException wrapping any {@link IOException} from the byte stream
 */
public static final InputRow fromBytes(byte[] data, AggregatorFactory[] aggs) {
try {
DataInput in = ByteStreams.newDataInput(data);
//Read timestamp
long timestamp = in.readLong();
Map<String, Object> event = Maps.newHashMap();
//Read dimensions
List<String> dimensions = Lists.newArrayList();
int dimNum = WritableUtils.readVInt(in);
for (int i = 0; i < dimNum; i++) {
String dimension = readString(in);
dimensions.add(dimension);
List<String> dimensionValues = readStringArray(in);
if (dimensionValues == null) {
continue;
}
// Single-valued dimensions are stored unwrapped (plain String, not a list).
if (dimensionValues.size() == 1) {
event.put(dimension, dimensionValues.get(0));
} else {
event.put(dimension, dimensionValues);
}
}
//Read metrics
int metricSize = WritableUtils.readVInt(in);
for (int i = 0; i < metricSize; i++) {
String metric = readString(in);
String type = getType(metric, aggs, i);
switch (type) {
case "float":
event.put(metric, in.readFloat());
break;
case "long":
event.put(metric, WritableUtils.readVLong(in));
break;
default:
// Complex metrics round-trip through their registered serde.
ComplexMetricSerde serde = getComplexMetricSerde(type);
byte[] value = readBytes(in);
event.put(metric, serde.fromBytes(value, 0, value.length));
}
}
return new MapBasedInputRow(timestamp, dimensions, event);
} catch (IOException ex) {
// Throwables.propagate is deprecated; IOException is checked, so propagate
// would have wrapped it in a RuntimeException anyway — do so explicitly.
throw new RuntimeException(ex);
}
}
Use of java.io.DataInput in project hbase (by apache): class AccessControlLists, method readPermissions.
/**
* Reads a set of permissions as {@link org.apache.hadoop.io.Writable} instances from the input
* stream.
*/
/**
 * Reads a set of permissions as {@link org.apache.hadoop.io.Writable} instances from the input
 * stream.
 */
public static ListMultimap<String, TablePermission> readPermissions(byte[] data, Configuration conf) throws DeserializationException {
  if (!ProtobufUtil.isPBMagicPrefix(data)) {
    // Legacy Writable encoding: an int count followed by (user, permissions) pairs.
    // TODO: We have to re-write non-PB data as PB encoded. Otherwise we will carry old Writables
    // forever (here and a couple of other places).
    ListMultimap<String, TablePermission> result = ArrayListMultimap.create();
    try {
      DataInput input = new DataInputStream(new ByteArrayInputStream(data));
      int entryCount = input.readInt();
      for (int idx = 0; idx < entryCount; idx++) {
        String userName = Text.readString(input);
        result.putAll(userName, readWritablePermissions(input, conf));
      }
    } catch (IOException | ClassNotFoundException e) {
      throw new DeserializationException(e);
    }
    return result;
  }
  // Protobuf encoding: skip the magic prefix and parse the remainder.
  int magicLen = ProtobufUtil.lengthOfPBMagic();
  try {
    AccessControlProtos.UsersAndPermissions.Builder builder = AccessControlProtos.UsersAndPermissions.newBuilder();
    ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
    return AccessControlUtil.toUserTablePermissions(builder.build());
  } catch (IOException e) {
    throw new DeserializationException(e);
  }
}
Use of java.io.DataInput in project hadoop (by apache): class TestTypedBytesWritable, method testIO.
public void testIO() throws IOException {
  // Serialize a writable holding an int value into an in-memory buffer.
  TypedBytesWritable original = new TypedBytesWritable();
  original.setValue(12345);
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  original.write(new DataOutputStream(buffer));

  // Deserialize from the same bytes and verify round-trip equality.
  DataInput input = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
  TypedBytesWritable roundTripped = new TypedBytesWritable();
  roundTripped.readFields(input);
  assertEquals(original, roundTripped);
}
Use of java.io.DataInput in project netty (by netty): class NioSocketChannelTest, method testFlushAfterGatheredFlush.
/**
 * Reproduces the issue #1679: a writeAndFlush issued from a write-completion
 * listener during a gathering write must still reach the wire.
 */
@Test
public void testFlushAfterGatheredFlush() throws Exception {
NioEventLoopGroup group = new NioEventLoopGroup(1);
try {
ServerBootstrap sb = new ServerBootstrap();
sb.group(group).channel(NioServerSocketChannel.class);
sb.childHandler(new ChannelInboundHandlerAdapter() {
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
// Trigger a gathering write by writing two buffers.
ctx.write(Unpooled.wrappedBuffer(new byte[] { 'a' }));
ChannelFuture f = ctx.write(Unpooled.wrappedBuffer(new byte[] { 'b' }));
f.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
// This message must be flushed
ctx.writeAndFlush(Unpooled.wrappedBuffer(new byte[] { 'c' }));
}
});
ctx.flush();
}
});
SocketAddress address = sb.bind(0).sync().channel().localAddress();
// try-with-resources: previously the socket leaked if readFully or the
// assertion threw, since close() was only reached on the success path.
try (Socket s = new Socket(NetUtil.LOCALHOST, ((InetSocketAddress) address).getPort())) {
DataInput in = new DataInputStream(s.getInputStream());
byte[] buf = new byte[3];
in.readFully(buf);
assertThat(new String(buf, CharsetUtil.US_ASCII), is("abc"));
}
} finally {
group.shutdownGracefully().sync();
}
}
Use of java.io.DataInput in project che (by eclipse): class FileLockSerializerTest, method readsLockObjectWithExpirationData.
@Test
public void readsLockObjectWithExpirationData() throws Exception {
  // Stub a DataInput that yields a lock token followed by an expiration timestamp.
  String expectedToken = Long.toString(System.currentTimeMillis());
  long expectedExpiration = System.currentTimeMillis() + 10000;
  DataInput input = mock(DataInput.class);
  when(input.readUTF()).thenReturn(expectedToken);
  when(input.readLong()).thenReturn(expectedExpiration);

  // The serializer should reconstruct an equivalent FileLock from those two fields.
  FileLock deserialized = lockSerializer.read(input);
  assertEquals(new FileLock(expectedToken, expectedExpiration), deserialized);
}
Aggregations