Use of java.io.DataInputStream in the jvm-serializers project (by eishay): class ProtoServerHandler, method handle.
/**
 * Serves one RPC exchange over the given streams: reads a method index and a
 * length-prefixed request from {@code is}, dispatches it through
 * {@code callMethod}, and the callback writes the length-prefixed serialized
 * response to {@code os}.
 */
void handle(final OutputStream os, final InputStream is) throws IOException {
    // Response path: length-prefix the serialized message and flush immediately.
    RpcCallback<Message> done = new RpcCallback<Message>() {
        DataOutputStream responseOut = new DataOutputStream(os);

        public void run(Message content) {
            try {
                byte[] payload = _serializer.serialize((MediaContent) content);
                responseOut.writeInt(payload.length);
                responseOut.write(payload);
                responseOut.flush();
            } catch (Exception e) {
                // NOTE(review): failures are printed, not propagated — confirm intended.
                e.printStackTrace();
            }
        }
    };

    // Request wire format: method index, then payload length, then payload bytes.
    DataInputStream requestIn = new DataInputStream(is);
    int methodIndex = requestIn.readInt();
    MethodDescriptor method = getDescriptor().getMethods().get(methodIndex);
    byte[] requestBytes = new byte[requestIn.readInt()];
    requestIn.readFully(requestBytes);
    Message request =
        getRequestPrototype(method).newBuilderForType().mergeFrom(requestBytes).build();
    callMethod(method, null, request, done);
}
Use of java.io.DataInputStream in the buck project (by facebook): class HttpArtifactCacheBinaryProtocolTest, method testStoreRequest.
@Test
public void testStoreRequest() throws IOException {
    // Fixture: two rule keys, a small payload, and a single metadata entry.
    final RuleKey ruleKey = new RuleKey("00000000010000000000008000000000");
    final RuleKey ruleKey2 = new RuleKey("90000000000000000000008000000005");
    final String data = "data";
    ImmutableMap<String, String> metadata = ImmutableMap.of("metaKey", "metaValue");
    ByteSource payloadSource = new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            return new ByteArrayInputStream(data.getBytes(Charsets.UTF_8));
        }
    };
    HttpArtifactCacheBinaryProtocol.StoreRequest storeRequest =
        new HttpArtifactCacheBinaryProtocol.StoreRequest(
            ArtifactInfo.builder().addRuleKeys(ruleKey, ruleKey2).setMetadata(metadata).build(),
            payloadSource);

    // Serialize the request, then parse it back and verify the round trip.
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    storeRequest.write(serialized);
    ByteArrayOutputStream parsedPayload = new ByteArrayOutputStream();
    StoreResponseReadResult readStoreRequest =
        HttpArtifactCacheBinaryProtocol.readStoreRequest(
            new DataInputStream(new ByteArrayInputStream(serialized.toByteArray())),
            parsedPayload);

    assertThat(readStoreRequest.getRuleKeys(), Matchers.containsInAnyOrder(ruleKey, ruleKey2));
    assertThat(readStoreRequest.getMetadata(), Matchers.equalTo(metadata));
    assertThat(parsedPayload.toByteArray(), Matchers.equalTo(data.getBytes(Charsets.UTF_8)));
}
Use of java.io.DataInputStream in the buck project (by facebook): class HttpArtifactCacheBinaryProtocolTest, method testReadFetchResponse.
@Test
public void testReadFetchResponse() throws IOException {
    // Golden value for the full wire encoding of this fetch response.
    final String base64EncodedData =
        "AAAALgAAAAEAIDAwMDAwMDAwMDEwMDAwMDAwMDAwMDA4MDAwMDAwMDAwAAAAANcwdr5kYXRh";
    final RuleKey ruleKey = new RuleKey("00000000010000000000008000000000");
    final String data = "data";

    // Build the expected bytes: length-prefixed metadata header followed by the payload.
    byte[] expectedData;
    try (ByteArrayOutputStream out = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(out)) {
        byte[] metadata =
            HttpArtifactCacheBinaryProtocol.createMetadataHeader(
                ImmutableSet.of(ruleKey),
                ImmutableMap.of(),
                ByteSource.wrap(data.getBytes(Charsets.UTF_8)));
        dataOut.writeInt(metadata.length);
        dataOut.write(metadata);
        dataOut.write(data.getBytes(Charsets.UTF_8));
        expectedData = out.toByteArray();
    }
    assertThat(expectedData, Matchers.equalTo(BaseEncoding.base64().decode(base64EncodedData)));

    // Parse those bytes back and verify every field of the read result.
    try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        DataInputStream inputStream =
            new DataInputStream(new ByteArrayInputStream(expectedData))) {
        FetchResponseReadResult result =
            HttpArtifactCacheBinaryProtocol.readFetchResponse(inputStream, outputStream);
        assertThat(result.getRuleKeys(), Matchers.contains(ruleKey));
        assertThat(outputStream.toByteArray(), Matchers.equalTo(data.getBytes(Charsets.UTF_8)));
        assertThat(result.getActualHashCode(), Matchers.equalTo(HashCode.fromString("d73076be")));
        assertThat(result.getExpectedHashCode(), Matchers.equalTo(HashCode.fromString("d73076be")));
        assertThat(result.getMetadata(), Matchers.anEmptyMap());
        assertThat(result.getResponseSizeBytes(), Matchers.equalTo(4L));
    }
}
Use of java.io.DataInputStream in the druid project (by druid-io): class IntermediateLongSupplierSerializer, method makeDelegate.
/**
 * Picks the long-encoding strategy from the observed value statistics, builds
 * the matching delegate serializer, and replays the buffered values from the
 * temp file into it.
 */
private void makeDelegate() throws IOException {
    // Range of the observed values; -1 marks overflow (range too wide for delta encoding).
    long delta;
    try {
        delta = LongMath.checkedSubtract(maxVal, minVal);
    } catch (ArithmeticException e) {
        delta = -1;
    }

    final CompressionFactory.LongEncodingWriter writer;
    if (uniqueValues.size() <= CompressionFactory.MAX_TABLE_SIZE) {
        // Few distinct values: a lookup table is the most compact encoding.
        writer = new TableLongEncodingWriter(uniqueValues);
    } else if (delta != -1 && delta != Long.MAX_VALUE) {
        // Range fits: store offsets from minVal.
        writer = new DeltaLongEncodingWriter(minVal, delta);
    } else {
        // Fallback: raw longs.
        writer = new LongsLongEncodingWriter(order);
    }

    delegate = (compression == CompressedObjectStrategy.CompressionStrategy.NONE)
        ? new EntireLayoutLongSupplierSerializer(ioPeon, filenameBase, order, writer)
        : new BlockLayoutLongSupplierSerializer(ioPeon, filenameBase, order, writer, compression);

    // Replay the temp file into the delegate. NOTE(review): using available() as
    // the EOF check relies on the buffered file stream reporting remaining bytes —
    // it is not a general end-of-stream test; confirm tempFile is a local file.
    try (DataInputStream tempIn =
        new DataInputStream(new BufferedInputStream(ioPeon.makeInputStream(tempFile)))) {
        delegate.open();
        while (tempIn.available() > 0) {
            delegate.add(tempIn.readLong());
        }
    }
}
Use of java.io.DataInputStream in the deeplearning4j project (by deeplearning4j): class ConnectionCosts, method read.
/**
 * Deserializes a {@code ConnectionCosts} table from the stream: an int size
 * followed by a byte buffer (read via {@code ByteBufferIO}) viewed as shorts.
 */
private static ConnectionCosts read(InputStream input) throws IOException {
    DataInputStream in = new DataInputStream(new BufferedInputStream(input));
    final int size = in.readInt();
    final ShortBuffer costs = ByteBufferIO.read(in).asShortBuffer();
    return new ConnectionCosts(size, costs);
}
Aggregations