Example usage of java.io.DataInputStream in the Apache Hadoop project: the readFields method of the NMTokenIdentifierNewForTest class.
/**
 * Deserializes this token identifier by draining the remaining bytes of the
 * stream and parsing them as an {@code NMTokenIdentifierNewProto}.
 *
 * @param in the input to read from; assumed to actually be a
 *           {@link DataInputStream} — a ClassCastException is raised otherwise
 *           (TODO confirm callers always supply one)
 * @throws IOException if the stream cannot be read or the bytes are not a
 *                     valid protobuf message
 */
@Override
public void readFields(DataInput in) throws IOException {
    byte[] identifierBytes = IOUtils.toByteArray((DataInputStream) in);
    proto = NMTokenIdentifierNewProto.parseFrom(identifierBytes);
}
Example usage of java.io.DataInputStream in the Apache HBase project: the prepareDecoding method of the HFileBlockDefaultDecodingContext class.
// Decrypts (if the file context has an encryption context) and then
// decompresses (if the file context has a compression algorithm) the on-disk
// block bytes into blockBufferWithoutHeader. Statement order matters here:
// the IV must be consumed from the stream before the decryption stream is
// layered on, and the size bookkeeping must happen before decompression.
@Override
public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader, ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
// dataInputStream may be re-wrapped below with a decryption stream; keep the
// raw byteBuffInputStream reference so both layers get closed in finally.
InputStream dataInputStream = new DataInputStream(byteBuffInputStream);
try {
Encryption.Context cryptoContext = fileContext.getEncryptionContext();
if (cryptoContext != Encryption.Context.NONE) {
Cipher cipher = cryptoContext.getCipher();
Decryptor decryptor = cipher.getDecryptor();
decryptor.setKey(cryptoContext.getKey());
// Encrypted block format:
// +--------------------------+
// | byte iv length |
// +--------------------------+
// | iv data ... |
// +--------------------------+
// | encrypted block data ... |
// +--------------------------+
// All encrypted blocks are expected to carry a nonzero IV length; an IV
// length of zero means the encoding context had 0 bytes of plaintext to
// encode, so there is nothing to decrypt.
// NOTE(review): read() returns -1 at EOF, which would silently skip the
// branch AND make the size adjustment below add 1 byte — confirm callers
// never hand an empty stream here.
int ivLength = dataInputStream.read();
if (ivLength > 0) {
byte[] iv = new byte[ivLength];
IOUtils.readFully(dataInputStream, iv);
decryptor.setIv(iv);
decryptor.reset();
// From here on, reads from dataInputStream yield decrypted plaintext.
dataInputStream = decryptor.createDecryptionStream(dataInputStream);
}
// The length byte and the IV itself are overhead, not block payload.
onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
}
Compression.Algorithm compression = fileContext.getCompression();
// Both branches below write into the backing array directly.
assert blockBufferWithoutHeader.hasArray();
if (compression != Compression.Algorithm.NONE) {
Compression.decompress(blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, compression);
} else {
// No compression: the (possibly decrypted) bytes are copied verbatim.
IOUtils.readFully(dataInputStream, blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
}
} finally {
// Close both layers explicitly; dataInputStream may be a decryption wrapper
// whose close() behavior for the underlying stream is not guaranteed.
byteBuffInputStream.close();
dataInputStream.close();
}
}
Example usage of java.io.DataInputStream in the Apache HBase project: the readPermissions method of the AccessControlLists class.
/**
 * Deserializes a user-to-table-permission multimap from {@code data}.
 * <p>
 * Data carrying the protobuf magic prefix is parsed as a
 * {@code UsersAndPermissions} message; anything else is treated as the legacy
 * {@link org.apache.hadoop.io.Writable} encoding (an int count followed by
 * {@code count} pairs of user name and permission list).
 *
 * @param data serialized permissions, in either PB or legacy Writable form
 * @param conf configuration passed through to the legacy Writable reader
 * @return a multimap from user name to that user's table permissions
 * @throws DeserializationException if the bytes cannot be decoded in either format
 */
public static ListMultimap<String, TablePermission> readPermissions(byte[] data, Configuration conf) throws DeserializationException {
    if (!ProtobufUtil.isPBMagicPrefix(data)) {
        // TODO: We have to re-write non-PB data as PB encoded. Otherwise we will carry old Writables
        // forever (here and a couple of other places).
        ListMultimap<String, TablePermission> perms = ArrayListMultimap.create();
        try {
            // ByteArrayInputStream needs no explicit close.
            DataInput in = new DataInputStream(new ByteArrayInputStream(data));
            int entryCount = in.readInt();
            for (int i = 0; i < entryCount; i++) {
                String user = Text.readString(in);
                perms.putAll(user, readWritablePermissions(in, conf));
            }
        } catch (IOException | ClassNotFoundException e) {
            throw new DeserializationException(e);
        }
        return perms;
    }
    // PB path: skip the magic prefix, then merge the remainder into the builder.
    int pblen = ProtobufUtil.lengthOfPBMagic();
    try {
        AccessControlProtos.UsersAndPermissions.Builder builder = AccessControlProtos.UsersAndPermissions.newBuilder();
        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
        return AccessControlUtil.toUserTablePermissions(builder.build());
    } catch (IOException e) {
        throw new DeserializationException(e);
    }
}
Example usage of java.io.DataInputStream in the Apache Hadoop project: the readFields method of the ClientToAMTokenIdentifierForTest class.
/**
 * Rebuilds this identifier from the remainder of the input stream, which is
 * expected to hold a serialized {@code ClientToAMTokenIdentifierForTestProto}.
 *
 * @param in input to consume; presumed to be a {@link DataInputStream}
 *           (the unconditional cast fails otherwise — verify against callers)
 * @throws IOException on read failure or malformed protobuf content
 */
@Override
public void readFields(DataInput in) throws IOException {
    DataInputStream stream = (DataInputStream) in;
    byte[] serialized = IOUtils.toByteArray(stream);
    proto = ClientToAMTokenIdentifierForTestProto.parseFrom(serialized);
}
Example usage of java.io.DataInputStream in the Apache HBase project: the negotiateCryptoAes method of the BlockingRpcConnection class.
/**
 * Switches this connection over to Crypto AES encryption after SASL
 * negotiation, using the cipher metadata agreed with the server.
 *
 * @param cryptoCipherMeta negotiated cipher parameters from the server
 * @throws IOException if the cipher cannot be initialized or the SASL
 *                     streams cannot be obtained
 */
private void negotiateCryptoAes(RPCProtos.CryptoCipherMeta cryptoCipherMeta) throws IOException {
    // Initialize the Crypto AES cipher from the negotiated metadata first,
    // so the streams fetched below are wrapped for encryption.
    saslRpcClient.initCryptoCipher(cryptoCipherMeta, this.rpcClient.conf);
    // Re-wrap both directions of the connection; all subsequent RPC traffic
    // goes through these buffered, AES-protected streams.
    InputStream cryptoIn = saslRpcClient.getInputStream();
    OutputStream cryptoOut = saslRpcClient.getOutputStream();
    this.in = new DataInputStream(new BufferedInputStream(cryptoIn));
    this.out = new DataOutputStream(new BufferedOutputStream(cryptoOut));
}
Aggregations