
Example 91 with DataOutputStream

Use of java.io.DataOutputStream in project hadoop by apache.

From the class ZKDelegationTokenSecretManager, method addOrUpdateToken.

private void addOrUpdateToken(TokenIdent ident, DelegationTokenInformation info, boolean isUpdate) throws Exception {
    String nodeCreatePath = getNodePath(ZK_DTSM_TOKENS_ROOT, DELEGATION_TOKEN_PREFIX + ident.getSequenceNumber());
    ByteArrayOutputStream tokenOs = new ByteArrayOutputStream();
    DataOutputStream tokenOut = new DataOutputStream(tokenOs);
    ByteArrayOutputStream seqOs = new ByteArrayOutputStream();
    try {
        // Serialize the record as: identifier, renew date, password length, password bytes.
        ident.write(tokenOut);
        tokenOut.writeLong(info.getRenewDate());
        tokenOut.writeInt(info.getPassword().length);
        tokenOut.write(info.getPassword());
        if (LOG.isDebugEnabled()) {
            LOG.debug((isUpdate ? "Updating " : "Storing ") + "ZKDTSMDelegationToken_" + ident.getSequenceNumber());
        }
        if (isUpdate) {
            // setData() without withVersion() updates unconditionally; setVersion(-1)
            // on the returned Stat only mutates the local copy, not the znode.
            zkClient.setData().forPath(nodeCreatePath, tokenOs.toByteArray()).setVersion(-1);
        } else {
            zkClient.create().withMode(CreateMode.PERSISTENT).forPath(nodeCreatePath, tokenOs.toByteArray());
        }
    } finally {
        // seqOs is never written to; closing a ByteArrayOutputStream is a no-op.
        seqOs.close();
    }
}
Also used : DataOutputStream(java.io.DataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream)
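
The pattern worth extracting here is: serialize a record into an in-memory buffer with DataOutputStream, then ship the resulting byte[] as a single unit (above, the payload of a ZooKeeper znode). Below is a minimal, self-contained sketch of the same length-prefixed layout (renew date, password length, password bytes), with the TokenIdent and ZooKeeper machinery left out; the class and method names are illustrative, not from Hadoop.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class TokenRecordSketch {
    // Encode a renew date and password with the same length-prefixed layout.
    static byte[] encode(long renewDate, byte[] password) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeLong(renewDate);      // fixed 8-byte renew date
        out.writeInt(password.length); // length prefix for the password
        out.write(password);           // raw password bytes
        out.flush();                   // a no-op for in-memory streams, but harmless
        return bos.toByteArray();
    }

    // Decode the record produced by encode().
    static void decode(byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        long renewDate = in.readLong();
        byte[] password = new byte[in.readInt()];
        in.readFully(password);        // readFully blocks until every byte arrives
        System.out.println("renewDate=" + renewDate + ", pwLen=" + password.length);
    }

    public static void main(String[] args) throws IOException {
        decode(encode(1234567890L, "secret".getBytes(StandardCharsets.UTF_8)));
    }
}

The int length prefix written by encode is what lets decode size its buffer exactly and use readFully, which guarantees the whole password is read back.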

Example 92 with DataOutputStream

Use of java.io.DataOutputStream in project hadoop by apache.

From the class TestBlockReplacement, method replaceBlock.

/*
 * Ask the destination datanode to replace (copy) the given block from the
 * source, and check that the final response status matches opStatus.
 */
private boolean replaceBlock(ExtendedBlock block, DatanodeInfo source, DatanodeInfo sourceProxy, DatanodeInfo destination, StorageType targetStorageType, Status opStatus) throws IOException, SocketException {
    Socket sock = new Socket();
    try {
        sock.connect(NetUtils.createSocketAddr(destination.getXferAddr()), HdfsConstants.READ_TIMEOUT);
        sock.setKeepAlive(true);
        // Send the replaceBlock request over the datanode transfer protocol.
        DataOutputStream out = new DataOutputStream(sock.getOutputStream());
        new Sender(out).replaceBlock(block, targetStorageType, BlockTokenSecretManager.DUMMY_TOKEN, source.getDatanodeUuid(), sourceProxy);
        out.flush();
        // Read delimited responses, polling until the datanode reports a final status.
        DataInputStream reply = new DataInputStream(sock.getInputStream());
        BlockOpResponseProto proto = BlockOpResponseProto.parseDelimitedFrom(reply);
        while (proto.getStatus() == Status.IN_PROGRESS) {
            proto = BlockOpResponseProto.parseDelimitedFrom(reply);
        }
        return proto.getStatus() == opStatus;
    } finally {
        sock.close();
    }
}
Also used : Sender(org.apache.hadoop.hdfs.protocol.datatransfer.Sender) DataOutputStream(java.io.DataOutputStream) BlockOpResponseProto(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto) DataInputStream(java.io.DataInputStream) Socket(java.net.Socket)
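
The essential shape of this test helper is request/flush/response over a socket: write through a DataOutputStream, flush so the bytes actually leave the buffer, then block reading the reply with a DataInputStream. A hedged, HDFS-free sketch of that shape follows; the length-prefixed framing, the 60-second timeouts, and the method names are illustrative assumptions, not the HDFS data transfer protocol.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class RequestResponseSketch {
    // Send one length-prefixed request and read one length-prefixed reply.
    public static byte[] call(String host, int port, byte[] request) throws IOException {
        try (Socket sock = new Socket()) {
            sock.connect(new InetSocketAddress(host, port), 60_000); // connect timeout
            sock.setSoTimeout(60_000);                               // read timeout
            DataOutputStream out = new DataOutputStream(sock.getOutputStream());
            out.writeInt(request.length);
            out.write(request);
            out.flush(); // push the request out before blocking on the reply
            DataInputStream in = new DataInputStream(sock.getInputStream());
            byte[] reply = new byte[in.readInt()];
            in.readFully(reply);
            return reply;
        } // try-with-resources closes the socket, and with it both streams
    }
}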

Example 93 with DataOutputStream

Use of java.io.DataOutputStream in project hadoop by apache.

From the class TestCredentials, method testReadWriteStorage.

@SuppressWarnings("unchecked")
@Test
public <T extends TokenIdentifier> void testReadWriteStorage() throws IOException, NoSuchAlgorithmException {
    // create tokenStorage Object
    Credentials ts = new Credentials();
    Token<T> token1 = new Token();
    Token<T> token2 = new Token();
    Text service1 = new Text("service1");
    Text service2 = new Text("service2");
    Collection<Text> services = new ArrayList<Text>();
    services.add(service1);
    services.add(service2);
    token1.setService(service1);
    token2.setService(service2);
    ts.addToken(new Text("sometoken1"), token1);
    ts.addToken(new Text("sometoken2"), token2);
    // create keys and put it in
    final KeyGenerator kg = KeyGenerator.getInstance(DEFAULT_HMAC_ALGORITHM);
    String alias = "alias";
    Map<Text, byte[]> m = new HashMap<Text, byte[]>(10);
    for (int i = 0; i < 10; i++) {
        Key key = kg.generateKey();
        m.put(new Text(alias + i), key.getEncoded());
        ts.addSecretKey(new Text(alias + i), key.getEncoded());
    }
    // create file to store
    File tmpFileName = new File(tmpDir, "tokenStorageTest");
    DataOutputStream dos = new DataOutputStream(new FileOutputStream(tmpFileName));
    ts.write(dos);
    dos.close();
    // open and read it back
    DataInputStream dis = new DataInputStream(new FileInputStream(tmpFileName));
    ts = new Credentials();
    ts.readFields(dis);
    dis.close();
    // get the tokens and compare the services
    Collection<Token<? extends TokenIdentifier>> list = ts.getAllTokens();
    assertEquals("getAllTokens should return collection of size 2", list.size(), 2);
    boolean foundFirst = false;
    boolean foundSecond = false;
    for (Token<? extends TokenIdentifier> token : list) {
        if (token.getService().equals(service1)) {
            foundFirst = true;
        }
        if (token.getService().equals(service2)) {
            foundSecond = true;
        }
    }
    assertTrue("Tokens for services service1 and service2 must be present", foundFirst && foundSecond);
    // compare secret keys
    int mapLen = m.size();
    assertEquals("wrong number of keys in the Storage", mapLen, ts.numberOfSecretKeys());
    for (Text a : m.keySet()) {
        byte[] kTS = ts.getSecretKey(a);
        byte[] kLocal = m.get(a);
        assertTrue("keys don't match for " + a, WritableComparator.compareBytes(kTS, 0, kTS.length, kLocal, 0, kLocal.length) == 0);
    }
    tmpFileName.delete();
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) HashMap(java.util.HashMap) DataOutputStream(java.io.DataOutputStream) ArrayList(java.util.ArrayList) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) DataInputStream(java.io.DataInputStream) FileInputStream(java.io.FileInputStream) FileOutputStream(java.io.FileOutputStream) KeyGenerator(javax.crypto.KeyGenerator) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) Key(java.security.Key) Test(org.junit.Test)
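
The write/readFields round trip in this test is the general Hadoop Writable contract: write(DataOutput) serializes an object, and readFields(DataInput) repopulates an existing instance. A minimal sketch of that contract using org.apache.hadoop.io.Text, assuming hadoop-common is on the classpath; the file and variable names are illustrative.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Text;

public class WritableRoundTripSketch {
    public static void main(String[] args) throws IOException {
        File f = File.createTempFile("writableTest", ".bin");
        Text written = new Text("service1");
        // write(DataOutput) serializes the Writable onto any DataOutput
        try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(f))) {
            written.write(dos);
        }
        // readFields(DataInput) repopulates an existing instance
        Text read = new Text();
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f))) {
            read.readFields(dis);
        }
        System.out.println("round trip ok: " + written.equals(read));
        f.delete();
    }
}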

Example 94 with DataOutputStream

Use of java.io.DataOutputStream in project hadoop by apache.

From the class TestCredentials, method writeCredentialsProto.

private void writeCredentialsProto(Credentials creds, String filename) throws IOException, FileNotFoundException {
    // try-with-resources flushes and closes the stream even if writeProto throws
    try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(new File(tmpDir, filename)))) {
        creds.writeProto(dos);
    }
}
Also used : DataOutputStream(java.io.DataOutputStream) FileOutputStream(java.io.FileOutputStream) File(java.io.File)
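
Scoping a file-backed DataOutputStream with try-with-resources, as above, guarantees the stream is flushed and the descriptor released even if the write throws. A minimal sketch of the idiom with illustrative path and record fields; Files.newOutputStream is used simply as an alternative to FileOutputStream.

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class SafeWriteSketch {
    // Write a couple of primitives and guarantee the descriptor is released.
    static void writeRecord(Path path, int id, long stamp) throws IOException {
        try (DataOutputStream dos = new DataOutputStream(Files.newOutputStream(path))) {
            dos.writeInt(id);
            dos.writeLong(stamp);
        } // close() flushes and releases the file even if a write throws
    }

    public static void main(String[] args) throws IOException {
        writeRecord(Paths.get("record.bin"), 42, System.currentTimeMillis());
    }
}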

Example 95 with DataOutputStream

Use of java.io.DataOutputStream in project hadoop by apache.

From the class BlockReaderRemote, method newBlockReader.

/**
   * Create a new BlockReader specifically to satisfy a read.
   * This method also sends the OP_READ_BLOCK request.
   *
   * @param file  File location
   * @param block  The block object
   * @param blockToken  The block token for security
   * @param startOffset  The read offset, relative to block head
   * @param len  The number of bytes to read
   * @param verifyChecksum  Whether to verify checksum
   * @param clientName  Client name
   * @param peer  The Peer to use
   * @param datanodeID  The DatanodeID this peer is connected to
   * @param peerCache  Cache for reusing peers across reads
   * @param cachingStrategy  Caching strategy to request from the datanode
   * @param tracer  Tracer for recording the read
   * @param networkDistance  Network distance between client and datanode
   * @return New BlockReader instance; on error an IOException is thrown
   *         rather than null being returned.
   */
public static BlockReader newBlockReader(String file, ExtendedBlock block, Token<BlockTokenIdentifier> blockToken, long startOffset, long len, boolean verifyChecksum, String clientName, Peer peer, DatanodeID datanodeID, PeerCache peerCache, CachingStrategy cachingStrategy, Tracer tracer, int networkDistance) throws IOException {
    // in and out will be closed when sock is closed (by the caller)
    final DataOutputStream out = new DataOutputStream(new BufferedOutputStream(peer.getOutputStream()));
    new Sender(out).readBlock(block, blockToken, clientName, startOffset, len, verifyChecksum, cachingStrategy);
    // Get bytes in block
    DataInputStream in = new DataInputStream(peer.getInputStream());
    BlockOpResponseProto status = BlockOpResponseProto.parseFrom(PBHelperClient.vintPrefixed(in));
    checkSuccess(status, peer, block, file);
    ReadOpChecksumInfoProto checksumInfo = status.getReadOpChecksumInfo();
    DataChecksum checksum = DataTransferProtoUtil.fromProto(checksumInfo.getChecksum());
    // TODO: warn when we get CHECKSUM_NULL?
    // Read the first chunk offset: it must lie at or before startOffset,
    // and strictly within one checksum chunk of it.
    long firstChunkOffset = checksumInfo.getChunkOffset();
    if (firstChunkOffset < 0 || firstChunkOffset > startOffset || firstChunkOffset <= (startOffset - checksum.getBytesPerChecksum())) {
        throw new IOException("BlockReader: error in first chunk offset (" + firstChunkOffset + ") startOffset is " + startOffset + " for file " + file);
    }
    return new BlockReaderRemote(file, block.getBlockId(), checksum, verifyChecksum, startOffset, firstChunkOffset, len, peer, datanodeID, peerCache, tracer, networkDistance);
}
Also used : Sender(org.apache.hadoop.hdfs.protocol.datatransfer.Sender) DataOutputStream(java.io.DataOutputStream) BlockOpResponseProto(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto) ReadOpChecksumInfoProto(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto) IOException(java.io.IOException) DataInputStream(java.io.DataInputStream) BufferedOutputStream(java.io.BufferedOutputStream) DataChecksum(org.apache.hadoop.util.DataChecksum)
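
The detail worth copying from newBlockReader is the stream layering: a BufferedOutputStream between the raw peer stream and the DataOutputStream coalesces the many small writes a request header produces into one larger write on the wire. A minimal sketch of that layering follows; the op code, block id field, and 8 KB buffer size are illustrative, not the HDFS wire format.

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class LayeredStreamSketch {
    // Layer a raw stream (e.g. a socket's) so small header writes are batched.
    static DataOutputStream frame(OutputStream raw) {
        return new DataOutputStream(new BufferedOutputStream(raw, 8 * 1024));
    }

    static void sendHeader(OutputStream raw, short op, long blockId) throws IOException {
        DataOutputStream out = frame(raw);
        out.writeShort(op);     // operation code (illustrative)
        out.writeLong(blockId); // block being requested (illustrative)
        out.flush();            // flush the buffer; do NOT close, the caller owns raw
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        sendHeader(sink, (short) 81, 123456789L);
        System.out.println("header bytes written: " + sink.size()); // 2 + 8 = 10
    }
}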

Aggregations

DataOutputStream (java.io.DataOutputStream)2968 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1314 IOException (java.io.IOException)1024 Test (org.junit.Test)633 DataInputStream (java.io.DataInputStream)615 FileOutputStream (java.io.FileOutputStream)427 ByteArrayInputStream (java.io.ByteArrayInputStream)411 File (java.io.File)281 BufferedOutputStream (java.io.BufferedOutputStream)228 UnitTest (org.apache.geode.test.junit.categories.UnitTest)172 URL (java.net.URL)149 InputStreamReader (java.io.InputStreamReader)146 BufferedReader (java.io.BufferedReader)142 Path (org.apache.hadoop.fs.Path)137 DataInput (java.io.DataInput)124 ArrayList (java.util.ArrayList)122 HttpURLConnection (java.net.HttpURLConnection)120 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)117 FileInputStream (java.io.FileInputStream)107 InputStream (java.io.InputStream)107