
Example 46 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

From class ClientNamenodeProtocolServerSideTranslatorPB, method updatePipeline.

@Override
public UpdatePipelineResponseProto updatePipeline(RpcController controller,
        UpdatePipelineRequestProto req) throws ServiceException {
    try {
        List<DatanodeIDProto> newNodes = req.getNewNodesList();
        List<String> newStorageIDs = req.getStorageIDsList();
        // Convert proto types to native HDFS types before delegating
        // to the NameNode implementation.
        server.updatePipeline(req.getClientName(),
            PBHelperClient.convert(req.getOldBlock()),
            PBHelperClient.convert(req.getNewBlock()),
            PBHelperClient.convert(newNodes.toArray(new DatanodeIDProto[newNodes.size()])),
            newStorageIDs.toArray(new String[newStorageIDs.size()]));
        return VOID_UPDATEPIPELINE_RESPONSE;
    } catch (IOException e) {
        // Checked IOExceptions cannot cross the protobuf service boundary,
        // so wrap them in ServiceException for the RPC layer.
        throw new ServiceException(e);
    }
}
Also used: ServiceException(com.google.protobuf.ServiceException), IOException(java.io.IOException), DatanodeIDProto(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)
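On the client side the conversion runs in reverse: build the request proto from native arguments, invoke the RPC proxy, and unwrap any ServiceException back into an IOException. The sketch below follows the pattern of ClientNamenodeProtocolTranslatorPB but is illustrative, not a verbatim copy of the Hadoop source; the rpcProxy and NULL_CONTROLLER names and the builder setters are assumptions inferred from the server-side getters above.

@Override
public void updatePipeline(String clientName, ExtendedBlock oldBlock,
        ExtendedBlock newBlock, DatanodeID[] newNodes, String[] newStorageIDs)
        throws IOException {
    // Mirror of the server side: native types converted into proto fields.
    UpdatePipelineRequestProto req = UpdatePipelineRequestProto.newBuilder()
        .setClientName(clientName)
        .setOldBlock(PBHelperClient.convert(oldBlock))
        .setNewBlock(PBHelperClient.convert(newBlock))
        .addAllNewNodes(Arrays.asList(PBHelperClient.convert(newNodes)))
        .addAllStorageIDs(Arrays.asList(newStorageIDs))
        .build();
    try {
        rpcProxy.updatePipeline(NULL_CONTROLLER, req);
    } catch (ServiceException se) {
        // Recover the IOException the server wrapped in Example 46.
        throw ProtobufHelper.getRemoteException(se);
    }
}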

Example 47 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

From class ClientNamenodeProtocolServerSideTranslatorPB, method listCacheDirectives.

@Override
public ListCacheDirectivesResponseProto listCacheDirectives(RpcController controller,
        ListCacheDirectivesRequestProto request) throws ServiceException {
    try {
        CacheDirectiveInfo filter = PBHelperClient.convert(request.getFilter());
        // prevId is the caller's paging cursor: results resume after that ID.
        BatchedEntries<CacheDirectiveEntry> entries =
            server.listCacheDirectives(request.getPrevId(), filter);
        ListCacheDirectivesResponseProto.Builder builder =
            ListCacheDirectivesResponseProto.newBuilder();
        // hasMore tells the client whether to request another page.
        builder.setHasMore(entries.hasMore());
        for (int i = 0, n = entries.size(); i < n; i++) {
            builder.addElements(PBHelperClient.convert(entries.get(i)));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo), ServiceException(com.google.protobuf.ServiceException), ListCacheDirectivesResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCacheDirectivesResponseProto), CacheDirectiveEntry(org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry), IOException(java.io.IOException)
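The hasMore flag and the prevId argument together implement cursor-style paging: the server returns one batch, and the client resumes from the ID of the last entry it received. Below is a minimal sketch of a caller draining every matching directive; the namenode handle and the process() callback are illustrative assumptions, while CacheDirectiveEntry.getInfo().getId() follows the accessors visible in the imports above.

// Page through all cache directives matching 'filter' (illustrative).
long prevId = 0;
BatchedEntries<CacheDirectiveEntry> batch;
do {
    batch = namenode.listCacheDirectives(prevId, filter);
    for (int i = 0; i < batch.size(); i++) {
        CacheDirectiveEntry entry = batch.get(i);
        process(entry);                    // caller-defined handling
        prevId = entry.getInfo().getId();  // cursor for the next request
    }
} while (batch.hasMore());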

Example 48 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

From class ClientNamenodeProtocolServerSideTranslatorPB, method addBlock.

@Override
public AddBlockResponseProto addBlock(RpcController controller,
        AddBlockRequestProto req) throws ServiceException {
    try {
        List<DatanodeInfoProto> excl = req.getExcludeNodesList();
        List<String> favor = req.getFavoredNodesList();
        EnumSet<AddBlockFlag> flags =
            PBHelperClient.convertAddBlockFlags(req.getFlagsList());
        // Empty repeated fields are normalized to null: protobuf lists are
        // never null, but the NameNode API expects null for "not supplied".
        LocatedBlock result = server.addBlock(req.getSrc(), req.getClientName(),
            req.hasPrevious() ? PBHelperClient.convert(req.getPrevious()) : null,
            (excl == null || excl.size() == 0) ? null :
                PBHelperClient.convert(excl.toArray(new DatanodeInfoProto[excl.size()])),
            req.getFileId(),
            (favor == null || favor.size() == 0) ? null :
                favor.toArray(new String[favor.size()]),
            flags);
        return AddBlockResponseProto.newBuilder()
            .setBlock(PBHelperClient.convertLocatedBlock(result)).build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: ServiceException(com.google.protobuf.ServiceException), AddBlockFlag(org.apache.hadoop.hdfs.AddBlockFlag), LocatedBlock(org.apache.hadoop.hdfs.protocol.LocatedBlock), IOException(java.io.IOException), DatanodeInfoProto(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)
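The (list == null || list.size() == 0) ? null : ... dance appears because protobuf repeated fields are never null, only empty, while the NameNode API uses null to mean "argument not supplied". A small generic helper makes the idiom explicit; this helper is illustrative and does not exist in Hadoop.

import java.util.List;

// Normalize protobuf's "empty" back to the Java API's "null" (illustrative).
static <T> T[] emptyToNull(List<T> list, T[] type) {
    return (list == null || list.isEmpty()) ? null : list.toArray(type);
}

// Usage, mirroring the favored-nodes argument above:
String[] favored = emptyToNull(req.getFavoredNodesList(), new String[0]);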

Example 49 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

From class ClientNamenodeProtocolServerSideTranslatorPB, method getDataEncryptionKey.

@Override
public GetDataEncryptionKeyResponseProto getDataEncryptionKey(RpcController controller,
        GetDataEncryptionKeyRequestProto request) throws ServiceException {
    try {
        GetDataEncryptionKeyResponseProto.Builder builder =
            GetDataEncryptionKeyResponseProto.newBuilder();
        DataEncryptionKey encryptionKey = server.getDataEncryptionKey();
        // dataEncryptionKey is an optional proto field: only set it when a
        // key actually exists, so the client can test for its presence.
        if (encryptionKey != null) {
            builder.setDataEncryptionKey(PBHelperClient.convert(encryptionKey));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: DataEncryptionKey(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey), ServiceException(com.google.protobuf.ServiceException), GetDataEncryptionKeyResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.GetDataEncryptionKeyResponseProto), IOException(java.io.IOException)
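Because dataEncryptionKey is only set when a key exists, the receiving side must test for the field's presence before converting it. Below is a sketch of the matching client-side read; hasDataEncryptionKey() is the standard generated-protobuf presence accessor for an optional field, but the request constant name and rpcProxy are assumptions, not verified Hadoop identifiers.

try {
    GetDataEncryptionKeyResponseProto rsp = rpcProxy.getDataEncryptionKey(
        NULL_CONTROLLER, VOID_GET_DATA_ENCRYPTIONKEY_REQUEST);
    // Absent optional field maps back to null, mirroring the server's guard.
    return rsp.hasDataEncryptionKey()
        ? PBHelperClient.convert(rsp.getDataEncryptionKey()) : null;
} catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
}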

Example 50 with ServiceException

Use of com.google.protobuf.ServiceException in project hadoop by apache.

From class DatanodeProtocolClientSideTranslatorPB, method commitBlockSynchronization.

@Override
public void commitBlockSynchronization(ExtendedBlock block, long newgenerationstamp,
        long newlength, boolean closeFile, boolean deleteblock, DatanodeID[] newtargets,
        String[] newtargetstorages) throws IOException {
    CommitBlockSynchronizationRequestProto.Builder builder =
        CommitBlockSynchronizationRequestProto.newBuilder()
            .setBlock(PBHelperClient.convert(block))
            .setNewGenStamp(newgenerationstamp)
            .setNewLength(newlength)
            .setCloseFile(closeFile)
            .setDeleteBlock(deleteblock);
    for (int i = 0; i < newtargets.length; i++) {
        // "addNewTaragets" reflects the misspelled field name in the
        // DatanodeProtocol .proto definition; it is not a typo introduced here.
        builder.addNewTaragets(PBHelperClient.convert(newtargets[i]));
        builder.addNewTargetStorages(newtargetstorages[i]);
    }
    CommitBlockSynchronizationRequestProto req = builder.build();
    try {
        rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req);
    } catch (ServiceException se) {
        // Client side of the pattern: unwrap the ServiceException back into
        // the underlying IOException before rethrowing it to the caller.
        throw ProtobufHelper.getRemoteException(se);
    }
}
Also used: ServiceException(com.google.protobuf.ServiceException), CommitBlockSynchronizationRequestProto(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.CommitBlockSynchronizationRequestProto)
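The two halves of the pattern are meant to round-trip the original exception: the server wraps an IOException in a ServiceException (Examples 46 to 49), and the client unwraps it with ProtobufHelper.getRemoteException (Example 50). A small self-contained JUnit check of that contract, assuming only hadoop-common and protobuf on the classpath; it relies on getRemoteException returning the IOException cause directly when one is present.

import static org.junit.Assert.assertSame;

import java.io.IOException;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.junit.Test;

public class TestServiceExceptionRoundTrip {
    @Test
    public void wrappedIOExceptionIsRecovered() {
        IOException original = new IOException("pipeline update failed");
        // Server side: checked exception wrapped for the RPC layer.
        ServiceException wire = new ServiceException(original);
        // Client side: ProtobufHelper restores the original cause.
        IOException recovered = ProtobufHelper.getRemoteException(wire);
        assertSame(original, recovered);
    }
}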

Aggregations

ServiceException (com.google.protobuf.ServiceException): 139
IOException (java.io.IOException): 66
Test (org.junit.Test): 22
ArrayList (java.util.ArrayList): 12
Configuration (org.apache.hadoop.conf.Configuration): 11
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 5
Table (org.apache.hadoop.hbase.client.Table): 5
CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel): 5
InetSocketAddress (java.net.InetSocketAddress): 4
DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand): 4
ByteString (com.google.protobuf.ByteString): 3
InterruptedIOException (java.io.InterruptedIOException): 3
ConnectException (java.net.ConnectException): 3
SocketTimeoutException (java.net.SocketTimeoutException): 3
Callable (java.util.concurrent.Callable): 3
ExecutionException (java.util.concurrent.ExecutionException): 3
ExecutorService (java.util.concurrent.ExecutorService): 3
EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone): 3
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 3
Server (org.apache.hadoop.ipc.Server): 3