Example 86 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

the class ClientDatanodeProtocolTranslatorPB method getBlockLocalPathInfo.

@Override
public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock block,
        Token<BlockTokenIdentifier> token) throws IOException {
    GetBlockLocalPathInfoRequestProto req = GetBlockLocalPathInfoRequestProto
        .newBuilder()
        .setBlock(PBHelperClient.convert(block))
        .setToken(PBHelperClient.convert(token))
        .build();
    GetBlockLocalPathInfoResponseProto resp;
    try {
        resp = rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req);
    } catch (ServiceException e) {
        // The RPC layer wraps every failure in ServiceException; recover
        // the underlying IOException before rethrowing.
        throw ProtobufHelper.getRemoteException(e);
    }
    return new BlockLocalPathInfo(PBHelperClient.convert(resp.getBlock()),
        resp.getLocalPath(), resp.getLocalMetaPath());
}
Also used: ServiceException(com.google.protobuf.ServiceException) GetBlockLocalPathInfoRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetBlockLocalPathInfoRequestProto) BlockLocalPathInfo(org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo) GetBlockLocalPathInfoResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetBlockLocalPathInfoResponseProto)
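
Every translator method in these examples follows the same unwrap idiom: the generated protobuf RPC stubs only throw ServiceException, so each catch block recovers the server-side IOException before rethrowing. Below is a minimal, self-contained sketch of that unwrapping logic; the class and method names are ours, and it only assumes the cause-inspection behavior that ProtobufHelper.getRemoteException is used for here, not Hadoop's actual implementation.

import com.google.protobuf.ServiceException;
import java.io.IOException;

public final class ServiceExceptionUtil {

    private ServiceExceptionUtil() {
    }

    /**
     * Recovers the IOException carried as the cause of a ServiceException,
     * or wraps the ServiceException itself when the cause is something else.
     */
    public static IOException unwrap(ServiceException se) {
        Throwable cause = se.getCause();
        if (cause instanceof IOException) {
            // The remote exception travels as the cause; rethrow it as-is.
            return (IOException) cause;
        }
        // Unknown or missing cause: preserve the full chain by wrapping.
        return new IOException(se);
    }
}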

Example 87 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

the class ClientDatanodeProtocolTranslatorPB method submitDiskBalancerPlan.

/**
   * Submits a disk balancer plan to the datanode.
   * @param planID - SHA-512 hash string of the submitted plan; clients use
   *               it to locate local copies of the plan.
   * @param planVersion - Data format version of the plan; reserved for
   *                    future use and currently unused.
   * @param planFile - Plan file name.
   * @param planData - Actual plan data in JSON format.
   * @param skipDateCheck - Skips the date check when true.
   * @throws IOException if the RPC fails.
   */
@Override
public void submitDiskBalancerPlan(String planID, long planVersion, String planFile, String planData, boolean skipDateCheck) throws IOException {
    try {
        SubmitDiskBalancerPlanRequestProto request = SubmitDiskBalancerPlanRequestProto
            .newBuilder()
            .setPlanID(planID)
            .setPlanVersion(planVersion)
            .setPlanFile(planFile)
            .setPlan(planData)
            .setIgnoreDateCheck(skipDateCheck)
            .build();
        rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request);
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used: ServiceException(com.google.protobuf.ServiceException) SubmitDiskBalancerPlanRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.SubmitDiskBalancerPlanRequestProto)
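
The javadoc notes that planID is the SHA-512 hash string of the plan being submitted. A hedged sketch of how a caller could derive such an ID from the JSON plan data using only the JDK; the class name, method name, and hex encoding are illustrative assumptions, not the disk balancer's actual helper.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class PlanIdSketch {

    /** Returns the lower-case hex SHA-512 digest of the plan JSON. */
    public static String sha512Hex(String planData) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-512");
        byte[] hash = digest.digest(planData.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(hash.length * 2);
        for (byte b : hash) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }
}

A caller would compute planID = sha512Hex(planData) and pass both to submitDiskBalancerPlan, so the client and datanode can later agree on which plan is meant.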

Example 88 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

the class ClientDatanodeProtocolTranslatorPB method queryDiskBalancerPlan.

/**
   * Gets the status of the currently executing disk balancer plan.
   */
@Override
public DiskBalancerWorkStatus queryDiskBalancerPlan() throws IOException {
    try {
        QueryPlanStatusRequestProto request = QueryPlanStatusRequestProto.newBuilder().build();
        QueryPlanStatusResponseProto response = rpcProxy.queryDiskBalancerPlan(NULL_CONTROLLER, request);
        DiskBalancerWorkStatus.Result result = Result.NO_PLAN;
        if (response.hasResult()) {
            result = DiskBalancerWorkStatus.Result.values()[response.getResult()];
        }
        return new DiskBalancerWorkStatus(result,
            response.hasPlanID() ? response.getPlanID() : null,
            response.hasPlanFile() ? response.getPlanFile() : null,
            response.hasCurrentStatus() ? response.getCurrentStatus() : null);
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used: Result(org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result) ServiceException(com.google.protobuf.ServiceException) QueryPlanStatusRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.QueryPlanStatusRequestProto) DiskBalancerWorkStatus(org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus) QueryPlanStatusResponseProto(org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.QueryPlanStatusResponseProto)
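
Note the Result.values()[response.getResult()] lookup above: it maps the wire integer to an enum constant by ordinal, so an out-of-range value from a version-skewed peer would throw ArrayIndexOutOfBoundsException. A small self-contained sketch of a defensive variant; the enum constants and fallback choice here are illustrative, not Hadoop's.

public final class EnumFromWire {

    enum Result { NO_PLAN, PLAN_UNDER_PROGRESS, PLAN_DONE, PLAN_CANCELLED }

    /** Maps a wire ordinal to a Result, falling back instead of throwing. */
    static Result resultFromOrdinal(int ordinal, Result fallback) {
        Result[] values = Result.values();
        if (ordinal < 0 || ordinal >= values.length) {
            // A peer on a different version sent a value we do not know;
            // degrade gracefully rather than throwing.
            return fallback;
        }
        return values[ordinal];
    }

    public static void main(String[] args) {
        System.out.println(resultFromOrdinal(1, Result.NO_PLAN)); // PLAN_UNDER_PROGRESS
        System.out.println(resultFromOrdinal(9, Result.NO_PLAN)); // NO_PLAN
    }
}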

Example 89 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

the class ClientNamenodeProtocolTranslatorPB method setQuota.

@Override
public void setQuota(String path, long namespaceQuota,
        long storagespaceQuota, StorageType type) throws IOException {
    final SetQuotaRequestProto.Builder builder = SetQuotaRequestProto
        .newBuilder()
        .setPath(path)
        .setNamespaceQuota(namespaceQuota)
        .setStoragespaceQuota(storagespaceQuota);
    if (type != null) {
        builder.setStorageType(PBHelperClient.convertStorageType(type));
    }
    final SetQuotaRequestProto req = builder.build();
    try {
        rpcProxy.setQuota(null, req);
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used: ServiceException(com.google.protobuf.ServiceException) SetQuotaRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.SetQuotaRequestProto)
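
In practice this translator is reached through the public client API rather than called directly. A hedged usage sketch via DistributedFileSystem, assuming the standard setQuota/setQuotaByStorageType entry points and the HdfsConstants.QUOTA_DONT_SET sentinel; the path and sizes are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;

public class SetQuotaSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumes fs.defaultFS points at an HDFS cluster.
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        Path dir = new Path("/user/alice/data");
        // Cap the name quota at one million entries while leaving the
        // storage-space quota untouched via the QUOTA_DONT_SET sentinel.
        dfs.setQuota(dir, 1_000_000L, HdfsConstants.QUOTA_DONT_SET);
        // Per-storage-type quotas use the StorageType overload, which is
        // what drives the builder.setStorageType(...) branch above.
        dfs.setQuotaByStorageType(dir, StorageType.SSD, 10L * 1024 * 1024 * 1024);
    }
}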

Example 90 with ServiceException

use of com.google.protobuf.ServiceException in project hadoop by apache.

the class ClientNamenodeProtocolTranslatorPB method createSnapshot.

@Override
public String createSnapshot(String snapshotRoot, String snapshotName) throws IOException {
    final CreateSnapshotRequestProto.Builder builder = CreateSnapshotRequestProto
        .newBuilder().setSnapshotRoot(snapshotRoot);
    if (snapshotName != null) {
        builder.setSnapshotName(snapshotName);
    }
    final CreateSnapshotRequestProto req = builder.build();
    try {
        return rpcProxy.createSnapshot(null, req).getSnapshotPath();
    } catch (ServiceException e) {
        throw ProtobufHelper.getRemoteException(e);
    }
}
Also used: CreateSnapshotRequestProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CreateSnapshotRequestProto) ServiceException(com.google.protobuf.ServiceException)
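
As with setQuota, callers normally reach this method through the FileSystem API. A hedged usage sketch, assuming the target directory was already made snapshottable (for example with hdfs dfsadmin -allowSnapshot); the path and snapshot name are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateSnapshotSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path dir = new Path("/user/alice/data");
        // An explicit name exercises the builder.setSnapshotName branch
        // above; the one-argument overload createSnapshot(dir) sends a
        // null name and lets the NameNode pick a default.
        Path snapshot = fs.createSnapshot(dir, "before-migration");
        System.out.println("Created snapshot at " + snapshot);
    }
}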

Aggregations

Each type below co-occurs with ServiceException in the indexed examples; the number is its usage count.

ServiceException (com.google.protobuf.ServiceException): 139
IOException (java.io.IOException): 66
Test (org.junit.Test): 22
ArrayList (java.util.ArrayList): 12
Configuration (org.apache.hadoop.conf.Configuration): 11
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 5
Table (org.apache.hadoop.hbase.client.Table): 5
CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel): 5
InetSocketAddress (java.net.InetSocketAddress): 4
DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand): 4
ByteString (com.google.protobuf.ByteString): 3
InterruptedIOException (java.io.InterruptedIOException): 3
ConnectException (java.net.ConnectException): 3
SocketTimeoutException (java.net.SocketTimeoutException): 3
Callable (java.util.concurrent.Callable): 3
ExecutionException (java.util.concurrent.ExecutionException): 3
ExecutorService (java.util.concurrent.ExecutorService): 3
EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone): 3
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 3
Server (org.apache.hadoop.ipc.Server): 3