Example 1 with BulkLoadHFileRequest

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest in project hbase by apache.

From the class RequestConverter, the method buildBulkLoadHFileRequest:

/**
 * Create a protocol buffer bulk load request.
 *
 * @param familyPaths  pairs of column family name and HFile path to load
 * @param regionName   name of the region to load into
 * @param assignSeqNum whether the receiving server should assign a sequence number to the load
 * @param userToken    delegation token for the source filesystem, or null when security is off
 * @param bulkToken    staging directory token issued by the secure bulk load machinery, or null
 * @param copyFiles    whether to copy the HFiles instead of moving them
 * @return a bulk load request
 */
public static BulkLoadHFileRequest buildBulkLoadHFileRequest(
        final List<Pair<byte[], String>> familyPaths, final byte[] regionName,
        boolean assignSeqNum, final Token<?> userToken, final String bulkToken,
        boolean copyFiles) {
    RegionSpecifier region = RequestConverter.buildRegionSpecifier(RegionSpecifierType.REGION_NAME, regionName);
    ClientProtos.DelegationToken protoDT = null;
    if (userToken != null) {
        protoDT = ClientProtos.DelegationToken.newBuilder()
            .setIdentifier(UnsafeByteOperations.unsafeWrap(userToken.getIdentifier()))
            .setPassword(UnsafeByteOperations.unsafeWrap(userToken.getPassword()))
            .setKind(userToken.getKind().toString())
            .setService(userToken.getService().toString())
            .build();
    }
    List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths = new ArrayList<>(familyPaths.size());
    if (!familyPaths.isEmpty()) {
        ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder pathBuilder =
            ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder();
        for (Pair<byte[], String> el : familyPaths) {
            protoFamilyPaths.add(pathBuilder
                .setFamily(UnsafeByteOperations.unsafeWrap(el.getFirst()))
                .setPath(el.getSecond())
                .build());
        }
        pathBuilder.clear();
    }
    BulkLoadHFileRequest.Builder request = ClientProtos.BulkLoadHFileRequest.newBuilder()
        .setRegion(region)
        .setAssignSeqNum(assignSeqNum)
        .addAllFamilyPath(protoFamilyPaths);
    if (userToken != null) {
        request.setFsToken(protoDT);
    }
    if (bulkToken != null) {
        request.setBulkToken(bulkToken);
    }
    request.setCopyFile(copyFiles);
    return request.build();
}
Also used : BulkLoadHFileRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) ArrayList(java.util.ArrayList) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) RegionSpecifier(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier)
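
For context, here is a minimal caller sketch. It is not taken from the HBase sources: the regionInfo variable and the family and path values are assumptions, and the null userToken and bulkToken model an insecure cluster where no delegation or staging tokens are involved.

List<Pair<byte[], String>> familyPaths = new ArrayList<>();
familyPaths.add(new Pair<>(Bytes.toBytes("cf"), "/staging/cf/hfile-0001"));
BulkLoadHFileRequest req = RequestConverter.buildBulkLoadHFileRequest(
    familyPaths,
    regionInfo.getRegionName(), // byte[] name of the target region (regionInfo is assumed)
    true,                       // assignSeqNum: let the server assign the sequence id
    null,                       // userToken: none without Hadoop security
    null,                       // bulkToken: none without secure bulk load staging
    false);                     // copyFiles: move the HFiles rather than copying them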

Example 2 with BulkLoadHFileRequest

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest in project hbase by apache.

From the class SecureBulkLoadManager, the method secureBulkLoadHFiles:

public Map<byte[], List<Path>> secureBulkLoadHFiles(final Region region,
        final BulkLoadHFileRequest request) throws IOException {
    final List<Pair<byte[], String>> familyPaths = new ArrayList<>(request.getFamilyPathCount());
    for (ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) {
        familyPaths.add(new Pair<>(el.getFamily().toByteArray(), el.getPath()));
    }
    Token userToken = null;
    if (userProvider.isHadoopSecurityEnabled()) {
        userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
            request.getFsToken().getPassword().toByteArray(),
            new Text(request.getFsToken().getKind()),
            new Text(request.getFsToken().getService()));
    }
    final String bulkToken = request.getBulkToken();
    User user = getActiveUser();
    final UserGroupInformation ugi = user.getUGI();
    if (userProvider.isHadoopSecurityEnabled()) {
        try {
            Token tok = TokenUtil.obtainToken(conn);
            if (tok != null) {
                boolean b = ugi.addToken(tok);
                LOG.debug("token added " + tok + " for user " + ugi + " return=" + b);
            }
        } catch (IOException ioe) {
            LOG.warn("unable to add token", ioe);
        }
    }
    if (userToken != null) {
        ugi.addToken(userToken);
    } else if (userProvider.isHadoopSecurityEnabled()) {
        // we allow a null token to pass through in "simple" security mode,
        // for mini cluster testing
        throw new DoNotRetryIOException("User token cannot be null");
    }
    boolean bypass = false;
    if (region.getCoprocessorHost() != null) {
        bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
    }
    boolean loaded = false;
    Map<byte[], List<Path>> map = null;
    try {
        if (!bypass) {
            // After this point the 'doAs' user will hold two tokens, one for the source fs
            // ('request user'), another for the target fs (HBase region server principal).
            if (userProvider.isHadoopSecurityEnabled()) {
                FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer");
                targetfsDelegationToken.acquireDelegationToken(fs);
                Token<?> targetFsToken = targetfsDelegationToken.getUserToken();
                if (targetFsToken != null && (userToken == null
                        || !targetFsToken.getService().equals(userToken.getService()))) {
                    ugi.addToken(targetFsToken);
                }
            }
            map = ugi.doAs(new PrivilegedAction<Map<byte[], List<Path>>>() {

                @Override
                public Map<byte[], List<Path>> run() {
                    FileSystem fs = null;
                    try {
                        fs = FileSystem.get(conf);
                        for (Pair<byte[], String> el : familyPaths) {
                            Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
                            if (!fs.exists(stageFamily)) {
                                fs.mkdirs(stageFamily);
                                fs.setPermission(stageFamily, PERM_ALL_ACCESS);
                            }
                        }
                        // we call bulkLoadHFiles as the requesting user
                        // to enable access prior to staging
                        return region.bulkLoadHFiles(familyPaths, true,
                            new SecureBulkLoadListener(fs, bulkToken, conf),
                            request.getCopyFile());
                    } catch (Exception e) {
                        LOG.error("Failed to complete bulk load", e);
                    }
                    return null;
                }
            });
            if (map != null) {
                loaded = true;
            }
        }
    } finally {
        if (region.getCoprocessorHost() != null) {
            region.getCoprocessorHost().postBulkLoadHFile(familyPaths, map, loaded);
        }
    }
    return map;
}
Also used : Path(org.apache.hadoop.fs.Path) User(org.apache.hadoop.hbase.security.User) BulkLoadHFileRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ArrayList(java.util.ArrayList) Token(org.apache.hadoop.security.token.Token) FsDelegationToken(org.apache.hadoop.hbase.security.token.FsDelegationToken) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) PrivilegedAction(java.security.PrivilegedAction) FileSystem(org.apache.hadoop.fs.FileSystem) List(java.util.List) Pair(org.apache.hadoop.hbase.util.Pair) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
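
The core pattern above, attaching a delegation token to a UserGroupInformation and then running filesystem work via doAs(), can be sketched in isolation. The following helper is hypothetical (the name runWithToken is an assumption), not part of the HBase sources:

import java.security.PrivilegedAction;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

// Hypothetical helper showing the token-plus-doAs pattern used by secureBulkLoadHFiles.
public static <T> T runWithToken(UserGroupInformation ugi, Token<?> token,
        PrivilegedAction<T> action) {
    if (token != null) {
        ugi.addToken(token); // the token becomes part of the UGI's credentials
    }
    // Code inside 'action' (e.g. FileSystem.get(conf) calls) now authenticates
    // with the added token rather than the process's own credentials.
    return ugi.doAs(action);
}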

Aggregations

ArrayList (java.util.ArrayList) 2
BulkLoadHFileRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) 2
IOException (java.io.IOException) 1
PrivilegedAction (java.security.PrivilegedAction) 1
List (java.util.List) 1
FileSystem (org.apache.hadoop.fs.FileSystem) 1
Path (org.apache.hadoop.fs.Path) 1
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) 1
User (org.apache.hadoop.hbase.security.User) 1
FsDelegationToken (org.apache.hadoop.hbase.security.token.FsDelegationToken) 1
ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) 1
RegionSpecifier (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier) 1
Pair (org.apache.hadoop.hbase.util.Pair) 1
Text (org.apache.hadoop.io.Text) 1
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation) 1
Token (org.apache.hadoop.security.token.Token) 1