Example 11 with XAttr

use of org.apache.hadoop.fs.XAttr in project hadoop by apache.

the class FSDirEncryptionZoneOp method getFileEncryptionInfo.

/**
   * This function combines the per-file encryption info (obtained
   * from the inode's XAttrs), and the encryption info from its zone, and
   * returns a consolidated FileEncryptionInfo instance. Null is returned
   * for non-encrypted or raw files.
   *
   * @param fsd fsdirectory
   * @param iip inodes in the path containing the file, passed in to
   *            avoid obtaining the list of inodes again
   * @return consolidated file encryption info; null for non-encrypted files
   */
static FileEncryptionInfo getFileEncryptionInfo(final FSDirectory fsd, final INodesInPath iip) throws IOException {
    if (iip.isRaw() || !fsd.ezManager.hasCreatedEncryptionZone() || !iip.getLastINode().isFile()) {
        return null;
    }
    fsd.readLock();
    try {
        EncryptionZone encryptionZone = getEZForPath(fsd, iip);
        if (encryptionZone == null) {
            // not an encrypted file
            return null;
        } else if (encryptionZone.getPath() == null || encryptionZone.getPath().isEmpty()) {
            if (NameNode.LOG.isDebugEnabled()) {
                NameNode.LOG.debug("Encryption zone " + encryptionZone.getPath() + " does not have a valid path.");
            }
        }
        final CryptoProtocolVersion version = encryptionZone.getVersion();
        final CipherSuite suite = encryptionZone.getSuite();
        final String keyName = encryptionZone.getKeyName();
        XAttr fileXAttr = FSDirXAttrOp.unprotectedGetXAttrByPrefixedName(iip, CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
        if (fileXAttr == null) {
            NameNode.LOG.warn("Could not find encryption XAttr for file " + iip.getPath() + " in encryption zone " + encryptionZone.getPath());
            return null;
        }
        try {
            HdfsProtos.PerFileEncryptionInfoProto fileProto = HdfsProtos.PerFileEncryptionInfoProto.parseFrom(fileXAttr.getValue());
            return PBHelperClient.convert(fileProto, suite, version, keyName);
        } catch (InvalidProtocolBufferException e) {
            throw new IOException("Could not parse file encryption info for " + "inode " + iip.getPath(), e);
        }
    } finally {
        fsd.readUnlock();
    }
}
Also used : EncryptionZone(org.apache.hadoop.hdfs.protocol.EncryptionZone) HdfsProtos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos) CryptoProtocolVersion(org.apache.hadoop.crypto.CryptoProtocolVersion) CipherSuite(org.apache.hadoop.crypto.CipherSuite) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) IOException(java.io.IOException) XAttr(org.apache.hadoop.fs.XAttr)
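
The method above runs inside the NameNode; clients never read these crypto XAttrs directly. As a complement, here is a minimal caller-side sketch (an assumption, not part of the Hadoop source above) that asks HdfsAdmin whether a path resolves to an encryption zone, which is essentially the condition getFileEncryptionInfo tests before assembling the FileEncryptionInfo. The NameNode URI and file path are placeholders.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class CheckEncryptionZone {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder URI and path, for illustration only.
        HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://namenode:8020"), conf);
        EncryptionZone zone = admin.getEncryptionZoneForPath(new Path("/secure/data.txt"));
        if (zone == null) {
            System.out.println("not in an encryption zone");
        } else {
            System.out.println("zone " + zone.getPath() + ", key " + zone.getKeyName());
        }
    }
}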

Example 12 with XAttr

use of org.apache.hadoop.fs.XAttr in project hadoop by apache.

the class FSDirErasureCodingOp method setErasureCodingPolicyXAttr.

private static List<XAttr> setErasureCodingPolicyXAttr(final FSNamesystem fsn, final INodesInPath srcIIP, ErasureCodingPolicy ecPolicy) throws IOException {
    FSDirectory fsd = fsn.getFSDirectory();
    assert fsd.hasWriteLock();
    Preconditions.checkNotNull(srcIIP, "INodes cannot be null");
    Preconditions.checkNotNull(ecPolicy, "EC policy cannot be null");
    String src = srcIIP.getPath();
    final INode inode = srcIIP.getLastINode();
    if (inode == null) {
        throw new FileNotFoundException("Path not found: " + srcIIP.getPath());
    }
    if (!inode.isDirectory()) {
        throw new IOException("Attempt to set an erasure coding policy " + "for a file " + src);
    }
    final XAttr ecXAttr;
    DataOutputStream dOut = null;
    try {
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        dOut = new DataOutputStream(bOut);
        WritableUtils.writeString(dOut, ecPolicy.getName());
        ecXAttr = XAttrHelper.buildXAttr(XATTR_ERASURECODING_POLICY, bOut.toByteArray());
    } finally {
        IOUtils.closeStream(dOut);
    }
    // check whether the directory already has an erasure coding policy
    // directly on itself.
    final Boolean hasEcXAttr = getErasureCodingPolicyXAttrForINode(fsn, inode) == null ? false : true;
    final List<XAttr> xattrs = Lists.newArrayListWithCapacity(1);
    xattrs.add(ecXAttr);
    final EnumSet<XAttrSetFlag> flag = hasEcXAttr ? EnumSet.of(XAttrSetFlag.REPLACE) : EnumSet.of(XAttrSetFlag.CREATE);
    FSDirXAttrOp.unprotectedSetXAttrs(fsd, srcIIP, xattrs, flag);
    return xattrs;
}
Also used : XAttrSetFlag(org.apache.hadoop.fs.XAttrSetFlag) DataOutputStream(java.io.DataOutputStream) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) ByteArrayOutputStream(java.io.ByteArrayOutputStream) XAttr(org.apache.hadoop.fs.XAttr)
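
The XAttr value written here is just the policy name encoded with WritableUtils.writeString. A minimal sketch of the inverse, assuming an XAttr produced by the method above (the helper class and method names are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.io.WritableUtils;

final class EcPolicyXAttrDecoder {
    // Hypothetical helper, not part of the Hadoop source shown above.
    static String readPolicyName(XAttr ecXAttr) throws IOException {
        try (DataInputStream in =
                 new DataInputStream(new ByteArrayInputStream(ecXAttr.getValue()))) {
            // Mirrors the WritableUtils.writeString call in setErasureCodingPolicyXAttr.
            return WritableUtils.readString(in);
        }
    }
}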

Example 13 with XAttr

use of org.apache.hadoop.fs.XAttr in project hadoop by apache.

the class EncryptionZoneManager method createEncryptionZone.

/**
   * Create a new encryption zone.
   * <p/>
   * Called while holding the FSDirectory lock.
   */
XAttr createEncryptionZone(INodesInPath srcIIP, CipherSuite suite, CryptoProtocolVersion version, String keyName) throws IOException {
    assert dir.hasWriteLock();
    // Check if src is a valid path for new EZ creation
    if (srcIIP.getLastINode() == null) {
        throw new FileNotFoundException("cannot find " + srcIIP.getPath());
    }
    if (dir.isNonEmptyDirectory(srcIIP)) {
        throw new IOException("Attempt to create an encryption zone for a non-empty directory.");
    }
    INode srcINode = srcIIP.getLastINode();
    if (!srcINode.isDirectory()) {
        throw new IOException("Attempt to create an encryption zone for a file.");
    }
    if (hasCreatedEncryptionZone() && encryptionZones.get(srcINode.getId()) != null) {
        throw new IOException("Directory " + srcIIP.getPath() + " is already an encryption zone.");
    }
    final HdfsProtos.ZoneEncryptionInfoProto proto = PBHelperClient.convert(suite, version, keyName);
    final XAttr ezXAttr = XAttrHelper.buildXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE, proto.toByteArray());
    final List<XAttr> xattrs = Lists.newArrayListWithCapacity(1);
    xattrs.add(ezXAttr);
    // updating the xattr will call addEncryptionZone,
    // done this way to handle edit log loading
    FSDirXAttrOp.unprotectedSetXAttrs(dir, srcIIP, xattrs, EnumSet.of(XAttrSetFlag.CREATE));
    return ezXAttr;
}
Also used : HdfsProtos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) XAttr(org.apache.hadoop.fs.XAttr)
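
For context, callers do not invoke EncryptionZoneManager directly; the usual entry point is HdfsAdmin#createEncryptionZone, which eventually reaches the method above on the NameNode. A hedged usage sketch, assuming the encryption key already exists in the configured KMS (the URI, directory, and key name are placeholders):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class CreateZoneExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf);
        // The zone root must be an existing, empty directory.
        Path zoneDir = new Path("/secure");
        fs.mkdirs(zoneDir);
        HdfsAdmin admin = new HdfsAdmin(fs.getUri(), conf);
        admin.createEncryptionZone(zoneDir, "myKey");
    }
}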

Example 14 with XAttr

use of org.apache.hadoop.fs.XAttr in project hadoop by apache.

the class DFSClient method getXAttr.

public byte[] getXAttr(String src, String name) throws IOException {
    checkOpen();
    try (TraceScope ignored = newPathTraceScope("getXAttr", src)) {
        final List<XAttr> xAttrs = XAttrHelper.buildXAttrAsList(name);
        final List<XAttr> result = namenode.getXAttrs(src, xAttrs);
        return XAttrHelper.getFirstXAttrValue(result);
    } catch (RemoteException re) {
        throw re.unwrapRemoteException(AccessControlException.class, FileNotFoundException.class, UnresolvedPathException.class);
    }
}
Also used : TraceScope(org.apache.htrace.core.TraceScope) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(org.apache.hadoop.security.AccessControlException) SnapshotAccessControlException(org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException) RemoteException(org.apache.hadoop.ipc.RemoteException) UnresolvedPathException(org.apache.hadoop.hdfs.protocol.UnresolvedPathException) XAttr(org.apache.hadoop.fs.XAttr)
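
DFSClient#getXAttr sits behind the public FileSystem API. A short round-trip sketch using that public surface (the path and attribute name are placeholders; user-visible names need a namespace prefix such as "user."):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class XAttrRoundTrip {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path file = new Path("/tmp/example.txt");
        // setXAttr/getXAttr ultimately delegate to DFSClient on HDFS.
        fs.setXAttr(file, "user.owner-team", "analytics".getBytes(StandardCharsets.UTF_8));
        byte[] value = fs.getXAttr(file, "user.owner-team");
        System.out.println(new String(value, StandardCharsets.UTF_8));
    }
}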

Example 15 with XAttr

use of org.apache.hadoop.fs.XAttr in project hadoop by apache.

the class XAttrFormat method toBytes.

/**
   * Pack the XAttrs to byte[].
   * 
   * @param xAttrs the XAttrs
   * @return the packed bytes
   */
static byte[] toBytes(List<XAttr> xAttrs) {
    if (xAttrs == null || xAttrs.isEmpty()) {
        return null;
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        for (XAttr a : xAttrs) {
            int nsOrd = a.getNameSpace().ordinal();
            Preconditions.checkArgument(nsOrd < 8, "Too many namespaces.");
            int nid = XAttrStorage.getNameSerialNumber(a.getName());
            Preconditions.checkArgument(nid < XATTR_NAME_ID_MAX, "Too large serial number of the xattr name");
            // big-endian
            int v = ((nsOrd & XATTR_NAMESPACE_MASK) << XATTR_NAMESPACE_OFFSET) | (nid & XATTR_NAME_MASK);
            out.write(Ints.toByteArray(v));
            int vlen = a.getValue() == null ? 0 : a.getValue().length;
            Preconditions.checkArgument(vlen < XATTR_VALUE_LEN_MAX, "The length of xAttr values is too long.");
            out.write((byte) (vlen >> 8));
            out.write((byte) (vlen));
            if (vlen > 0) {
                out.write(a.getValue());
            }
        }
    } catch (IOException e) {
    // in fact, no exception
    }
    return out.toByteArray();
}
Also used : ByteArrayOutputStream(java.io.ByteArrayOutputStream) IOException(java.io.IOException) XAttr(org.apache.hadoop.fs.XAttr)
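
For clarity, the inverse of toBytes walks the same layout: a 4-byte big-endian header carrying the namespace ordinal and the name serial number, then a 2-byte big-endian value length, then the value bytes. The sketch below only dumps those raw fields; the mask and offset constants are illustrative placeholders, since the authoritative values (and the name-id-to-string lookup) live in XAttrFormat and XAttrStorage.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

final class PackedXAttrReader {
    // Placeholder constants for illustration; see XAttrFormat for the real ones.
    private static final int NAMESPACE_OFFSET = 29;
    private static final int NAMESPACE_MASK = 0x7;
    private static final int NAME_MASK = (1 << NAMESPACE_OFFSET) - 1;

    static void dump(byte[] packed) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(packed));
        while (in.available() > 0) {
            // 4-byte big-endian header written via Ints.toByteArray in toBytes.
            int header = in.readInt();
            int nsOrd = (header >>> NAMESPACE_OFFSET) & NAMESPACE_MASK;
            int nameId = header & NAME_MASK;
            // 2-byte big-endian value length, then the value itself.
            int vlen = in.readUnsignedShort();
            byte[] value = new byte[vlen];
            in.readFully(value);
            System.out.println("ns=" + nsOrd + " nameId=" + nameId + " valueLen=" + vlen);
        }
    }
}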

Aggregations

XAttr (org.apache.hadoop.fs.XAttr): 43
IOException (java.io.IOException): 13
Test (org.junit.Test): 7
HdfsProtos (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos): 5
FileNotFoundException (java.io.FileNotFoundException): 4
XAttrProto (org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.XAttrProto): 4
InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException): 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2
ArrayList (java.util.ArrayList): 2
HadoopIllegalArgumentException (org.apache.hadoop.HadoopIllegalArgumentException): 2
CipherSuite (org.apache.hadoop.crypto.CipherSuite): 2
CryptoProtocolVersion (org.apache.hadoop.crypto.CryptoProtocolVersion): 2
AccessControlException (org.apache.hadoop.security.AccessControlException): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
ObjectReader (com.fasterxml.jackson.databind.ObjectReader): 1
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 1
ImmutableList (com.google.common.collect.ImmutableList): 1
ByteArrayInputStream (java.io.ByteArrayInputStream): 1
DataInputStream (java.io.DataInputStream): 1
DataOutputStream (java.io.DataOutputStream): 1