Search in sources:

Example 1 with DefaultCAProfile

Use of org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultCAProfile in the Apache Ozone project.

From the class HASecurityUtils, the method getPrimarySCMSelfSignedCert:

/**
 * For primary SCM get sub-ca signed certificate and root CA certificate by
 * root CA certificate server and store it using certificate client.
 */
private static void getPrimarySCMSelfSignedCert(CertificateClient client, OzoneConfiguration config, SCMStorageConfig scmStorageConfig, InetSocketAddress scmAddress) {
    try {
        CertificateServer rootCAServer = initializeRootCertificateServer(config, null, scmStorageConfig, new DefaultCAProfile());
        PKCS10CertificationRequest csr = generateCSR(client, scmStorageConfig, config, scmAddress);
        X509CertificateHolder subSCMCertHolder = rootCAServer.requestCertificate(csr, KERBEROS_TRUSTED, SCM).get();
        X509CertificateHolder rootCACertificateHolder = rootCAServer.getCACertificate();
        String pemEncodedCert = CertificateCodec.getPEMEncodedString(subSCMCertHolder);
        String pemEncodedRootCert = CertificateCodec.getPEMEncodedString(rootCACertificateHolder);
        client.storeCertificate(pemEncodedRootCert, true, true);
        client.storeCertificate(pemEncodedCert, true);
        persistSubCACertificate(config, client, subSCMCertHolder);
        // Persist scm cert serial ID.
        scmStorageConfig.setScmCertSerialId(subSCMCertHolder.getSerialNumber().toString());
    } catch (InterruptedException | ExecutionException | IOException | CertificateException e) {
        LOG.error("Error while fetching/storing SCM signed certificate.", e);
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
Also used : PKCS10CertificationRequest(org.bouncycastle.pkcs.PKCS10CertificationRequest) X509CertificateHolder(org.bouncycastle.cert.X509CertificateHolder) CertificateServer(org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateServer) DefaultCAProfile(org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultCAProfile) CertificateException(java.security.cert.CertificateException) CertificateSignRequest.getEncodedString(org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest.getEncodedString) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException)

Example 2 with DefaultCAProfile

Use of org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultCAProfile in the Apache Ozone project.

From the class StorageContainerManager, the method initializeCAnSecurityProtocol:

/**
 * If security is enabled we need to have the Security Protocol and a
 * default CA. This function initializes those values based on the
 * configurator.
 *
 * @param conf - Config
 * @param configurator - configurator
 * @throws IOException - on Failure
 * @throws AuthenticationException - on Failure
 */
private void initializeCAnSecurityProtocol(OzoneConfiguration conf, SCMConfigurator configurator) throws IOException {
    // So it is easy to use different Certificate Servers if needed.
    if (this.scmMetadataStore == null) {
        LOG.error("Cannot initialize Certificate Server without a valid meta " + "data layer.");
        throw new SCMException("Cannot initialize CA without a valid metadata " + "store", ResultCodes.SCM_NOT_INITIALIZED);
    }
    certificateStore = new SCMCertStore.Builder().setMetadaStore(scmMetadataStore).setRatisServer(scmHAManager.getRatisServer()).setCRLSequenceId(getLastSequenceIdForCRL()).build();
    final CertificateServer scmCertificateServer;
    final CertificateServer rootCertificateServer;
    // performed init with SCM HA version code.
    if (scmStorageConfig.checkPrimarySCMIdInitialized()) {
        // Start specific instance SCM CA server.
        String subject = SCM_SUB_CA_PREFIX + InetAddress.getLocalHost().getHostName();
        if (configurator.getCertificateServer() != null) {
            scmCertificateServer = configurator.getCertificateServer();
        } else {
            scmCertificateServer = new DefaultCAServer(subject, scmStorageConfig.getClusterID(), scmStorageConfig.getScmId(), certificateStore, new DefaultProfile(), scmCertificateClient.getComponentName());
            // INTERMEDIARY_CA which issues certs to DN and OM.
            scmCertificateServer.init(new SecurityConfig(configuration), CertificateServer.CAType.INTERMEDIARY_CA);
        }
        if (primaryScmNodeId.equals(scmStorageConfig.getScmId())) {
            if (configurator.getCertificateServer() != null) {
                rootCertificateServer = configurator.getCertificateServer();
            } else {
                rootCertificateServer = HASecurityUtils.initializeRootCertificateServer(conf, certificateStore, scmStorageConfig, new DefaultCAProfile());
            }
            persistPrimarySCMCerts();
        } else {
            rootCertificateServer = null;
        }
    } else {
        // On a upgraded cluster primary scm nodeId will not be set as init will
        // not be run again after upgrade. So for a upgraded cluster where init
        // has not happened again we will have setup like before where it has
        // one CA server which is issuing certificates to DN and OM.
        rootCertificateServer = HASecurityUtils.initializeRootCertificateServer(conf, certificateStore, scmStorageConfig, new DefaultProfile());
        scmCertificateServer = rootCertificateServer;
    }
    // We need to pass getCACertificate as rootCA certificate,
    // as for SCM CA is root-CA.
    securityProtocolServer = new SCMSecurityProtocolServer(conf, rootCertificateServer, scmCertificateServer, scmCertificateClient != null ? scmCertificateClient.getCACertificate() : null, this);
    if (securityConfig.isContainerTokenEnabled()) {
        containerTokenMgr = createContainerTokenSecretManager(configuration);
    }
}
Also used : DefaultProfile(org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile) SecurityConfig(org.apache.hadoop.hdds.security.x509.SecurityConfig) CertificateServer(org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateServer) DefaultCAServer(org.apache.hadoop.hdds.security.x509.certificate.authority.DefaultCAServer) DefaultCAProfile(org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultCAProfile) SCMException(org.apache.hadoop.hdds.scm.exceptions.SCMException)

Aggregations

CertificateServer (org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateServer)2 DefaultCAProfile (org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultCAProfile)2 IOException (java.io.IOException)1 CertificateException (java.security.cert.CertificateException)1 ExecutionException (java.util.concurrent.ExecutionException)1 SCMException (org.apache.hadoop.hdds.scm.exceptions.SCMException)1 SecurityConfig (org.apache.hadoop.hdds.security.x509.SecurityConfig)1 DefaultCAServer (org.apache.hadoop.hdds.security.x509.certificate.authority.DefaultCAServer)1 DefaultProfile (org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile)1 CertificateSignRequest.getEncodedString (org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest.getEncodedString)1 X509CertificateHolder (org.bouncycastle.cert.X509CertificateHolder)1 PKCS10CertificationRequest (org.bouncycastle.pkcs.PKCS10CertificationRequest)1