Search in sources :

Example 1 with CryptoModuleParameters

use of org.apache.accumulo.core.security.crypto.CryptoModuleParameters in project accumulo by apache.

Example source: class DfsLogger, method open.

/**
 * Opens a Write-Ahead Log file and writes the necessary header information and OPEN entry to the file. The file is ready to be used for ingest if this method
 * returns successfully. If an exception is thrown from this method, it is the caller's responsibility to ensure that {@link #close()} is called to prevent
 * leaking the file handle and/or syncing thread.
 *
 * @param address
 *          The address of the host using this WAL
 * @throws IOException
 *           if the WAL file cannot be created, the header or crypto parameters cannot be written, or the initial OPEN entry fails
 */
public synchronized void open(String address) throws IOException {
    String filename = UUID.randomUUID().toString();
    log.debug("Address is {}", address);
    // address is host:port; ':' is replaced with '+' to form a filesystem-safe directory name
    String logger = Joiner.on("+").join(address.split(":"));
    log.debug("DfsLogger.open() begin");
    VolumeManager fs = conf.getFileSystem();
    VolumeChooserEnvironment chooserEnv = new VolumeChooserEnvironment(ChooserScope.LOGGER);
    logPath = fs.choose(chooserEnv, ServerConstants.getBaseUris()) + Path.SEPARATOR + ServerConstants.WAL_DIR + Path.SEPARATOR + logger + Path.SEPARATOR + filename;
    metaReference = toString();
    LoggerOperation op = null;
    try {
        short replication = (short) conf.getConfiguration().getCount(Property.TSERV_WAL_REPLICATION);
        if (replication == 0)
            replication = fs.getDefaultReplication(new Path(logPath));
        long blockSize = getWalBlockSize(conf.getConfiguration());
        if (conf.getConfiguration().getBoolean(Property.TSERV_WAL_SYNC))
            logFile = fs.createSyncable(new Path(logPath), 0, replication, blockSize);
        else
            logFile = fs.create(new Path(logPath), true, 0, replication, blockSize);
        // hsync/hflush are looked up reflectively on the concrete stream class
        // (presumably for compatibility across Hadoop stream implementations — TODO confirm)
        sync = logFile.getClass().getMethod("hsync");
        flush = logFile.getClass().getMethod("hflush");
        // Initialize the crypto operations.
        org.apache.accumulo.core.security.crypto.CryptoModule cryptoModule = org.apache.accumulo.core.security.crypto.CryptoModuleFactory.getCryptoModule(conf.getConfiguration().get(Property.CRYPTO_MODULE_CLASS));
        // Initialize the log file with a header and the crypto params used to set up this log file.
        logFile.write(LOG_FILE_HEADER_V3.getBytes(UTF_8));
        CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf.getConfiguration());
        // Immediately update to the correct cipher. Doing this here keeps the CryptoModule independent of the writers using it
        if (params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()) != null && !params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()).equals("")) {
            params.setCipherSuite(params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()));
        }
        NoFlushOutputStream nfos = new NoFlushOutputStream(logFile);
        params.setPlaintextOutputStream(nfos);
        // In order to bootstrap the reading of this file later, we have to record the CryptoModule that was used to encipher it here,
        // so that that crypto module can re-read its own parameters.
        logFile.writeUTF(conf.getConfiguration().get(Property.CRYPTO_MODULE_CLASS));
        params = cryptoModule.getEncryptingOutputStream(params);
        OutputStream encipheringOutputStream = params.getEncryptedOutputStream();
        // If the module returned the same stream we passed in, no encryption is in effect;
        // otherwise wrap the enciphering stream so callers can write data through it.
        if (encipheringOutputStream == nfos) {
            log.debug("No enciphering, using raw output stream");
            encryptingLogFile = nfos;
        } else {
            log.debug("Enciphering found, wrapping in DataOutputStream");
            encryptingLogFile = new DataOutputStream(encipheringOutputStream);
        }
        // Write the OPEN entry so readers can recognize a properly initialized WAL.
        LogFileKey key = new LogFileKey();
        key.event = OPEN;
        key.tserverSession = filename;
        key.filename = filename;
        op = logFileData(Collections.singletonList(new Pair<>(key, EMPTY)), Durability.SYNC);
    } catch (Exception ex) {
        // Best-effort cleanup: a failure in close() must not mask the original exception,
        // so record it as suppressed instead of letting it propagate.
        if (logFile != null) {
            try {
                logFile.close();
            } catch (Exception closeEx) {
                ex.addSuppressed(closeEx);
            }
        }
        logFile = null;
        encryptingLogFile = null;
        // Rethrow IOExceptions as-is rather than wrapping them in another IOException,
        // preserving the original type for callers.
        if (ex instanceof IOException)
            throw (IOException) ex;
        throw new IOException(ex);
    }
    syncThread = new Daemon(new LoggingRunnable(log, new LogSyncingTask()));
    syncThread.setName("Accumulo WALog thread " + toString());
    syncThread.start();
    // Wait for the OPEN entry to be durably synced before declaring the WAL ready.
    op.await();
    log.debug("Got new write-ahead log: {}", this);
}
Also used : Path(org.apache.hadoop.fs.Path) VolumeManager(org.apache.accumulo.server.fs.VolumeManager) DataOutputStream(java.io.DataOutputStream) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) DataOutputStream(java.io.DataOutputStream) NoFlushOutputStream(org.apache.accumulo.core.security.crypto.NoFlushOutputStream) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) DFSOutputStream(org.apache.hadoop.hdfs.DFSOutputStream) OutputStream(java.io.OutputStream) LogFileKey(org.apache.accumulo.tserver.logger.LogFileKey) IOException(java.io.IOException) EOFException(java.io.EOFException) ClosedChannelException(java.nio.channels.ClosedChannelException) IOException(java.io.IOException) CryptoModule(org.apache.accumulo.core.security.crypto.CryptoModule) LoggingRunnable(org.apache.accumulo.fate.util.LoggingRunnable) CryptoModuleParameters(org.apache.accumulo.core.security.crypto.CryptoModuleParameters) Daemon(org.apache.accumulo.core.util.Daemon) VolumeChooserEnvironment(org.apache.accumulo.server.fs.VolumeChooserEnvironment) NoFlushOutputStream(org.apache.accumulo.core.security.crypto.NoFlushOutputStream)

Example 2 with CryptoModuleParameters

use of org.apache.accumulo.core.security.crypto.CryptoModuleParameters in project accumulo by apache.

Example source: class DfsLogger, method readHeaderAndReturnStream.

/**
 * Reads the WAL header from {@code input} and returns both the raw stream and a stream that
 * yields decrypted (plaintext) log data. Supports the V3 header (crypto module class name is
 * recorded in the file), the legacy V2 header (1.5-era files with inline crypto options), and
 * headerless files (returned as-is).
 *
 * @param input
 *          the raw WAL file stream, positioned at the start of the file
 * @param conf
 *          configuration used to build the crypto parameters
 * @return the raw and decrypting input streams bundled together
 * @throws LogHeaderIncompleteException
 *           if EOF is hit while reading the header (e.g. a TabletServer died mid-write)
 * @throws IOException
 *           on any other read failure
 */
public static DFSLoggerInputStreams readHeaderAndReturnStream(FSDataInputStream input, AccumuloConfiguration conf) throws IOException {
    DataInputStream decryptingInput = null;
    byte[] magic = DfsLogger.LOG_FILE_HEADER_V3.getBytes(UTF_8);
    byte[] magicBuffer = new byte[magic.length];
    try {
        input.readFully(magicBuffer);
        if (Arrays.equals(magicBuffer, magic)) {
            // V3 header: the crypto module class name follows the magic bytes; the module
            // reads any additional parameters it needs from the underlying stream.
            String cryptoModuleClassname = input.readUTF();
            CryptoModule cryptoModule = CryptoModuleFactory.getCryptoModule(cryptoModuleClassname);
            // Create the parameters and set the input stream into those parameters
            CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
            params.setEncryptedInputStream(input);
            // Create the plaintext input stream from the encrypted one
            params = cryptoModule.getDecryptingInputStream(params);
            decryptingInput = toDataInputStream(params);
        } else {
            input.seek(0);
            byte[] magicV2 = DfsLogger.LOG_FILE_HEADER_V2.getBytes(UTF_8);
            byte[] magicBufferV2 = new byte[magicV2.length];
            input.readFully(magicBufferV2);
            if (Arrays.equals(magicBufferV2, magicV2)) {
                // Log files from 1.5 dump their options in raw to the logger files. Since we don't know the class
                // that needs to read those files, we can make a couple of basic assumptions. Either it's going to be
                // the NullCryptoModule (no crypto) or the DefaultCryptoModule.
                // If it's null, we won't have any parameters whatsoever. First, let's attempt to read
                // parameters
                Map<String, String> opts = new HashMap<>();
                int count = input.readInt();
                for (int i = 0; i < count; i++) {
                    String key = input.readUTF();
                    String value = input.readUTF();
                    opts.put(key, value);
                }
                if (opts.size() == 0) {
                    // NullCryptoModule, we're done
                    decryptingInput = input;
                } else {
                    // The DefaultCryptoModule will want to read the parameters from the underlying file, so we will put the file back to that spot.
                    org.apache.accumulo.core.security.crypto.CryptoModule cryptoModule = org.apache.accumulo.core.security.crypto.CryptoModuleFactory.getCryptoModule(DefaultCryptoModule.class.getName());
                    CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
                    // go back to the beginning, but skip over magicV2 already checked earlier
                    input.seek(magicV2.length);
                    params.setEncryptedInputStream(input);
                    params = cryptoModule.getDecryptingInputStream(params);
                    decryptingInput = toDataInputStream(params);
                }
            } else {
                // No recognizable header: treat the whole file as unencrypted data.
                input.seek(0);
                decryptingInput = input;
            }
        }
    } catch (EOFException e) {
        log.warn("Got EOFException trying to read WAL header information, assuming the rest of the file has no data.");
        // A TabletServer might have died before the (complete) header was written
        throw new LogHeaderIncompleteException(e);
    }
    return new DFSLoggerInputStreams(input, decryptingInput);
}

/**
 * Returns the plaintext stream from {@code params} as a {@link DataInputStream},
 * wrapping it only when it is not one already.
 */
private static DataInputStream toDataInputStream(CryptoModuleParameters params) {
    if (params.getPlaintextInputStream() instanceof DataInputStream) {
        return (DataInputStream) params.getPlaintextInputStream();
    }
    return new DataInputStream(params.getPlaintextInputStream());
}
Also used : CryptoModule(org.apache.accumulo.core.security.crypto.CryptoModule) DefaultCryptoModule(org.apache.accumulo.core.security.crypto.DefaultCryptoModule) HashMap(java.util.HashMap) DefaultCryptoModule(org.apache.accumulo.core.security.crypto.DefaultCryptoModule) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) DataInputStream(java.io.DataInputStream) CryptoModule(org.apache.accumulo.core.security.crypto.CryptoModule) CryptoModuleParameters(org.apache.accumulo.core.security.crypto.CryptoModuleParameters) EOFException(java.io.EOFException)

Aggregations

EOFException (java.io.EOFException)2 CryptoModule (org.apache.accumulo.core.security.crypto.CryptoModule)2 CryptoModuleParameters (org.apache.accumulo.core.security.crypto.CryptoModuleParameters)2 DataInputStream (java.io.DataInputStream)1 DataOutputStream (java.io.DataOutputStream)1 IOException (java.io.IOException)1 OutputStream (java.io.OutputStream)1 ClosedChannelException (java.nio.channels.ClosedChannelException)1 HashMap (java.util.HashMap)1 DefaultCryptoModule (org.apache.accumulo.core.security.crypto.DefaultCryptoModule)1 NoFlushOutputStream (org.apache.accumulo.core.security.crypto.NoFlushOutputStream)1 Daemon (org.apache.accumulo.core.util.Daemon)1 LoggingRunnable (org.apache.accumulo.fate.util.LoggingRunnable)1 VolumeChooserEnvironment (org.apache.accumulo.server.fs.VolumeChooserEnvironment)1 VolumeManager (org.apache.accumulo.server.fs.VolumeManager)1 LogFileKey (org.apache.accumulo.tserver.logger.LogFileKey)1 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)1 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)1 Path (org.apache.hadoop.fs.Path)1 DFSOutputStream (org.apache.hadoop.hdfs.DFSOutputStream)1