Use of org.apache.accumulo.core.security.crypto.CryptoModule in project accumulo by apache.
The class DfsLogger, method open().
/**
 * Opens a Write-Ahead Log file and writes the necessary header information and OPEN entry to the file. The file is ready to be used for ingest if this method
 * returns successfully. If an exception is thrown from this method, it is the caller's responsibility to ensure that {@link #close()} is called to prevent
 * leaking the file handle and/or syncing thread.
 *
 * @param address
 *          The address of the host using this WAL
 */
public synchronized void open(String address) throws IOException {
  String filename = UUID.randomUUID().toString();
  log.debug("Address is {}", address);
  String logger = Joiner.on("+").join(address.split(":"));

  log.debug("DfsLogger.open() begin");
  VolumeManager fs = conf.getFileSystem();

  VolumeChooserEnvironment chooserEnv = new VolumeChooserEnvironment(ChooserScope.LOGGER);
  logPath = fs.choose(chooserEnv, ServerConstants.getBaseUris()) + Path.SEPARATOR + ServerConstants.WAL_DIR + Path.SEPARATOR + logger + Path.SEPARATOR
      + filename;

  metaReference = toString();
  LoggerOperation op = null;
  try {
    short replication = (short) conf.getConfiguration().getCount(Property.TSERV_WAL_REPLICATION);
    if (replication == 0)
      replication = fs.getDefaultReplication(new Path(logPath));
    long blockSize = getWalBlockSize(conf.getConfiguration());
    if (conf.getConfiguration().getBoolean(Property.TSERV_WAL_SYNC))
      logFile = fs.createSyncable(new Path(logPath), 0, replication, blockSize);
    else
      logFile = fs.create(new Path(logPath), true, 0, replication, blockSize);
    sync = logFile.getClass().getMethod("hsync");
    flush = logFile.getClass().getMethod("hflush");

    // Initialize the crypto operations.
    org.apache.accumulo.core.security.crypto.CryptoModule cryptoModule = org.apache.accumulo.core.security.crypto.CryptoModuleFactory
        .getCryptoModule(conf.getConfiguration().get(Property.CRYPTO_MODULE_CLASS));

    // Initialize the log file with a header and the crypto params used to set up this log file.
    logFile.write(LOG_FILE_HEADER_V3.getBytes(UTF_8));

    CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf.getConfiguration());

    // Immediately update to the correct cipher. Doing this here keeps the CryptoModule independent of the writers using it.
    if (params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()) != null
        && !params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()).equals("")) {
      params.setCipherSuite(params.getAllOptions().get(Property.CRYPTO_WAL_CIPHER_SUITE.getKey()));
    }

    NoFlushOutputStream nfos = new NoFlushOutputStream(logFile);
    params.setPlaintextOutputStream(nfos);

    // In order to bootstrap the reading of this file later, we have to record the CryptoModule that was used to encipher it here,
    // so that that crypto module can re-read its own parameters.
    logFile.writeUTF(conf.getConfiguration().get(Property.CRYPTO_MODULE_CLASS));

    params = cryptoModule.getEncryptingOutputStream(params);
    OutputStream encipheringOutputStream = params.getEncryptedOutputStream();

    // If the crypto module handed back our original stream unchanged, use it as-is; otherwise wrap the enciphering stream in
    // another data OutputStream.
    if (encipheringOutputStream == nfos) {
      log.debug("No enciphering, using raw output stream");
      encryptingLogFile = nfos;
    } else {
      log.debug("Enciphering found, wrapping in DataOutputStream");
      encryptingLogFile = new DataOutputStream(encipheringOutputStream);
    }

    LogFileKey key = new LogFileKey();
    key.event = OPEN;
    key.tserverSession = filename;
    key.filename = filename;
    op = logFileData(Collections.singletonList(new Pair<>(key, EMPTY)), Durability.SYNC);
  } catch (Exception ex) {
    if (logFile != null)
      logFile.close();
    logFile = null;
    encryptingLogFile = null;
    throw new IOException(ex);
  }

  syncThread = new Daemon(new LoggingRunnable(log, new LogSyncingTask()));
  syncThread.setName("Accumulo WALog thread " + toString());
  syncThread.start();
  op.await();

  log.debug("Got new write-ahead log: {}", this);
}
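
The crypto wiring in open() can be distilled into a short write-side helper. The sketch below is an illustration, not Accumulo code: it reuses only the CryptoModule, CryptoModuleFactory, and CryptoModuleParameters calls shown in the method above, while the class name WalCryptoWriteSketch, the method wrapForWriting, and the assumption that the caller supplies an AccumuloConfiguration and a plaintext OutputStream are all hypothetical.

import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.security.crypto.CryptoModule;
import org.apache.accumulo.core.security.crypto.CryptoModuleFactory;
import org.apache.accumulo.core.security.crypto.CryptoModuleParameters;

public class WalCryptoWriteSketch {

  /**
   * Hypothetical helper mirroring the write-side pattern in DfsLogger.open():
   * look up the configured CryptoModule, attach the plaintext sink via the
   * parameters object, and write through whatever stream the module returns.
   */
  public static DataOutputStream wrapForWriting(OutputStream plaintext, AccumuloConfiguration conf) throws IOException {
    // Instantiate the crypto module named by Property.CRYPTO_MODULE_CLASS.
    CryptoModule cryptoModule = CryptoModuleFactory.getCryptoModule(conf.get(Property.CRYPTO_MODULE_CLASS));

    // Build the parameters from the same configuration and point them at the plaintext stream.
    CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
    params.setPlaintextOutputStream(plaintext);

    // Ask the module for the enciphering stream; with no crypto configured this is the original stream.
    params = cryptoModule.getEncryptingOutputStream(params);
    OutputStream enciphering = params.getEncryptedOutputStream();

    // As in open(), avoid double-wrapping when the module handed our stream back unchanged.
    if (enciphering == plaintext && plaintext instanceof DataOutputStream) {
      return (DataOutputStream) plaintext;
    }
    return new DataOutputStream(enciphering);
  }
}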
Use of org.apache.accumulo.core.security.crypto.CryptoModule in project accumulo by apache.
The class DfsLogger, method readHeaderAndReturnStream().
public static DFSLoggerInputStreams readHeaderAndReturnStream(FSDataInputStream input, AccumuloConfiguration conf) throws IOException {
  DataInputStream decryptingInput = null;

  byte[] magic = DfsLogger.LOG_FILE_HEADER_V3.getBytes(UTF_8);
  byte[] magicBuffer = new byte[magic.length];
  try {
    input.readFully(magicBuffer);
    if (Arrays.equals(magicBuffer, magic)) {
      // Read the crypto module class name recorded in the header so the module can read any
      // additional parameters it needs from the underlying stream.
      String cryptoModuleClassname = input.readUTF();
      CryptoModule cryptoModule = CryptoModuleFactory.getCryptoModule(cryptoModuleClassname);

      // Create the parameters and set the input stream into those parameters
      CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
      params.setEncryptedInputStream(input);

      // Create the plaintext input stream from the encrypted one
      params = cryptoModule.getDecryptingInputStream(params);

      if (params.getPlaintextInputStream() instanceof DataInputStream) {
        decryptingInput = (DataInputStream) params.getPlaintextInputStream();
      } else {
        decryptingInput = new DataInputStream(params.getPlaintextInputStream());
      }
    } else {
      input.seek(0);
      byte[] magicV2 = DfsLogger.LOG_FILE_HEADER_V2.getBytes(UTF_8);
      byte[] magicBufferV2 = new byte[magicV2.length];
      input.readFully(magicBufferV2);

      if (Arrays.equals(magicBufferV2, magicV2)) {
        // Log files from 1.5 dump their options in raw to the logger files. Since we don't know the class
        // that needs to read those files, we can make a couple of basic assumptions. Either it's going to be
        // the NullCryptoModule (no crypto) or the DefaultCryptoModule.
        // If it's null, we won't have any parameters whatsoever. First, let's attempt to read
        // parameters
        Map<String,String> opts = new HashMap<>();
        int count = input.readInt();
        for (int i = 0; i < count; i++) {
          String key = input.readUTF();
          String value = input.readUTF();
          opts.put(key, value);
        }

        if (opts.size() == 0) {
          // NullCryptoModule, we're done
          decryptingInput = input;
        } else {
          // The DefaultCryptoModule will want to read the parameters from the underlying file, so we will put the file back to that spot.
          org.apache.accumulo.core.security.crypto.CryptoModule cryptoModule = org.apache.accumulo.core.security.crypto.CryptoModuleFactory
              .getCryptoModule(DefaultCryptoModule.class.getName());

          CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);

          // go back to the beginning, but skip over magicV2 already checked earlier
          input.seek(magicV2.length);
          params.setEncryptedInputStream(input);

          params = cryptoModule.getDecryptingInputStream(params);
          if (params.getPlaintextInputStream() instanceof DataInputStream) {
            decryptingInput = (DataInputStream) params.getPlaintextInputStream();
          } else {
            decryptingInput = new DataInputStream(params.getPlaintextInputStream());
          }
        }
      } else {
        input.seek(0);
        decryptingInput = input;
      }
    }
  } catch (EOFException e) {
    log.warn("Got EOFException trying to read WAL header information, assuming the rest of the file has no data.");
    // A TabletServer might have died before the (complete) header was written
    throw new LogHeaderIncompleteException(e);
  }

  return new DFSLoggerInputStreams(input, decryptingInput);
}
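
For the common V3 path, the read-side handshake reduces to a few calls. The sketch below is an illustrative companion to readHeaderAndReturnStream(), not part of Accumulo: it assumes the caller has already read and verified LOG_FILE_HEADER_V3, the class name WalCryptoReadSketch and method decryptV3Body are hypothetical, and only the factory and parameter calls used in the method above are relied on.

import java.io.DataInputStream;
import java.io.IOException;

import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.security.crypto.CryptoModule;
import org.apache.accumulo.core.security.crypto.CryptoModuleFactory;
import org.apache.accumulo.core.security.crypto.CryptoModuleParameters;
import org.apache.hadoop.fs.FSDataInputStream;

public class WalCryptoReadSketch {

  /**
   * Hypothetical helper mirroring the V3 branch of readHeaderAndReturnStream():
   * the stream is positioned just past the V3 magic bytes, so the next UTF entry
   * names the CryptoModule that enciphered the rest of the file.
   */
  public static DataInputStream decryptV3Body(FSDataInputStream input, AccumuloConfiguration conf) throws IOException {
    // The header records which CryptoModule enciphered the file; re-instantiate it by class name.
    CryptoModule cryptoModule = CryptoModuleFactory.getCryptoModule(input.readUTF());

    // Hand the still-encrypted stream to the module via the parameters object.
    CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
    params.setEncryptedInputStream(input);

    // The module reads its own parameters from the stream and returns a plaintext view.
    params = cryptoModule.getDecryptingInputStream(params);

    // Reuse the stream if it is already a DataInputStream (e.g. no crypto configured), otherwise wrap it.
    if (params.getPlaintextInputStream() instanceof DataInputStream) {
      return (DataInputStream) params.getPlaintextInputStream();
    }
    return new DataInputStream(params.getPlaintextInputStream());
  }
}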