use of java.io.FilterOutputStream in project candlepin by candlepin.
the class CrlFileUtil method updateCRLFile.
/**
 * Updates the specified CRL file by adding or removing entries. If both lists are either null
 * or empty, the CRL file will not be modified by this method. If the file does not exist or
 * appears to be empty, it will be initialized before processing the lists.
 *
 * @param file
 *  The CRL file to update
 *
 * @param revoke
 *  A collection of serials to revoke (add)
 *
 * @param unrevoke
 *  A collection of serials to unrevoke (remove)
 *
 * @throws IOException
 *  if an IO error occurs while updating the CRL file
 */
public void updateCRLFile(File file, final Collection<BigInteger> revoke, final Collection<BigInteger> unrevoke) throws IOException {
    if (!file.exists() || file.length() == 0) {
        this.initializeCRLFile(file, revoke);
        return;
    }

    File strippedFile = stripCRLFile(file);

    InputStream input = null;
    InputStream reaper = null;
    BufferedOutputStream output = null;
    OutputStream filter = null;
    OutputStream encoder = null;

    try {
        // Impl note:
        // Due to the way the X509CRLStreamWriter works (and the DER format in general), we have
        // to make two passes through the file.
        input = new Base64InputStream(new FileInputStream(strippedFile));
        reaper = new Base64InputStream(new FileInputStream(strippedFile));

        // Note: This will break if we ever stop using RSA keys
        PrivateKey key = this.certificateReader.getCaKey();
        X509CRLStreamWriter writer = new X509CRLStreamWriter(input, (RSAPrivateKey) key, this.certificateReader.getCACert());

        // Add new entries
        if (revoke != null) {
            Date now = new Date();

            for (BigInteger serial : revoke) {
                writer.add(serial, now, CRLReason.privilegeWithdrawn);
            }
        }
        // Remove entries. We check the collection explicitly here, or we could miss cases
        // where we have entries to remove, but nothing to add.
        if (unrevoke != null && !unrevoke.isEmpty()) {
            writer.preScan(reaper, new CRLEntryValidator() {
                public boolean shouldDelete(CRLEntry entry) {
                    BigInteger certSerial = entry.getUserCertificate().getValue();
                    return unrevoke.contains(certSerial);
                }
            });
        } else {
            writer.preScan(reaper);
        }

        writer.setSigningAlgorithm(PKIUtility.SIGNATURE_ALGO);

        // Verify we actually have work to do now
        if (writer.hasChangesQueued()) {
            output = new BufferedOutputStream(new FileOutputStream(file));

            filter = new FilterOutputStream(output) {
                private boolean needsLineBreak = true;

                public void write(int b) throws IOException {
                    this.needsLineBreak = (b != (byte) '\n');
                    super.write(b);
                }

                public void write(byte[] buffer) throws IOException {
                    this.needsLineBreak = (buffer[buffer.length - 1] != (byte) '\n');
                    super.write(buffer);
                }

                public void write(byte[] buffer, int off, int len) throws IOException {
                    this.needsLineBreak = (buffer[off + len - 1] != (byte) '\n');
                    super.write(buffer, off, len);
                }

                public void close() throws IOException {
                    if (this.needsLineBreak) {
                        super.write((int) '\n');
                        this.needsLineBreak = false;
                    }
                    // Impl note:
                    // We're intentionally not propagating the call here.
                }
            };

            encoder = new Base64OutputStream(filter, true, 76, new byte[] { (byte) '\n' });

            output.write("-----BEGIN X509 CRL-----\n".getBytes());
            writer.lock();
            writer.write(encoder);
            encoder.close();
            filter.close();
            output.write("-----END X509 CRL-----\n".getBytes());
            output.close();
        }
    } catch (GeneralSecurityException e) {
        // This should never actually happen
        log.error("Unexpected security error occurred while retrieving CA key", e);
    } catch (CryptoException e) {
        // Something went horribly wrong with the stream writer
        log.error("Unexpected error occurred while writing new CRL file", e);
    } finally {
        for (Closeable stream : Arrays.asList(encoder, output, reaper, input)) {
            if (stream != null) {
                try {
                    stream.close();
                } catch (IOException e) {
                    log.error("Unexpected exception occurred while closing stream: {}", stream, e);
                }
            }
        }

        if (!strippedFile.delete()) {
            log.error("Unable to delete temporary CRL file: {}", strippedFile);
        }
    }
}
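For illustration, a hedged usage sketch of the contract documented in the javadoc above; the crlFileUtil instance, file path, and serial numbers are hypothetical placeholders rather than anything from the Candlepin sources:

// Hypothetical usage: crlFileUtil is assumed to be an injected/constructed CrlFileUtil,
// and the path and serial values are placeholders.
File crlFile = new File("/var/lib/candlepin/candlepin.crl");
crlFileUtil.updateCRLFile(
    crlFile,
    Arrays.asList(BigInteger.valueOf(1001L)),   // serials to revoke (added to the CRL)
    Arrays.asList(BigInteger.valueOf(42L)));    // serials to unrevoke (removed from the CRL)

Worth noting in the method itself: the anonymous FilterOutputStream deliberately swallows close() so that the Base64 encoder can be closed (flushing its final block) without closing the underlying file stream, which must stay open for the END X509 CRL footer, and it appends a trailing newline if the encoded payload did not end with one.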
use of java.io.FilterOutputStream in project wikidata-query-rdf by wikimedia.
the class FileStreamDumper method wrap.
@Override
public InputStream wrap(InputStream inputStream) {
    if (inputStream == null)
        return null;
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    OutputStream tee = new FilterOutputStream(buffer) {
        @Override
        public void close() throws IOException {
            synchronized (FileStreamDumper.this) {
                dumpOutput.write(buffer.toByteArray());
            }
            super.close();
        }
    };
    return new TeeInputStream(inputStream, tee, true);
}
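The wrapper above mirrors every byte read from inputStream into an in-memory buffer and, when the tee branch is closed, copies that buffer into the shared dumpOutput under the dumper's lock. A standalone, hedged sketch of the same pattern, assuming commons-io is on the classpath; copyTarget stands in for dumpOutput and the synchronization is omitted:

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.input.TeeInputStream;

class TeeCaptureSketch {
    static InputStream wrap(InputStream in, final OutputStream copyTarget) {
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        OutputStream tee = new FilterOutputStream(buffer) {
            @Override
            public void close() throws IOException {
                copyTarget.write(buffer.toByteArray()); // hand off the captured copy in one write
                super.close();
            }
        };
        // closeBranch = true: closing the returned stream also closes the tee, triggering the copy
        return new TeeInputStream(in, tee, true);
    }
}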
use of java.io.FilterOutputStream in project j2objc by google.
the class OldFilterOutputStreamTest method test_writeI.
public void test_writeI() throws IOException {
    Support_OutputStream sos = new Support_OutputStream(1);
    os = new FilterOutputStream(sos);
    os.write(42);

    bis = new ByteArrayInputStream(sos.toByteArray());
    assertTrue("Test 1: Byte has not been written.", bis.available() == 1);
    assertEquals("Test 2: Incorrect byte written or read;", 42, bis.read());

    try {
        // Support_OutputStream throws an IOException if the internal
        // buffer is full, which it should be now.
        os.write(42);
        fail("Test 2: IOException expected.");
    } catch (IOException e) {
        // Expected.
    }
}
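What the test exercises is simply that FilterOutputStream.write(int) delegates to the wrapped stream. A minimal sketch with a plain ByteArrayOutputStream in place of the Harmony Support_OutputStream test helper:

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;

class WriteDelegationSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream target = new ByteArrayOutputStream();
        FilterOutputStream fos = new FilterOutputStream(target);
        fos.write(42);                                 // delegates straight to target.write(42)
        System.out.println(target.size());             // 1
        System.out.println(target.toByteArray()[0]);   // 42
    }
}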
use of java.io.FilterOutputStream in project j2objc by google.
the class OldFilterOutputStreamTest method test_write$BII_Exception.
public void test_write$BII_Exception() throws IOException {
    Support_OutputStream sos = new Support_OutputStream(testLength);
    os = new FilterOutputStream(sos);
    byte[] buf = new byte[10];

    try {
        os.write(buf, -1, 1);
        fail("IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
        // Expected.
    }

    try {
        os.write(buf, 0, -1);
        fail("IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
        // Expected.
    }

    try {
        os.write(buf, 10, 1);
        fail("IndexOutOfBoundsException expected.");
    } catch (IndexOutOfBoundsException e) {
        // Expected.
    }
}
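The default FilterOutputStream.write(byte[], int, int) validates the offset/length range before copying the bytes one at a time through write(int), which is the behavior the three try blocks above probe. A small sketch of the boundary cases:

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;

class BoundsCheckSketch {
    public static void main(String[] args) throws IOException {
        FilterOutputStream fos = new FilterOutputStream(new ByteArrayOutputStream());
        byte[] buf = new byte[10];
        try {
            fos.write(buf, 10, 1);   // offset + length exceeds buf.length
        } catch (IndexOutOfBoundsException e) {
            System.out.println("rejected: " + e);
        }
        fos.write(buf, 0, 10);       // a valid full-range write succeeds
    }
}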
use of java.io.FilterOutputStream in project apex-malhar by apache.
the class AbstractFileOutputOperatorTest method testChainFilters.
@Test
public void testChainFilters() throws NoSuchAlgorithmException, IOException {
    EvenOddHDFSExactlyOnceWriter writer = new EvenOddHDFSExactlyOnceWriter();

    KeyGenerator keygen = KeyGenerator.getInstance("AES");
    keygen.init(128);
    final SecretKey secretKey = keygen.generateKey();
    byte[] iv = "TestParam16bytes".getBytes();
    final IvParameterSpec ivps = new IvParameterSpec(iv);

    FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream> chainStreamProvider =
        new FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream>();
    chainStreamProvider.addStreamProvider(new FilterStreamCodec.GZipFilterStreamProvider());

    // The filter is to keep track of the offsets to handle multi member gzip issue with openjdk
    // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4691425
    final CounterFilterStreamContext evenCounterContext = new CounterFilterStreamContext();
    final CounterFilterStreamContext oddCounterContext = new CounterFilterStreamContext();

    chainStreamProvider.addStreamProvider(new FilterStreamProvider.SimpleFilterReusableStreamProvider<CounterFilterOutputStream, OutputStream>() {
        @Override
        protected FilterStreamContext<CounterFilterOutputStream> createFilterStreamContext(OutputStream outputStream) throws IOException {
            if (evenCounterContext.isDoInit()) {
                evenCounterContext.init(outputStream);
                return evenCounterContext;
            } else {
                oddCounterContext.init(outputStream);
                return oddCounterContext;
            }
        }
    });

    chainStreamProvider.addStreamProvider(new FilterStreamProvider.SimpleFilterReusableStreamProvider<CipherOutputStream, OutputStream>() {
        @Override
        protected FilterStreamContext<CipherOutputStream> createFilterStreamContext(OutputStream outputStream) throws IOException {
            try {
                Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
                cipher.init(Cipher.ENCRYPT_MODE, secretKey, ivps);
                return new FilterStreamCodec.CipherFilterStreamContext(outputStream, cipher);
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    });

    writer.setFilterStreamProvider(chainStreamProvider);

    File evenFile = new File(testMeta.getDir(), EVEN_FILE);
    File oddFile = new File(testMeta.getDir(), ODD_FILE);

    List<Long> evenOffsets = new ArrayList<Long>();
    List<Long> oddOffsets = new ArrayList<Long>();

    writer.setFilePath(testMeta.getDir());
    writer.setAlwaysWriteToTmp(false);
    writer.setup(testMeta.testOperatorContext);

    for (int i = 0; i < 10; ++i) {
        writer.beginWindow(i);
        for (int j = 0; j < 1000; ++j) {
            writer.input.put(i);
        }
        writer.endWindow();
        if ((i % 2) == 1) {
            writer.beforeCheckpoint(i);
            evenOffsets.add(evenCounterContext.getCounter());
            oddOffsets.add(oddCounterContext.getCounter());
        }
    }
    writer.teardown();

    /*
    evenOffsets.add(evenFile.length());
    oddOffsets.add(oddFile.length());
    */

    checkCompressedFile(evenFile, evenOffsets, 0, 5, 1000, secretKey, iv);
    checkCompressedFile(oddFile, oddOffsets, 1, 5, 1000, secretKey, iv);
}
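The chain above composes gzip compression, an offset counter, and AES encryption through Malhar's FilterStreamProvider. As a rough standalone comparison, a hedged sketch of chaining plain JDK filter streams so that bytes are gzip-compressed and then AES/CBC-encrypted on their way to a file; the compress-then-encrypt ordering and the file name are assumptions of this sketch, not taken from the operator:

import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;
import javax.crypto.Cipher;
import javax.crypto.CipherOutputStream;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;

class ChainedFilterStreamSketch {
    public static void main(String[] args) throws Exception {
        KeyGenerator keygen = KeyGenerator.getInstance("AES");
        keygen.init(128);
        SecretKey key = keygen.generateKey();
        IvParameterSpec iv = new IvParameterSpec("TestParam16bytes".getBytes());

        Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
        cipher.init(Cipher.ENCRYPT_MODE, key, iv);

        // Innermost stream writes to the file; each wrapper transforms bytes on the way down.
        try (OutputStream file = new FileOutputStream("chained.gz.aes");
             OutputStream encrypted = new CipherOutputStream(file, cipher);
             OutputStream compressed = new GZIPOutputStream(encrypted)) {
            compressed.write("some payload".getBytes());
        }
    }
}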