Example use of java.io.FilterInputStream in the voldemort project (class BasicFetchStrategy, method copyFileWithCheckSum):
/**
 * Copies a file from the given filesystem to the local machine while computing a
 * checksum of type {@code checkSumType}. If an error occurs during the copy, the
 * whole file is retried, up to {@code fetcher.getMaxAttempts()} attempts. After a
 * successful copy, the destination file is re-read from disk and its checksum is
 * compared against the one computed from the streamed buffer, to catch local
 * write corruption.
 *
 * @param source
 *            Source path of the file to copy
 * @param dest
 *            Destination path of the file on the local machine
 * @param checkSumType
 *            Type of the checksum to be computed for this file; null disables
 *            checksum computation and validation
 * @return the computed checksum bytes of the copied file, or null when
 *         {@code checkSumType} is null
 * @throws IOException
 *             if the copy still fails after the maximum number of attempts
 */
private byte[] copyFileWithCheckSum(HdfsFile source, File dest, CheckSumType checkSumType) throws IOException {
    byte[] checkSum = null;
    CheckSum bufferCheckSumGenerator = null;
    logger.debug("Starting copy of " + source + " to " + dest);
    // Check if its Gzip compressed
    boolean isCompressed = source.isCompressed();
    FilterInputStream input = null;
    OutputStream output = null;
    long startTimeMS = System.currentTimeMillis();
    int previousAttempt = 0;
    for (int attempt = 1; attempt <= fetcher.getMaxAttempts(); attempt++) {
        boolean success = false;
        long totalBytesRead = 0;
        boolean fsOpened = false;
        // Reset per attempt: a partial attempt must not contaminate the checksum.
        bufferCheckSumGenerator = null;
        stats.singleFileFetchStart(attempt != 1);
        try {
            // Create a per file checksum generator
            if (checkSumType != null) {
                bufferCheckSumGenerator = CheckSum.getInstance(checkSumType);
            }
            logger.info("Starting attempt #" + attempt + "/" + fetcher.getMaxAttempts() + " to fetch remote file: " + source + " to local destination: " + dest);
            input = new ThrottledInputStream(fs.open(source.getPath()), fetcher.getThrottler(), stats);
            if (isCompressed) {
                // We are already bounded by the "hdfs.fetcher.buffer.size"
                // specified in the Voldemort config, the default value of
                // which is 64K. Using the same as the buffer size for
                // GZIPInputStream as well.
                input = new GZIPInputStream(input, this.bufferSize);
            }
            fsOpened = true;
            output = new BufferedOutputStream(new FileOutputStream(dest));
            int read;
            while (true) {
                // Cooperative cancellation: if the async operation was stopped,
                // bubble the exception up instead of finishing the copy.
                if (status != null && status.hasException()) {
                    Exception ex = status.getException();
                    if (ex instanceof AsyncOperationStoppedException) {
                        // Then stop() has been called, so let's bubble up the exception
                        throw (AsyncOperationStoppedException) ex;
                    }
                }
                read = input.read(buffer);
                if (read < 0) {
                    break;
                } else {
                    output.write(buffer, 0, read);
                }
                // Update the per file checksum
                if (bufferCheckSumGenerator != null) {
                    bufferCheckSumGenerator.update(buffer, 0, read);
                }
                stats.recordBytesWritten(read);
                totalBytesRead += read;
                boolean reportIntervalPassed = stats.getBytesTransferredSinceLastReport() > fetcher.getReportingIntervalBytes();
                // Log at least once per attempt, and again whenever enough bytes
                // have moved since the last report.
                if (attempt != previousAttempt || reportIntervalPassed) {
                    previousAttempt = attempt;
                    NumberFormat format = NumberFormat.getNumberInstance();
                    format.setMaximumFractionDigits(2);
                    String message = stats.getTotalBytesTransferred() / (1024 * 1024) + " MB copied at " + format.format(stats.getBytesTransferredPerSecond() / (1024 * 1024)) + " MB/sec" + ", " + format.format(stats.getPercentCopied()) + " % complete" + ", attempt: #" + attempt + "/" + fetcher.getMaxAttempts() + ", current file: " + dest.getName();
                    if (this.status == null) {
                        // This is to accommodate tests and the old ReadOnlyStoreManagementServlet code path
                        // FIXME: Delete this when we get rid of the old code which does not use status
                        logger.info(message);
                    } else {
                        this.status.setStatus(message);
                        // status.toString() is more detailed than just the message. We print the whole
                        // thing so that server-side logs are very similar to client (BnP) -side logs.
                        logger.info(this.status.toString());
                    }
                    if (reportIntervalPassed) {
                        stats.reset();
                    }
                }
            }
            if (bufferCheckSumGenerator != null) {
                checkSum = bufferCheckSumGenerator.getCheckSum();
            }
            stats.reportFileDownloaded(dest, startTimeMS, source.getSize(), System.currentTimeMillis() - startTimeMS, attempt, totalBytesRead, checkSum);
            logger.info("Completed copy of " + source + " to " + dest);
            success = true;
        } catch (IOException e) {
            if (!fsOpened) {
                logger.error("Error while opening the file stream to " + source, e);
            } else {
                logger.error("Error while copying file " + source + " after " + totalBytesRead + " bytes.", e);
            }
            if (e.getCause() != null) {
                logger.error("Cause of error ", e.getCause());
            }
            if (attempt < fetcher.getMaxAttempts()) {
                logger.info("Will retry copying after " + fetcher.getRetryDelayMs() + " ms");
                sleepForRetryDelayMs();
            } else {
                stats.reportFileError(dest, fetcher.getMaxAttempts(), startTimeMS, e);
                logger.info("Fetcher giving up copy after " + fetcher.getMaxAttempts() + " attempts");
                throw e;
            }
        } finally {
            stats.singleFileFetchEnd();
            IOUtils.closeQuietly(output);
            IOUtils.closeQuietly(input);
            if (success) {
                break;
            }
        }
    }
    // Second checksum validation: re-read the local file and verify it is
    // consistent with the checksum computed from the in-flight buffer.
    if (bufferCheckSumGenerator != null) {
        CheckSum fileCheckSumGenerator = CheckSum.getInstance(checkSumType);
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(dest));
        int read;
        try {
            while ((read = in.read(buffer)) >= 0) {
                fileCheckSumGenerator.update(buffer, 0, read);
            }
            if (ByteUtils.compare(fileCheckSumGenerator.getCheckSum(), checkSum) != 0)
                // BUG FIX: the second hex string previously printed the file
                // checksum again instead of the buffer checksum, so the two
                // values in the message were always identical.
                throw new VoldemortException("Local file: " + dest.getAbsolutePath() + " checksum (" + ByteUtils.toHexString(fileCheckSumGenerator.getCheckSum()) + ") does not match with the checksum in the buffer (" + ByteUtils.toHexString(checkSum) + ")");
        } finally {
            IOUtils.closeQuietly(in);
        }
    }
    return checkSum;
}
Example use of java.io.FilterInputStream in the jackrabbit project by Apache (class ValueHelper, method deserialize):
/**
 * Deserializes the string data read from the given reader into a
 * <code>Value</code> of the given type.
 *
 * @param reader reader for the string data to be deserialized
 * @param type type of value
 * @param decodeBlanks if <code>true</code>, <code>"_x0020_"</code>
 *                     character sequences are decoded to single space
 *                     characters
 * @param factory ValueFactory used to build the <code>Value</code> object
 * @return the deserialized <code>Value</code>
 * @throws IOException if an i/o error occurs during deserialization
 * @throws ValueFormatException if the string data is not of the required
 *         format
 * @throws RepositoryException if an error occurs during deserialization
 */
public static Value deserialize(Reader reader, int type, boolean decodeBlanks, ValueFactory factory) throws IOException, ValueFormatException, RepositoryException {
    if (type != PropertyType.BINARY) {
        // Non-binary: slurp all character data, optionally decode blanks,
        // then convert the resulting string to the requested type.
        StringBuilder data = new StringBuilder();
        char[] cbuf = new char[8192];
        for (int n = reader.read(cbuf); n > -1; n = reader.read(cbuf)) {
            data.append(cbuf, 0, n);
        }
        String value = data.toString();
        if (decodeBlanks) {
            value = Text.replace(value, "_x0020_", " ");
        }
        return convert(value, type, factory);
    }
    // Binary: the reader holds base64-encoded data. The decodeBlanks flag is
    // irrelevant here because base64 output cannot contain encoded spaces.
    // Decode to a transient temp file rather than buffering in memory.
    TransientFileFactory fileFactory = TransientFileFactory.getInstance();
    final File tmpFile = fileFactory.createTransientFile("bin", null, null);
    OutputStream out = new BufferedOutputStream(new FileOutputStream(tmpFile));
    try {
        Base64.decode(reader, out);
    } finally {
        out.close();
    }
    // Hand the ValueFactory a stream wrapper that removes the temp file as
    // soon as the consumer closes the stream.
    return factory.createValue(new FilterInputStream(new FileInputStream(tmpFile)) {
        public void close() throws IOException {
            in.close();
            // temp file can now safely be removed
            tmpFile.delete();
        }
    });
}
Example use of java.io.FilterInputStream in the jdk8u_jdk project by JetBrains (class GZIPInputStream, method readTrailer):
/*
 * Reads the GZIP member trailer and returns true if the end of stream is
 * reached, false if there are more members (concatenated gzip data sets).
 */
private boolean readTrailer() throws IOException {
    InputStream in = this.in;
    // Bytes the inflater consumed from the buffer beyond the end of the
    // compressed data; these belong to the trailer (and possibly the next
    // member) and must be replayed before reading from the real stream.
    int n = inf.getRemaining();
    if (n > 0) {
        // Prepend the leftover buffered bytes. The FilterInputStream wrapper
        // overrides close() as a no-op so that SequenceInputStream cannot
        // close the underlying stream when the prefix is exhausted.
        in = new SequenceInputStream(new ByteArrayInputStream(buf, len - n, n), new FilterInputStream(in) {
            public void close() throws IOException {
            }
        });
    }
    // Uses left-to-right evaluation order
    // Validate CRC32 and ISIZE fields of the trailer.
    if ((readUInt(in) != crc.getValue()) || // rfc1952; ISIZE is the input size modulo 2^32
    (readUInt(in) != (inf.getBytesWritten() & 0xffffffffL)))
        throw new ZipException("Corrupt GZIP trailer");
    // try concatenated case
    // NOTE(review): n > 26 appears to mean "leftover bytes exceed this
    // member's 8-byte trailer plus a minimal next header" — confirm against
    // the gzip header layout (RFC 1952) before relying on it.
    if (this.in.available() > 0 || n > 26) {
        // this.trailer
        int m = 8;
        try {
            // next.header
            m += readHeader(in);
        } catch (IOException ze) {
            // ignore any malformed, do nothing
            // A malformed follow-on header means there is no valid
            // concatenated member; treat it as clean end of stream.
            return true;
        }
        // Feed the remaining buffered bytes (after trailer + next header)
        // back to the inflater for the next member.
        inf.reset();
        if (n > m)
            inf.setInput(buf, len - n + m, n - m);
        return false;
    }
    return true;
}
Example use of java.io.FilterInputStream in the tdi-studio-se project by Talend (class JavaProcessor, method generateWSDL):
/**
 * Extracts the WSDL content attached to the given node and materializes it as
 * file(s) under the wsdls package folder. The stored content is Base64-encoded
 * and Deflate-compressed; the decompressed payload is either a zip archive of
 * several WSDL files (whose main entry is named "main.wsdl") or a single plain
 * WSDL document.
 *
 * @param wsdlsPackageFolder target folder for the generated WSDL file(s);
 *                           created if it does not exist
 * @param node the component node carrying the WSDL_CONTENT parameter
 * @throws CoreException if the workspace folder/file operations fail
 * @throws IOException if reading the zip stream fails
 */
private void generateWSDL(IFolder wsdlsPackageFolder, INode node) throws CoreException, IOException {
    // retrieve WSDL content (compressed-n-encoded) -> zip-content -> wsdls (first named main.wsdl)
    String wsdlContent = node.getElementParameter("WSDL_CONTENT").getValue().toString(); //$NON-NLS-1$
    String uniqueName = node.getUniqueName();
    if (null != uniqueName && null != wsdlContent && !wsdlContent.trim().isEmpty()) {
        // configure decoding and uncompressing
        InputStream wsdlStream = new BufferedInputStream(new InflaterInputStream(new Base64InputStream(new ByteArrayInputStream(wsdlContent.getBytes()))));
        if (!wsdlsPackageFolder.exists()) {
            wsdlsPackageFolder.create(true, true, null);
        }
        // generate WSDL file(s)
        if (checkIsZipStream(wsdlStream)) {
            ZipInputStream zipIn = new ZipInputStream(wsdlStream);
            // BUG FIX: close the zip stream (and the wrapped inflater stream)
            // even when extraction fails; previously it leaked on exception.
            try {
                ZipEntry zipEntry = null;
                while ((zipEntry = zipIn.getNextEntry()) != null) {
                    String outputName = zipEntry.getName();
                    if ("main.wsdl".equals(outputName)) { //$NON-NLS-1$
                        outputName = uniqueName + ".wsdl"; //$NON-NLS-1$
                    }
                    IFile wsdlFile = wsdlsPackageFolder.getFile(outputName);
                    if (!wsdlFile.exists()) {
                        // IFile.create() closes the stream it is handed; wrap
                        // the zip stream with a no-op close() so we can keep
                        // reading the remaining entries.
                        InputStream unCloseIn = new FilterInputStream(zipIn) {
                            @Override
                            public void close() throws IOException {
                            }
                        };
                        wsdlFile.create(unCloseIn, true, null);
                    }
                    zipIn.closeEntry();
                }
            } finally {
                zipIn.close();
            }
        } else {
            // Single plain WSDL document; IFile.create() closes wsdlStream.
            IFile wsdlFile = wsdlsPackageFolder.getFile(uniqueName + ".wsdl"); //$NON-NLS-1$
            wsdlFile.create(wsdlStream, true, null);
        }
    }
}
Example use of java.io.FilterInputStream in the poi project by Apache (class DocumentFactoryHelper, method getDecryptedStream):
/**
 * Wraps the OLE2 data in the NPOIFSFileSystem into a decrypted stream using
 * the given password.
 *
 * @param fs The OLE2 stream for the document
 * @param password The password, null if the default password should be used
 * @return A stream for reading the decrypted data; closing it also closes
 *         the underlying filesystem
 * @throws IOException If an error occurs while decrypting or if the password
 *         does not match
 */
public static InputStream getDecryptedStream(final NPOIFSFileSystem fs, String password) throws IOException {
    EncryptionInfo encryptionInfo = new EncryptionInfo(fs);
    Decryptor decryptor = Decryptor.getInstance(encryptionInfo);
    try {
        // Try the caller-supplied password first (when present); only if that
        // fails, fall back to the well-known default password. Short-circuit
        // evaluation preserves the original verification call order.
        boolean verified = (password != null && decryptor.verifyPassword(password))
                || decryptor.verifyPassword(Decryptor.DEFAULT_PASSWORD);
        if (!verified) {
            if (password != null) {
                throw new EncryptedDocumentException("Password incorrect");
            }
            throw new EncryptedDocumentException("The supplied spreadsheet is protected, but no password was supplied");
        }
        // Tie the filesystem's lifetime to the returned stream: closing the
        // stream also closes the NPOIFSFileSystem.
        return new FilterInputStream(decryptor.getDataStream(fs.getRoot())) {
            @Override
            public void close() throws IOException {
                fs.close();
                super.close();
            }
        };
    } catch (GeneralSecurityException e) {
        throw new IOException(e);
    }
}
Aggregations