Use of org.apache.commons.io.input.CountingInputStream in the project disunity by ata4, in the class BundleReader, method read.
/**
 * Reads and parses the bundle, returning a {@link Bundle} whose entry list is
 * populated and sorted by file offset.
 *
 * @return the parsed bundle
 * @throws BundleException if the signature is invalid or the data header is truncated
 * @throws IOException if an I/O error occurs while reading
 */
public Bundle read() throws BundleException, IOException {
    bundle = new Bundle();
    in.position(0);
    BundleHeader header = bundle.header();
    in.readStruct(header);
    // check signature
    if (!header.hasValidSignature()) {
        throw new BundleException("Invalid signature");
    }
    List<BundleEntryInfo> entryInfos = bundle.entryInfos();
    if (header.compressedDataHeaderSize() > 0) {
        if (header.dataHeaderAtEndOfFile()) {
            in.position(header.completeFileSize() - header.compressedDataHeaderSize());
        }
        // build an input stream for the uncompressed data header
        InputStream headerIn = new BoundedInputStream(in.stream(), header.compressedDataHeaderSize());
        DataReader inData;
        // NOTE: each case must end with "break" -- the original fall-through
        // overwrote inData with the result of the later branches.
        switch (header.dataHeaderCompressionScheme()) {
            case 1:
                // LZMA
                inData = DataReaders.forInputStream(new CountingInputStream(new LzmaInputStream(headerIn)));
                break;
            case 3:
                // LZ4
                byte[] compressed = new byte[header.compressedDataHeaderSize()];
                byte[] decompressed = new byte[(int) header.dataHeaderSize()];
                // InputStream.read(byte[]) may return fewer bytes than requested,
                // so loop until the compressed header buffer is completely filled
                for (int off = 0; off < compressed.length; ) {
                    int r = headerIn.read(compressed, off, compressed.length - off);
                    if (r < 0) {
                        throw new BundleException("Truncated data header");
                    }
                    off += r;
                }
                LZ4JavaSafeFastDecompressor.INSTANCE.decompress(compressed, decompressed);
                inData = DataReaders.forByteBuffer(ByteBuffer.wrap(decompressed));
                break;
            case 0:
            default:
                // not compressed; unknown schemes are treated as uncompressed
                inData = DataReaders.forInputStream(headerIn);
                break;
        }
        // Block info: not captured for now
        {
            // 16 bytes unknown
            byte[] unknown = new byte[16];
            inData.readBytes(unknown);
            int storageBlocks = inData.readInt();
            // skip over the per-block (compressed size, uncompressed size, flags) triples
            for (int i = 0; i < storageBlocks; ++i) {
                inData.readUnsignedInt();
                inData.readUnsignedInt();
                inData.readUnsignedShort();
            }
        }
        int files = inData.readInt();
        for (int i = 0; i < files; i++) {
            BundleEntryInfo entryInfo = new BundleEntryInfoFS();
            inData.readStruct(entryInfo);
            entryInfos.add(entryInfo);
        }
    } else {
        // raw or web header
        long dataHeaderSize = header.dataHeaderSize();
        if (dataHeaderSize == 0) {
            // old stream versions don't store the data header size, so use a large
            // fixed number instead
            dataHeaderSize = 4096;
        }
        InputStream is = dataInputStream(0, dataHeaderSize);
        DataReader inData = DataReaders.forInputStream(is);
        int files = inData.readInt();
        for (int i = 0; i < files; i++) {
            BundleEntryInfo entryInfo = new BundleEntryInfo();
            inData.readStruct(entryInfo);
            entryInfos.add(entryInfo);
        }
    }
    // sort entries by offset so that they're in the order in which they
    // appear in the file, which is convenient for compressed bundles
    entryInfos.sort((a, b) -> Long.compare(a.offset(), b.offset()));
    List<BundleEntry> entries = bundle.entries();
    entryInfos.forEach(entryInfo -> {
        entries.add(new BundleInternalEntry(entryInfo, this::inputStreamForEntry));
    });
    return bundle;
}
Use of org.apache.commons.io.input.CountingInputStream in the project knime-core by knime, in the class ReadTableNodeModel, method extractTable.
/**
 * Opens the configured input file/URL and reads a {@link ContainerTable} from it,
 * reporting read progress against the source's size when that size can be determined.
 *
 * @param exec execution context used for progress reporting and cancellation checks
 * @return the table read from the stream
 * @throws IOException if reading the stream fails
 * @throws InvalidSettingsException if the node settings are invalid
 */
private ContainerTable extractTable(final ExecutionContext exec) throws IOException, InvalidSettingsException {
    try (InputStream inputStream = openInputStream()) {
        // possibly re-assigned to a progress-counting wrapper below
        InputStream in = inputStream;
        long sizeInBytes;
        String loc = m_fileName.getStringValue();
        try {
            try {
                // first interpret the location as a URL
                URL url = new URL(loc);
                sizeInBytes = FileUtil.getFileFromURL(url).length();
            } catch (MalformedURLException mue) {
                // not a URL -- fall back to a plain file path
                File file = new File(loc);
                if (file.exists()) {
                    sizeInBytes = file.length();
                } else {
                    sizeInBytes = 0L;
                }
            }
        } catch (Exception e) {
            // size is only used for progress display; ignore and report no progress
            sizeInBytes = 0L;
        }
        final long sizeFinal = sizeInBytes;
        if (sizeFinal > 0) {
            // wrap the stream so each read updates the progress bar and honors cancellation
            CountingInputStream bcs = new CountingInputStream(in) {
                @Override
                protected synchronized void afterRead(final int n) {
                    super.afterRead(n);
                    final long byteCount = getByteCount();
                    exec.setProgress((double) byteCount / sizeFinal, () -> FileUtils.byteCountToDisplaySize(byteCount));
                    try {
                        exec.checkCanceled();
                    } catch (CanceledExecutionException e) {
                        // preserve the cause so the cancellation origin is not lost
                        throw new RuntimeException("canceled", e);
                    }
                }
            };
            in = bcs;
        }
        return DataContainer.readFromStream(in);
    } finally {
        exec.setProgress(1.0);
    }
}
Use of org.apache.commons.io.input.CountingInputStream in the project lavaplayer by sedmelluq, in the class RemoteNodeProcessor, method handleResponseBody.
/**
 * Decodes and dispatches all messages contained in the node's response body.
 * The total number of bytes consumed is recorded on the tick builder even when
 * decoding fails partway through.
 *
 * @param inputStream raw response body stream
 * @param tickBuilder builder that receives the response size
 * @return false if processing was interrupted, true otherwise
 */
private boolean handleResponseBody(InputStream inputStream, TickBuilder tickBuilder) {
    CountingInputStream counting = new CountingInputStream(inputStream);
    DataInputStream dataInput = new DataInputStream(counting);
    try {
        // decode messages until the mapper signals the end of the stream
        for (RemoteMessage message = mapper.decode(dataInput); message != null; message = mapper.decode(dataInput)) {
            if (message instanceof TrackStartResponseMessage) {
                handleTrackStartResponse((TrackStartResponseMessage) message);
            } else if (message instanceof TrackFrameDataMessage) {
                handleTrackFrameData((TrackFrameDataMessage) message);
            } else if (message instanceof TrackExceptionMessage) {
                handleTrackException((TrackExceptionMessage) message);
            } else if (message instanceof NodeStatisticsMessage) {
                handleNodeStatistics((NodeStatisticsMessage) message);
            }
        }
    } catch (InterruptedException interruption) {
        log.error("Node {} processing thread was interrupted.", nodeAddress);
        // restore the interrupt flag for callers further up the stack
        Thread.currentThread().interrupt();
        return false;
    } catch (Throwable e) {
        log.error("Error when processing response from node {}.", nodeAddress, e);
        ExceptionTools.rethrowErrors(e);
    } finally {
        // always record how much of the body was actually consumed
        tickBuilder.responseSize = counting.getCount();
    }
    return true;
}
Use of org.apache.commons.io.input.CountingInputStream in the project POL-POM-5 by PhoenicisOrg, in the class PEReader, method parseExecutable.
/**
 * Parses a Windows PE executable from the given stream into its component structures.
 *
 * @param inputStream stream positioned at the start of the executable
 * @return the parsed {@link PEFile}
 * @throws IOException if reading any structure fails
 */
public PEFile parseExecutable(InputStream inputStream) throws IOException {
    // the counting wrapper tracks the current offset while the readers advance
    try (CountingInputStream countingStream = new CountingInputStream(inputStream)) {
        // structures must be consumed in file order:
        // DOS header, real-mode stub, NT headers, section table, resource section
        final ImageDOSHeader dosHeader = readDosHeader(countingStream);
        final byte[] stubProgram = readRealModeStubProgram(countingStream, dosHeader);
        final ImageNTHeaders ntHeaders = readImageNTHeaders(countingStream);
        final SectionHeader[] sections = readSectionHeaders(countingStream, ntHeaders);
        final RsrcSection rsrc = readResourceSection(countingStream, sections);
        return new PEFile(dosHeader, stubProgram, ntHeaders, sections, rsrc);
    }
}
Use of org.apache.commons.io.input.CountingInputStream in the project indy by Commonjava, in the class TransferCountingInputStream, method close.
/**
 * Closes the stream after recording transfer telemetry: total bytes read, the
 * elapsed time of this close call, and derived throughput reported to the
 * metrics manager and the active tracing span. The underlying stream is closed
 * in the finally block even if metric recording fails.
 */
@Override
public void close() throws IOException {
    long start = System.nanoTime();
    try {
        // assumes this.in was wrapped as a CountingInputStream at construction -- TODO confirm;
        // the cast throws ClassCastException otherwise
        CountingInputStream stream = (CountingInputStream) this.in;
        Logger logger = LoggerFactory.getLogger(getClass());
        size = stream.getByteCount();
        logger.trace("Reads: {} bytes", size);
        long end = System.nanoTime();
        // NOTE(review): elapsed measures only this close() call, not the whole
        // read lifetime, so the speed/meter values below reflect that window
        double elapsed = (end - start) / NANOS_PER_SEC;
        // accumulate the pause duration onto the active span's latency timer field
        TraceManager.getActiveSpan().ifPresent(s -> s.setInProgressField(LATENCY_TIMER_PAUSE_KEY, s.getInProgressField(LATENCY_TIMER_PAUSE_KEY, 0.0) + (end - start)));
        if (metricsConfig != null && metricsManager != null) {
            // mark the upload meter with bytes-per-second for this transfer
            String name = getName(metricsConfig.getNodePrefix(), TRANSFER_UPLOAD_METRIC_NAME, getDefaultName(TransferCountingInputStream.class, "read"), METER);
            Meter meter = metricsManager.getMeter(name);
            meter.mark(Math.round(stream.getByteCount() / elapsed));
        }
        // report size (KiB) and speed (KiB/s) on the active tracing span
        double kbCount = (double) size / 1024;
        long speed = Math.round(kbCount / elapsed);
        addFieldToActiveSpan(READ_SIZE, kbCount);
        addFieldToActiveSpan(READ_SPEED, speed);
    } finally {
        // always close the wrapped stream, regardless of metric errors
        super.close();
    }
}
Aggregations