Use of org.apache.commons.lang.mutable.MutableLong in project apex-core by apache.
Class RecordingsAgent, method parseIndexLine.
@Override
protected RecordingsIndexLine parseIndexLine(String line) throws JSONException {
  RecordingsIndexLine info = new RecordingsIndexLine();
  if (line.startsWith("E")) {
    // "E" marks the end-of-index line; there are no further fields to parse.
    info.isEndLine = true;
    return info;
  }
  line = line.trim();
  info.windowIdRanges = new ArrayList<>();
  info.portTupleCount = new HashMap<>();
  // Skip the two-character line-type prefix, then walk the colon-delimited fields.
  int cursor = 2;
  int cursor2 = line.indexOf(':', cursor);
  info.partFile = line.substring(cursor, cursor2);
  cursor = cursor2 + 1;
  cursor2 = line.indexOf(':', cursor);
  // The time range is encoded as "fromTime-toTime".
  String timeRange = line.substring(cursor, cursor2);
  String[] tmp = timeRange.split("-");
  info.fromTime = Long.valueOf(tmp[0]);
  info.toTime = Long.valueOf(tmp[1]);
  cursor = cursor2 + 1;
  cursor2 = line.indexOf(':', cursor);
  if (cursor2 < 0) {
    // No more delimiters: the rest of the line is the tuple count.
    info.tupleCount = Long.valueOf(line.substring(cursor));
    return info;
  }
  info.tupleCount = Long.valueOf(line.substring(cursor, cursor2));
  cursor = cursor2 + 1;
  if (!line.substring(cursor, cursor + 2).equals("T:")) {
    return info;
  }
  cursor += 2;
  cursor2 = line.indexOf(':', cursor);
  // Window id ranges are a comma-separated list of "low-high" pairs.
  String windowRangesString = line.substring(cursor, cursor2);
  String[] windowRanges = windowRangesString.split(",");
  for (String windowRange : windowRanges) {
    String[] hilow = windowRange.split("-");
    long low = Long.valueOf(hilow[0]);
    long hi = Long.valueOf(hilow[1]);
    info.windowIdRanges.add(new TupleRecorder.Range(low, hi));
  }
  cursor = cursor2 + 1;
  cursor2 = line.indexOf(':', cursor);
  // The final field is a length-prefixed JSON object mapping port index to tuple count.
  int size = Integer.valueOf(line.substring(cursor, cursor2));
  cursor = cursor2 + 1;
  cursor2 = cursor + size;
  JSONObject json = new JSONObject(line.substring(cursor, cursor2));
  Iterator<?> keys = json.keys();
  while (keys.hasNext()) {
    String portIndex = (String) keys.next();
    long tupleCount = json.getLong(portIndex);
    if (!info.portTupleCount.containsKey(portIndex)) {
      info.portTupleCount.put(portIndex, new MutableLong(tupleCount));
    } else {
      info.portTupleCount.get(portIndex).add(tupleCount);
    }
  }
  return info;
}
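The per-port counts above use a common MutableLong idiom: keep one MutableLong per key in a map and mutate it in place rather than re-boxing a Long on every update. A minimal standalone sketch of that accumulation pattern (the class and method names here are illustrative, not part of apex-core):

  import java.util.HashMap;
  import java.util.Map;
  import org.apache.commons.lang.mutable.MutableLong;

  public class PortTupleCountSketch {
    private final Map<String, MutableLong> portTupleCount = new HashMap<>();

    // Add tupleCount to the running total for portIndex, creating the entry on first use.
    public void accumulate(String portIndex, long tupleCount) {
      MutableLong current = portTupleCount.get(portIndex);
      if (current == null) {
        portTupleCount.put(portIndex, new MutableLong(tupleCount));
      } else {
        current.add(tupleCount);
      }
    }

    public long get(String portIndex) {
      MutableLong current = portTupleCount.get(portIndex);
      return current == null ? 0L : current.longValue();
    }
  }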
Use of org.apache.commons.lang.mutable.MutableLong in project gatk by broadinstitute.
Class Concordance, method onTraversalStart.
@Override
public void onTraversalStart() {
  Set<VCFHeaderLine> defaultToolHeaderLines = getDefaultToolVCFHeaderLines();
  for (final ConcordanceState state : ConcordanceState.values()) {
    snpCounts.put(state, new MutableLong(0));
    indelCounts.put(state, new MutableLong(0));
  }
  if (truePositivesAndFalseNegativesVcf != null) {
    truePositivesAndFalseNegativesVcfWriter = createVCFWriter(truePositivesAndFalseNegativesVcf);
    final VCFHeader truthHeader = getTruthHeader();
    truthHeader.addMetaDataLine(TRUTH_STATUS_HEADER_LINE);
    defaultToolHeaderLines.forEach(truthHeader::addMetaDataLine);
    truePositivesAndFalseNegativesVcfWriter.writeHeader(truthHeader);
  }
  if (truePositivesAndFalsePositivesVcf != null) {
    truePositivesAndFalsePositivesVcfWriter = createVCFWriter(truePositivesAndFalsePositivesVcf);
    final VCFHeader evalHeader = getEvalHeader();
    defaultToolHeaderLines.forEach(evalHeader::addMetaDataLine);
    evalHeader.addMetaDataLine(TRUTH_STATUS_HEADER_LINE);
    truePositivesAndFalsePositivesVcfWriter.writeHeader(evalHeader);
  }
  if (filteredTrueNegativesAndFalseNegativesVcf != null) {
    filteredTrueNegativesAndFalseNegativesVcfWriter = createVCFWriter(filteredTrueNegativesAndFalseNegativesVcf);
    final VCFHeader evalHeader = getEvalHeader();
    evalHeader.addMetaDataLine(TRUTH_STATUS_HEADER_LINE);
    defaultToolHeaderLines.forEach(evalHeader::addMetaDataLine);
    filteredTrueNegativesAndFalseNegativesVcfWriter.writeHeader(evalHeader);
  }
}
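Here MutableLong serves as a per-state counter: one instance is registered per ConcordanceState up front so the traversal can increment counts in place without null checks or re-insertion. A minimal sketch of the same initialize-then-increment pattern, with a hypothetical enum standing in for GATK's ConcordanceState:

  import java.util.EnumMap;
  import java.util.Map;
  import org.apache.commons.lang.mutable.MutableLong;

  public class ConcordanceCounterSketch {
    enum State { TRUE_POSITIVE, FALSE_POSITIVE, FALSE_NEGATIVE }  // illustrative states only

    private final Map<State, MutableLong> counts = new EnumMap<>(State.class);

    public ConcordanceCounterSketch() {
      // Register every state up front so increments never need a null check.
      for (State state : State.values()) {
        counts.put(state, new MutableLong(0));
      }
    }

    public void record(State state) {
      counts.get(state).increment();
    }

    public long count(State state) {
      return counts.get(state).longValue();
    }
  }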
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
Class AbstractBlockReaderTest, method testCountersTransfer.
@Test
public void testCountersTransfer() throws Exception {
  TestUtils.MockBatchedOperatorStats readerStats = new TestUtils.MockBatchedOperatorStats(2);
  readerStats.operatorStats = Lists.newArrayList();
  readerStats.operatorStats.add(new ReaderStats(10, 1, 100, 1));
  TestReader sliceReader = new TestReader();
  sliceReader.processStats(readerStats);
  List<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> partitions = Lists.newArrayList();
  DefaultPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> apartition =
      new DefaultPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>(sliceReader);
  TestUtils.MockPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> pseudoParttion =
      new TestUtils.MockPartition<>(apartition, readerStats);
  partitions.add(pseudoParttion);
  Collection<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> newPartitions =
      sliceReader.definePartitions(partitions, null);
  List<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> newMocks = Lists.newArrayList();
  for (Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> partition : newPartitions) {
    partition.getPartitionedInstance().counters.setCounter(AbstractBlockReader.ReaderCounterKeys.BLOCKS, new MutableLong(1));
    newMocks.add(new TestUtils.MockPartition<>((DefaultPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>)partition, readerStats));
  }
  sliceReader.partitionCount = 1;
  newPartitions = sliceReader.definePartitions(newMocks, null);
  Assert.assertEquals(1, newPartitions.size());
  AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream> last = newPartitions.iterator().next().getPartitionedInstance();
  Assert.assertEquals("num blocks", 8, last.counters.getCounter(AbstractBlockReader.ReaderCounterKeys.BLOCKS).longValue());
}
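The test relies on the BLOCKS counters of the merged partitions being summed: each partition is given new MutableLong(1), and the single surviving partition is asserted to report a total of 8. A simplified sketch of that kind of counter merge, operating on plain lists of MutableLong rather than Apex's partitioning or BasicCounters API:

  import java.util.List;
  import org.apache.commons.lang.mutable.MutableLong;

  public class CounterMergeSketch {
    // Sum the per-partition counters into a single total, as the repartitioning
    // above is expected to do for the BLOCKS counter.
    public static MutableLong merge(List<MutableLong> perPartitionCounts) {
      MutableLong total = new MutableLong(0);
      for (MutableLong count : perPartitionCounts) {
        total.add(count.longValue());
      }
      return total;
    }
  }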
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
Class AbstractFileOutputOperator, method createCacheLoader.
/**
 * Creates the {@link CacheLoader} for loading an output stream when it is not present in the cache.
 * @return cache loader
 */
private CacheLoader<String, FSFilterStreamContext> createCacheLoader() {
  return new CacheLoader<String, FSFilterStreamContext>() {
    @Override
    public FSFilterStreamContext load(@Nonnull String filename) {
      if (rollingFile) {
        RotationState state = getRotationState(filename);
        if (rollingFile && state.rotated) {
          openPart.get(filename).add(1);
          state.rotated = false;
          MutableLong offset = endOffsets.get(filename);
          offset.setValue(0);
        }
      }
      String partFileName = getPartFileNamePri(filename);
      Path originalFilePath = new Path(filePath + Path.SEPARATOR + partFileName);
      Path activeFilePath;
      if (!alwaysWriteToTmp) {
        activeFilePath = originalFilePath;
      } else {
        // MLHR-1776 : writing to tmp file
        String tmpFileName = fileNameToTmpName.get(partFileName);
        if (tmpFileName == null) {
          tmpFileName = partFileName + '.' + System.currentTimeMillis() + TMP_EXTENSION;
          fileNameToTmpName.put(partFileName, tmpFileName);
        }
        activeFilePath = new Path(filePath + Path.SEPARATOR + tmpFileName);
      }
      FSDataOutputStream fsOutput;
      boolean sawThisFileBefore = endOffsets.containsKey(filename);
      try {
        if (fs.exists(originalFilePath) || (alwaysWriteToTmp && fs.exists(activeFilePath))) {
          if (sawThisFileBefore) {
            FileStatus fileStatus = fs.getFileStatus(activeFilePath);
            MutableLong endOffset = endOffsets.get(filename);
            if (endOffset != null) {
              endOffset.setValue(fileStatus.getLen());
            } else {
              endOffsets.put(filename, new MutableLong(fileStatus.getLen()));
            }
            fsOutput = openStream(activeFilePath, true);
            LOG.debug("appending to {}", activeFilePath);
          } else {
            // If the file is rolling we need to delete all its parts.
            if (rollingFile) {
              int part = 0;
              while (true) {
                Path seenPartFilePath = new Path(filePath + Path.SEPARATOR + getPartFileName(filename, part));
                if (!fs.exists(seenPartFilePath)) {
                  break;
                }
                fs.delete(seenPartFilePath, true);
                part = part + 1;
              }
              fsOutput = openStream(activeFilePath, false);
            } else {
              // Not rolling is easy, just delete the file and create it again.
              fs.delete(activeFilePath, true);
              if (alwaysWriteToTmp) {
                // we need to delete original file if that exists
                if (fs.exists(originalFilePath)) {
                  fs.delete(originalFilePath, true);
                }
              }
              fsOutput = openStream(activeFilePath, false);
            }
          }
        } else {
          fsOutput = openStream(activeFilePath, false);
        }
        filesWithOpenStreams.add(filename);
        LOG.info("opened {}, active {}", partFileName, activeFilePath);
        return new FSFilterStreamContext(fsOutput);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  };
}
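In this operator MutableLong tracks a mutable end offset per file: the offset is reset to 0 when a rolling file rotates and set to the current file length when an existing file is reopened for append. A small standalone sketch of that offset bookkeeping (class and method names are hypothetical, and the map stands in for the operator's endOffsets field):

  import java.util.HashMap;
  import java.util.Map;
  import org.apache.commons.lang.mutable.MutableLong;

  public class EndOffsetTrackerSketch {
    private final Map<String, MutableLong> endOffsets = new HashMap<>();

    // Record the current length of a file, reusing the existing MutableLong when present.
    public void updateOffset(String filename, long length) {
      MutableLong offset = endOffsets.get(filename);
      if (offset != null) {
        offset.setValue(length);
      } else {
        endOffsets.put(filename, new MutableLong(length));
      }
    }

    // Reset the offset when a rolling file rotates to a new part.
    public void onRotation(String filename) {
      MutableLong offset = endOffsets.get(filename);
      if (offset != null) {
        offset.setValue(0);
      }
    }
  }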
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
Class FileSplitter, method setup.
@Override
public void setup(Context.OperatorContext context) {
  Preconditions.checkArgument(!scanner.files.isEmpty(), "empty files");
  Preconditions.checkArgument(blockSize == null || blockSize > 0, "invalid block size");
  operatorId = context.getId();
  this.context = context;
  fileCounters.setCounter(Counters.PROCESSED_FILES, new MutableLong());
  windowDataManager.setup(context);
  try {
    fs = scanner.getFSInstance();
  } catch (IOException e) {
    throw new RuntimeException("creating fs", e);
  }
  if (blockSize == null) {
    blockSize = fs.getDefaultBlockSize(new Path(scanner.files.iterator().next()));
  }
  if (context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID) < windowDataManager.getLargestCompletedWindow()) {
    blockMetadataIterator = null;
  } else {
    // don't set up the scanner during recovery
    scanner.setup(context);
  }
}
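setup() registers the PROCESSED_FILES counter as a MutableLong created with the no-argument constructor, which starts at 0; later processing can then increment the same instance in place. A minimal sketch of that pattern, using a plain EnumMap as a simplified stand-in for Apex's BasicCounters holder:

  import java.util.EnumMap;
  import java.util.Map;
  import org.apache.commons.lang.mutable.MutableLong;

  public class FileCounterSketch {
    enum Counters { PROCESSED_FILES }  // mirrors the key used above

    // Simplified stand-in for the operator's fileCounters field.
    private final Map<Counters, MutableLong> counters = new EnumMap<>(Counters.class);

    public void setup() {
      // new MutableLong() starts at 0, matching the setup() call above.
      counters.put(Counters.PROCESSED_FILES, new MutableLong());
    }

    public void onFileProcessed() {
      counters.get(Counters.PROCESSED_FILES).increment();
    }

    public long processedFiles() {
      return counters.get(Counters.PROCESSED_FILES).longValue();
    }
  }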