Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class FSOutputOperatorBenchmark, method populateDAG.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  String filePath = "HDFSOutputOperatorBenchmarkingApp/" + System.currentTimeMillis();

  dag.setAttribute(DAG.STREAMING_WINDOW_SIZE_MILLIS, 1000);

  // Source operator: generates random byte-array tuples.
  RandomWordGenerator wordGenerator = dag.addOperator("wordGenerator", RandomWordGenerator.class);
  dag.getOperatorMeta("wordGenerator").getMeta(wordGenerator.output).getAttributes()
      .put(PortContext.QUEUE_CAPACITY, 10000);
  dag.getOperatorMeta("wordGenerator").getAttributes()
      .put(OperatorContext.APPLICATION_WINDOW_COUNT, 1);

  // Sink operator: writes tuples to HDFS and reports MutableLong-based
  // counters, which the engine aggregates across partitions via LongAggregator.
  FSByteOutputOperator hdfsOutputOperator = dag.addOperator("hdfsOutputOperator", new FSByteOutputOperator());
  hdfsOutputOperator.setFilePath(filePath);
  dag.getOperatorMeta("hdfsOutputOperator").getAttributes()
      .put(OperatorContext.COUNTERS_AGGREGATOR, new BasicCounters.LongAggregator<MutableLong>());

  dag.addStream("Generator2HDFSOutput", wordGenerator.output, hdfsOutputOperator.input);
}
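A minimal sketch of the operator-side pattern that the COUNTERS_AGGREGATOR attribute above relies on: the operator keeps a BasicCounters<MutableLong> instance, and the engine applies BasicCounters.LongAggregator across partitions. BasicCounters and MutableLong are the real library classes; the class, enum, and method names below are illustrative.

import org.apache.commons.lang.mutable.MutableLong;
import com.datatorrent.lib.counters.BasicCounters;

public class CounterUsageSketch
{
  enum Counters { TUPLES_WRITTEN }

  private final BasicCounters<MutableLong> counters = new BasicCounters<>(MutableLong.class);

  void setup()
  {
    counters.setCounter(Counters.TUPLES_WRITTEN, new MutableLong());
  }

  void onTuple()
  {
    // MutableLong is incremented in place: no new Long boxed per tuple.
    counters.getCounter(Counters.TUPLES_WRITTEN).increment();
  }
}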
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class HiveOperator, method setup.
@Override
public void setup(OperatorContext context)
{
  try {
    fs = getHDFSInstance();
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
  this.context = context;
  lastTimeStamp = System.currentTimeMillis();

  // Register MutableLong-backed counters so the hot path can increment them
  // in place instead of re-boxing Longs.
  fileCounters.setCounter(Counters.TOTAL_BYTES_WRITTEN, new MutableLong());
  fileCounters.setCounter(Counters.TOTAL_TIME_ELAPSED, new MutableLong());
  super.setup(context);
}
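The registered counters are then updated on the hot path and handed back to the engine, typically in endWindow. A hedged sketch of that half of the pattern (the exact bookkeeping in HiveOperator may differ):

@Override
public void endWindow()
{
  long now = System.currentTimeMillis();
  // Accumulate elapsed time in place; MutableLong.add mutates the counter.
  fileCounters.getCounter(Counters.TOTAL_TIME_ELAPSED).add(now - lastTimeStamp);
  lastTimeStamp = now;
  // Hand the counters to the engine, which aggregates them across partitions
  // using the configured COUNTERS_AGGREGATOR.
  context.setCounters(fileCounters);
}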
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class AbstractSingleFileOutputOperatorTest, method checkpoint.
private CheckPointOutputOperator checkpoint(AbstractSingleFileOutputOperator<Integer> writer)
{
  CheckPointOutputOperator checkPointWriter = new CheckPointOutputOperator();

  // Deep-copy the counter maps: MutableLong/MutableInt are mutable, so
  // sharing references would let post-checkpoint writes corrupt this copy.
  checkPointWriter.counts = Maps.newHashMap();
  for (Map.Entry<String, MutableLong> entry : writer.counts.entrySet()) {
    checkPointWriter.counts.put(entry.getKey(), new MutableLong(entry.getValue().longValue()));
  }
  checkPointWriter.endOffsets = Maps.newHashMap();
  for (Map.Entry<String, MutableLong> entry : writer.endOffsets.entrySet()) {
    checkPointWriter.endOffsets.put(entry.getKey(), new MutableLong(entry.getValue().longValue()));
  }
  checkPointWriter.openPart = Maps.newHashMap();
  for (Map.Entry<String, MutableInt> entry : writer.openPart.entrySet()) {
    checkPointWriter.openPart.put(entry.getKey(), new MutableInt(entry.getValue().intValue()));
  }

  // Primitive and immutable fields can be copied directly.
  checkPointWriter.filePath = writer.filePath;
  checkPointWriter.maxOpenFiles = writer.maxOpenFiles;
  checkPointWriter.replication = writer.replication;
  checkPointWriter.totalBytesWritten = writer.totalBytesWritten;
  checkPointWriter.maxLength = writer.maxLength;
  checkPointWriter.rollingFile = writer.rollingFile;
  checkPointWriter.outputFileName = writer.outputFileName;
  return checkPointWriter;
}
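The deep copies above matter because MutableLong is mutable: a shallow map copy would alias the counter objects, and writes after the checkpoint would corrupt the checkpointed state. A self-contained illustration (the map keys and values are made up):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.mutable.MutableLong;

public class DeepCopySketch
{
  public static void main(String[] args)
  {
    Map<String, MutableLong> live = new HashMap<>();
    live.put("part-0", new MutableLong(10));

    // Shallow copy: both maps point at the same MutableLong instance.
    Map<String, MutableLong> shallow = new HashMap<>(live);
    live.get("part-0").add(5);
    System.out.println(shallow.get("part-0")); // 15 -- checkpoint corrupted

    // Deep copy, as in the checkpoint helper above: fresh MutableLongs.
    Map<String, MutableLong> deep = new HashMap<>();
    for (Map.Entry<String, MutableLong> e : live.entrySet()) {
      deep.put(e.getKey(), new MutableLong(e.getValue().longValue()));
    }
    live.get("part-0").add(5);
    System.out.println(deep.get("part-0")); // still 15, isolated from writer
  }
}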
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class AbstractBlockReaderTest, method testCountersTransfer.
@Test
public void testCountersTransfer() throws Exception
{
  TestUtils.MockBatchedOperatorStats readerStats = new TestUtils.MockBatchedOperatorStats(2);
  readerStats.operatorStats = Lists.newArrayList();
  readerStats.operatorStats.add(new ReaderStats(10, 1, 100, 1));

  TestReader sliceReader = new TestReader();
  sliceReader.processStats(readerStats);

  List<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> partitions = Lists.newArrayList();
  DefaultPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> defaultPartition = new DefaultPartition<>(sliceReader);
  TestUtils.MockPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> pseudoPartition = new TestUtils.MockPartition<>(defaultPartition, readerStats);
  partitions.add(pseudoPartition);

  // Repartition based on the processed stats, then seed each new partition's
  // BLOCKS counter with 1.
  Collection<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> newPartitions = sliceReader.definePartitions(partitions, null);
  List<Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>> newMocks = Lists.newArrayList();
  for (Partitioner.Partition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>> partition : newPartitions) {
    partition.getPartitionedInstance().counters.setCounter(AbstractBlockReader.ReaderCounterKeys.BLOCKS, new MutableLong(1));
    newMocks.add(new TestUtils.MockPartition<>((DefaultPartition<AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream>>)partition, readerStats));
  }

  // Collapse back to a single partition: definePartitions must fold the
  // per-partition BLOCKS counters into the surviving instance.
  sliceReader.partitionCount = 1;
  newPartitions = sliceReader.definePartitions(newMocks, null);
  Assert.assertEquals(1, newPartitions.size());

  AbstractBlockReader<Slice, BlockMetadata.FileBlockMetadata, FSDataInputStream> last = newPartitions.iterator().next().getPartitionedInstance();
  Assert.assertEquals("num blocks", 8, last.counters.getCounter(AbstractBlockReader.ReaderCounterKeys.BLOCKS).longValue());
}
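The final assertion expects 8 because the first definePartitions call fans out to eight partitions (each seeded with BLOCKS = 1 in the loop), so collapsing to one partition must sum the per-partition counters. The fold itself reduces to adding MutableLong values; a sketch with the seed values hard-coded for illustration:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.mutable.MutableLong;

public class CounterMergeSketch
{
  public static void main(String[] args)
  {
    // One BLOCKS counter per old partition, each seeded with 1 as in the test.
    List<MutableLong> perPartition = Arrays.asList(
        new MutableLong(1), new MutableLong(1), new MutableLong(1), new MutableLong(1),
        new MutableLong(1), new MutableLong(1), new MutableLong(1), new MutableLong(1));

    MutableLong merged = new MutableLong();
    for (MutableLong counter : perPartition) {
      merged.add(counter.longValue());
    }
    System.out.println(merged.longValue()); // 8, matching the assertion
  }
}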
Use of org.apache.commons.lang.mutable.MutableLong in project apex-core by apache.
The class RecordingsAgent, method getRecordingInfoHelper.
private RecordingInfo getRecordingInfoHelper(String appId, String opId, String id, Set<String> containers)
{
  RecordingInfo info = new RecordingInfo();
  info.id = id;
  info.appId = appId;
  info.operatorId = opId;
  BufferedReader br = null;
  IndexFileBufferedReader ifbr = null;
  try {
    String dir = getRecordingDirectory(appId, opId, id);
    if (dir == null) {
      throw new Exception("recording directory is null");
    }
    Path path = new Path(dir);
    JSONObject json;
    FileStatus fileStatus = stramAgent.getFileSystem().getFileStatus(path);
    HashMap<String, PortInfo> portMap = new HashMap<>();
    if (!fileStatus.isDirectory()) {
      throw new Exception(path + " is not a directory");
    }

    // META file processing: a version line, a JSON line of recording
    // attributes, then one JSON line per port.
    br = new BufferedReader(new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.META_FILE))));
    String line;
    line = br.readLine();
    if (!"1.2".equals(line)) {
      throw new Exception("Unexpected line: " + line);
    }
    line = br.readLine();
    json = new JSONObject(line);
    info.startTime = json.getLong("startTime");
    info.containerId = json.optString("containerId");
    info.properties = new HashMap<>();
    if (!StringUtils.isBlank(info.containerId) && !containers.contains(info.containerId)) {
      // The recording's container is gone, so it cannot still be in progress.
      info.ended = true;
    }
    json = json.optJSONObject("properties");
    if (json != null) {
      @SuppressWarnings("unchecked")
      Iterator<String> keys = json.keys();
      while (keys.hasNext()) {
        String key = keys.next();
        // The two lines below are needed because JSONObject.get(key).toString()
        // doesn't produce a JSON representation for plain strings.
        String strValue = json.isNull(key) ? null : json.optString(key);
        info.properties.put(key, strValue != null ? strValue : new ObjectMapperString(json.get(key).toString()));
      }
    }
    info.ports = new ArrayList<>();
    while ((line = br.readLine()) != null) {
      PortInfo portInfo = new PortInfo();
      json = new JSONObject(line);
      portInfo.id = json.getInt("id");
      portInfo.name = json.getString("name");
      portInfo.type = json.getString("type");
      portInfo.streamName = json.getString("streamName");
      info.ports.add(portInfo);
      portMap.put(String.valueOf(portInfo.id), portInfo);
    }

    // INDEX file processing: accumulate per-port tuple counts (held as
    // MutableLong in each index line) and merge contiguous window-id ranges.
    ifbr = new IndexFileBufferedReader(new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.INDEX_FILE))), dir);
    info.windowIdRanges = new ArrayList<>();
    long prevHiWindowId = -1;
    RecordingsIndexLine indexLine;
    while ((indexLine = (RecordingsIndexLine)ifbr.readIndexLine()) != null) {
      if (indexLine.isEndLine) {
        info.ended = true;
      } else {
        info.totalTuples += indexLine.tupleCount;
        for (Map.Entry<String, MutableLong> entry : indexLine.portTupleCount.entrySet()) {
          PortInfo portInfo = portMap.get(entry.getKey());
          if (portInfo == null) {
            throw new Exception("port info does not exist for " + entry.getKey());
          }
          portInfo.tupleCount += entry.getValue().longValue();
        }
        for (TupleRecorder.Range r : indexLine.windowIdRanges) {
          if (info.windowIdRanges.isEmpty()) {
            // First range: open it with the low window id.
            TupleRecorder.Range range = new TupleRecorder.Range();
            range.low = r.low;
            info.windowIdRanges.add(range);
          } else if (prevHiWindowId + 1 != r.low) {
            // Gap in the window ids: close the previous range, open a new one.
            TupleRecorder.Range range = info.windowIdRanges.get(info.windowIdRanges.size() - 1);
            range.high = prevHiWindowId;
            range = new TupleRecorder.Range();
            range.low = r.low;
            info.windowIdRanges.add(range);
          }
          prevHiWindowId = r.high;
        }
      }
    }
    if (!info.windowIdRanges.isEmpty()) {
      // Close the last open range.
      TupleRecorder.Range range = info.windowIdRanges.get(info.windowIdRanges.size() - 1);
      range.high = prevHiWindowId;
    }
  } catch (Exception ex) {
    LOG.warn("Cannot get recording info for app id {}: {}", appId, ex);
    return null;
  } finally {
    IOUtils.closeQuietly(ifbr);
    IOUtils.closeQuietly(br);
  }
  return info;
}
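The portTupleCount map above follows the common Map<String, MutableLong> accumulator pattern: one mutable cell per key, mutated in place rather than re-inserting boxed Longs. A minimal sketch (the class and method names are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.mutable.MutableLong;

public class PortCountSketch
{
  static void addTuples(Map<String, MutableLong> portTupleCount, String portId, long n)
  {
    MutableLong count = portTupleCount.get(portId);
    if (count == null) {
      portTupleCount.put(portId, new MutableLong(n));
    } else {
      count.add(n); // mutate in place: no new map entry, no boxing
    }
  }

  public static void main(String[] args)
  {
    Map<String, MutableLong> counts = new HashMap<>();
    addTuples(counts, "1", 100);
    addTuples(counts, "1", 42);
    System.out.println(counts.get("1")); // 142
  }
}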