Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class AbstractJMSInputOperator, method setup().
@Override
public void setup(OperatorContext context)
{
  this.context = context;
  spinMillis = context.getValue(OperatorContext.SPIN_MILLIS);
  counters.setCounter(CounterKeys.RECEIVED, new MutableLong());
  counters.setCounter(CounterKeys.REDELIVERED, new MutableLong());
  windowDataManager.setup(context);
}
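MutableLong is used here instead of Long because the counter object registered with BasicCounters can be updated in place on every message, with no autoboxing and no need to re-register a new value. A minimal self-contained sketch of that pattern (the loop and the counter name are illustrative, not part of the operator above):

import org.apache.commons.lang.mutable.MutableLong;

public class CounterSketch {
  public static void main(String[] args) {
    // One allocation up front; the same instance is mutated afterwards.
    MutableLong received = new MutableLong();
    for (int i = 0; i < 5; i++) {
      received.increment(); // in-place update, no new Long per event
    }
    System.out.println("received=" + received.longValue()); // prints received=5
  }
}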
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class BlockWriter, method addCounters().
/**
 * Transfers counters from a removed partition into the surviving target during repartitioning.
 *
 * @param target the counters to accumulate into
 * @param source the counters of the removed partition
 */
protected void addCounters(BasicCounters<MutableLong> target, BasicCounters<MutableLong> source)
{
  for (Enum<BlockWriter.Counters> key : BlockWriter.Counters.values()) {
    MutableLong tcounter = target.getCounter(key);
    if (tcounter == null) {
      tcounter = new MutableLong();
      target.setCounter(key, tcounter);
    }
    MutableLong scounter = source.getCounter(key);
    if (scounter != null) {
      tcounter.add(scounter.longValue());
    }
  }
}
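The same merge can be written without the Apex BasicCounters wrapper. Below is a hedged sketch using a plain EnumMap; the Counters enum is hypothetical, standing in for BlockWriter.Counters:

import java.util.EnumMap;
import org.apache.commons.lang.mutable.MutableLong;

public class CounterMerge {
  // Hypothetical counter keys standing in for BlockWriter.Counters.
  enum Counters { BYTES_WRITTEN, BLOCKS_WRITTEN }

  static void addCounters(EnumMap<Counters, MutableLong> target, EnumMap<Counters, MutableLong> source) {
    for (Counters key : Counters.values()) {
      MutableLong tcounter = target.get(key);
      if (tcounter == null) {
        tcounter = new MutableLong();
        target.put(key, tcounter); // lazily create missing target counters
      }
      MutableLong scounter = source.get(key);
      if (scounter != null) {
        tcounter.add(scounter.longValue()); // fold in the removed partition's total
      }
    }
  }

  public static void main(String[] args) {
    EnumMap<Counters, MutableLong> target = new EnumMap<>(Counters.class);
    EnumMap<Counters, MutableLong> source = new EnumMap<>(Counters.class);
    source.put(Counters.BYTES_WRITTEN, new MutableLong(1024));
    addCounters(target, source);
    System.out.println(target.get(Counters.BYTES_WRITTEN)); // prints 1024
  }
}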
Use of org.apache.commons.lang.mutable.MutableLong in project knime-core by knime.
The class MedianTable, method sortOnDisk().
/**
 * Sorts the data on disk, moving missing values to the end.
 *
 * @param context An {@link ExecutionContext}.
 * @param k The indices to read from the different columns
 *            (first dim: length 2 (above & below median indices), second dim: columns)
 * @throws CanceledExecutionException Execution was cancelled.
 */
private void sortOnDisk(final ExecutionContext context, final long[][] k) throws CanceledExecutionException {
    final SortingDescription[] sorting = new SortingDescription[m_indices.length];
    final DataTableSpec spec = m_table.getSpec();
    for (int i = 0; i < m_indices.length; i++) {
        final DataColumnSpec columnSpec = spec.getColumnSpec(m_indices[i]);
        final DataValueComparator comparator = columnSpec.getType().getComparator();
        sorting[i] = new SortingDescription(columnSpec.getName()) {
            @Override
            public int compare(final DataRow o1, final DataRow o2) {
                // Move missing values to the end.
                final DataCell c1 = o1.getCell(0);
                final DataCell c2 = o2.getCell(0);
                if (c1.isMissing()) {
                    return c2.isMissing() ? 0 : 1;
                }
                if (c2.isMissing()) {
                    return -1;
                }
                return comparator.compare(c1, c2);
            }
        };
    }
    final ColumnBufferedDataTableSorter tableSorter;
    try {
        tableSorter = new ColumnBufferedDataTableSorter(m_table.getSpec(), m_table.size(), sorting);
    } catch (InvalidSettingsException e) {
        throw new IllegalStateException(e);
    }
    final MutableLong counter = new MutableLong();
    final DoubleValue[][] cells = new DoubleValue[2][m_indices.length];
    tableSorter.sort(m_table, context, new SortingConsumer() {
        @Override
        public void consume(final DataRow row) {
            for (int kindex = 0; kindex < 2; kindex++) {
                for (int i = 0; i < m_indices.length; i++) {
                    if (counter.longValue() == k[kindex][i]) {
                        DataCell cell = row.getCell(i);
                        if (cell instanceof DoubleValue) {
                            cells[kindex][i] = (DoubleValue)cell;
                        } else {
                            cells[kindex][i] = new DoubleCell(Double.NaN);
                        }
                    }
                }
            }
            counter.increment();
        }
    });
    for (int index = m_indices.length; index-- > 0;) {
        if (cells[0][index] == null || cells[1][index] == null) {
            // No non-missing rows
            m_medians[index] = Double.NaN;
        } else {
            m_medians[index] = (cells[0][index].getDoubleValue() + cells[1][index].getDoubleValue()) / 2;
        }
    }
}
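The MutableLong counter exists because the SortingConsumer is an anonymous inner class: a local variable it captures must be final, so a final MutableLong provides a row index that can still be advanced per row. A standalone sketch of that idiom (the list and callback are illustrative only):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.mutable.MutableLong;

public class CaptureSketch {
  public static void main(String[] args) {
    List<String> rows = Arrays.asList("a", "b", "c");
    // The reference is final, so the callback may capture it;
    // the value it holds can still be advanced per row.
    final MutableLong counter = new MutableLong();
    rows.forEach(row -> {
      System.out.println(counter.longValue() + ": " + row);
      counter.increment();
    });
  }
}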
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class HiveOperator, method setup().
@Override
public void setup(OperatorContext context)
{
  try {
    fs = getHDFSInstance();
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
  this.context = context;
  lastTimeStamp = System.currentTimeMillis();
  fileCounters.setCounter(Counters.TOTAL_BYTES_WRITTEN, new MutableLong());
  fileCounters.setCounter(Counters.TOTAL_TIME_ELAPSED, new MutableLong());
  super.setup(context);
}
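Counters registered in setup() are meant to be updated on the write path. Below is a hedged sketch of how such a counter might be bumped as bytes are written; the recordWrite method and its byte accounting are assumptions for illustration, not HiveOperator's actual code:

import org.apache.commons.lang.mutable.MutableLong;

public class WriteAccounting {
  private final MutableLong totalBytesWritten = new MutableLong();

  // Hypothetical write path: add the payload size to the running total.
  void recordWrite(byte[] payload) {
    totalBytesWritten.add(payload.length);
  }

  public static void main(String[] args) {
    WriteAccounting w = new WriteAccounting();
    w.recordWrite(new byte[128]);
    w.recordWrite(new byte[64]);
    System.out.println(w.totalBytesWritten); // prints 192
  }
}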
Use of org.apache.commons.lang.mutable.MutableLong in project apex-malhar by apache.
The class AbstractSingleFileOutputOperatorTest, method checkpoint().
private CheckPointOutputOperator checkpoint(AbstractSingleFileOutputOperator<Integer> writer)
{
  CheckPointOutputOperator checkPointWriter = new CheckPointOutputOperator();
  // Deep-copy the mutable counters so the checkpoint does not share state with the live writer.
  checkPointWriter.counts = Maps.newHashMap();
  for (String key : writer.counts.keySet()) {
    checkPointWriter.counts.put(key, new MutableLong(writer.counts.get(key).longValue()));
  }
  checkPointWriter.endOffsets = Maps.newHashMap();
  for (String key : writer.endOffsets.keySet()) {
    checkPointWriter.endOffsets.put(key, new MutableLong(writer.endOffsets.get(key).longValue()));
  }
  checkPointWriter.openPart = Maps.newHashMap();
  for (String key : writer.openPart.keySet()) {
    checkPointWriter.openPart.put(key, new MutableInt(writer.openPart.get(key).intValue()));
  }
  checkPointWriter.filePath = writer.filePath;
  checkPointWriter.maxOpenFiles = writer.maxOpenFiles;
  checkPointWriter.replication = writer.replication;
  checkPointWriter.totalBytesWritten = writer.totalBytesWritten;
  checkPointWriter.maxLength = writer.maxLength;
  checkPointWriter.rollingFile = writer.rollingFile;
  checkPointWriter.outputFileName = writer.outputFileName;
  return checkPointWriter;
}
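Note the deep copy: each MutableLong is rewrapped in a new instance so the checkpointed operator does not share mutable state with the live writer. A minimal sketch of that copy pattern with a plain map (the map contents are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.mutable.MutableLong;

public class DeepCopySketch {
  public static void main(String[] args) {
    Map<String, MutableLong> live = new HashMap<>();
    live.put("file1.txt", new MutableLong(10));

    // Copy values, not references: mutating 'live' later must not
    // change what was captured at checkpoint time.
    Map<String, MutableLong> checkpoint = new HashMap<>();
    for (Map.Entry<String, MutableLong> e : live.entrySet()) {
      checkpoint.put(e.getKey(), new MutableLong(e.getValue().longValue()));
    }

    live.get("file1.txt").add(5);
    System.out.println(checkpoint.get("file1.txt")); // prints 10
  }
}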