Use of org.apache.storm.tuple.TupleImpl in project storm by apache.
The class Executor, method setupMetrics.
protected void setupMetrics() {
    for (final Integer interval : intervalToTaskToMetricToRegistry.keySet()) {
        StormTimer timerTask = workerData.getUserTimer();
        timerTask.scheduleRecurring(interval, interval, new Runnable() {
            @Override
            public void run() {
                TupleImpl tuple = new TupleImpl(workerTopologyContext, new Values(interval),
                        (int) Constants.SYSTEM_TASK_ID, Constants.METRICS_TICK_STREAM_ID);
                List<AddressedTuple> metricsTickTuple =
                        Lists.newArrayList(new AddressedTuple(AddressedTuple.BROADCAST_DEST, tuple));
                receiveQueue.publish(metricsTickTuple);
            }
        });
    }
}
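For context, a consumer of this tick can recognize it by its stream id. The following is a minimal sketch, not Storm's actual dispatch code; the onTuple handler is a hypothetical entry point that receives the published tuple:

// Hypothetical consumer-side handler; only the stream-id check mirrors the snippet above.
private void onTuple(Tuple tuple) {
    if (Constants.METRICS_TICK_STREAM_ID.equals(tuple.getSourceStreamId())) {
        // the tick carries the metrics interval as its single value
        int interval = tuple.getInteger(0);
        // collect and report this task's metrics for the interval here
    } else {
        // regular data tuple handling
    }
}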
Use of org.apache.storm.tuple.TupleImpl in project storm by apache.
The class BoltOutputCollectorImpl, method boltEmit.
private List<Integer> boltEmit(String streamId, Collection<Tuple> anchors, List<Object> values, Integer targetTaskId) {
    List<Integer> outTasks;
    if (targetTaskId != null) {
        outTasks = taskData.getOutgoingTasks(targetTaskId, streamId, values);
    } else {
        outTasks = taskData.getOutgoingTasks(streamId, values);
    }
    for (Integer t : outTasks) {
        Map<Long, Long> anchorsToIds = new HashMap<>();
        if (anchors != null) {
            for (Tuple a : anchors) {
                Set<Long> rootIds = a.getMessageId().getAnchorsToIds().keySet();
                if (rootIds.size() > 0) {
                    long edgeId = MessageId.generateId(random);
                    ((TupleImpl) a).updateAckVal(edgeId);
                    for (Long root_id : rootIds) {
                        putXor(anchorsToIds, root_id, edgeId);
                    }
                }
            }
        }
        MessageId msgId = MessageId.makeId(anchorsToIds);
        TupleImpl tupleExt = new TupleImpl(executor.getWorkerTopologyContext(), values, taskId, streamId, msgId);
        executor.getExecutorTransfer().transfer(t, tupleExt);
    }
    if (isEventLoggers) {
        executor.sendToEventLogger(executor, taskData, values, executor.getComponentId(), null, random);
    }
    return outTasks;
}
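The putXor call above folds the new edge id into the per-root accumulator that backs Storm's XOR-based acking. A minimal sketch of such a helper, using a plain XOR (the real helper may differ in detail):

// Sketch of an XOR-combine helper: the value tracked per root id is the XOR of all edge ids attached to it.
private static void putXor(Map<Long, Long> pending, Long key, Long id) {
    Long curr = pending.get(key);
    if (curr == null) {
        curr = 0L;
    }
    pending.put(key, curr ^ id);
}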
Use of org.apache.storm.tuple.TupleImpl in project storm by apache.
The class Testing, method testTuple.
/**
 * Create a {@link org.apache.storm.tuple.Tuple} for use with testing.
 *
 * @param values the values to appear in the tuple
 * @param param parameters describing more details about the tuple
 */
public static Tuple testTuple(List<Object> values, MkTupleParam param) {
    String stream = param.getStream();
    if (stream == null) {
        stream = Utils.DEFAULT_STREAM_ID;
    }
    String component = param.getComponent();
    if (component == null) {
        component = "component";
    }
    int task = 1;
    List<String> fields = param.getFields();
    if (fields == null) {
        fields = new ArrayList<>(values.size());
        for (int i = 1; i <= values.size(); i++) {
            fields.add("field" + i);
        }
    }
    Map<Integer, String> taskToComp = new HashMap<>();
    taskToComp.put(task, component);
    Map<String, Map<String, Fields>> compToStreamToFields = new HashMap<>();
    Map<String, Fields> streamToFields = new HashMap<>();
    streamToFields.put(stream, new Fields(fields));
    compToStreamToFields.put(component, streamToFields);
    TopologyContext context = new TopologyContext(null, ConfigUtils.readStormConfig(), taskToComp, null, compToStreamToFields,
            "test-storm-id", null, null, 1, null, null, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(),
            new AtomicBoolean(false));
    return new TupleImpl(context, values, 1, stream);
}
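A hedged usage sketch for a bolt unit test; it assumes MkTupleParam exposes setStream, setComponent, and setFields setters matching the getters used above:

// Build a test tuple on a named stream with explicit field names (illustrative values).
MkTupleParam param = new MkTupleParam();
param.setStream("words");
param.setComponent("word-spout");
param.setFields("word", "count");
Tuple tuple = Testing.testTuple(new Values("storm", 1L), param);
// tuple.getStringByField("word") -> "storm"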
Use of org.apache.storm.tuple.TupleImpl in project storm by apache.
The class TestJoinBolt, method makeStream.
private static ArrayList<Tuple> makeStream(String streamName, String[] fieldNames, Object[][] data) {
    ArrayList<Tuple> result = new ArrayList<>();
    MockContext mockContext = new MockContext(fieldNames);
    for (Object[] record : data) {
        TupleImpl rec = new TupleImpl(mockContext, Arrays.asList(record), 0, streamName);
        result.add(rec);
    }
    return result;
}
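A short usage sketch with hypothetical field names and data, showing how the helper builds one test stream:

// One tuple per row; field names come from the shared MockContext.
String[] userFields = { "userId", "name" };
Object[][] users = {
    { 1, "alice" },
    { 2, "bob" }
};
ArrayList<Tuple> userStream = makeStream("users", userFields, users);
// userStream.get(0).getStringByField("name") -> "alice"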
Use of org.apache.storm.tuple.TupleImpl in project storm by apache.
The class TestJoinBolt, method makeNestedEventsStream.
private static ArrayList<Tuple> makeNestedEventsStream(String streamName, String[] fieldNames, Object[][] records) {
    MockContext mockContext = new MockContext(new String[] { "outer" });
    ArrayList<Tuple> result = new ArrayList<>(records.length);
    // convert each record into a HashMap using fieldNames as keys
    for (Object[] record : records) {
        HashMap<String, Object> recordMap = new HashMap<>(fieldNames.length);
        for (int i = 0; i < fieldNames.length; i++) {
            recordMap.put(fieldNames[i], record[i]);
        }
        ArrayList<Object> tupleValues = new ArrayList<>(1);
        tupleValues.add(recordMap);
        TupleImpl tuple = new TupleImpl(mockContext, tupleValues, 0, streamName);
        result.add(tuple);
    }
    return result;
}
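Since each tuple wraps the whole record in a single "outer" field, reading a value back means pulling out that map first. A hedged sketch, with illustrative field names and data:

// Build one nested-event tuple and read a field back out of the wrapped map.
ArrayList<Tuple> events = makeNestedEventsStream("events", new String[] { "id", "city" },
        new Object[][] { { 1, "berlin" } });
@SuppressWarnings("unchecked")
Map<String, Object> outer = (Map<String, Object>) events.get(0).getValueByField("outer");
Object city = outer.get("city"); // "berlin"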