Use of org.apache.storm.generated.SpoutSpec in project storm by apache, in the class TestUtilsForBlacklistScheduler, method genExecsAndComps.
/**
 * Builds a map from executor to component id for every spout and bolt in the
 * given topology, assigning consecutive one-task executors (task i .. i+1).
 *
 * @param topology         the topology whose spouts and bolts are expanded
 * @param spoutParallelism number of executors to create per spout
 * @param boltParallelism  number of executors to create per bolt
 * @return map of ExecutorDetails to the owning component id
 */
public static Map<ExecutorDetails, String> genExecsAndComps(StormTopology topology, int spoutParallelism, int boltParallelism) {
    Map<ExecutorDetails, String> execToComp = new HashMap<>();
    // Each executor spans exactly two consecutive task ids: [task, task + 1].
    int task = 0;
    for (Map.Entry<String, SpoutSpec> spout : topology.get_spouts().entrySet()) {
        for (int i = 0; i < spoutParallelism; i++, task++) {
            execToComp.put(new ExecutorDetails(task, task + 1), spout.getKey());
        }
    }
    for (Map.Entry<String, Bolt> bolt : topology.get_bolts().entrySet()) {
        for (int i = 0; i < boltParallelism; i++, task++) {
            execToComp.put(new ExecutorDetails(task, task + 1), bolt.getKey());
        }
    }
    return execToComp;
}
Use of org.apache.storm.generated.SpoutSpec in project storm by apache, in the class CaptureLoad, method captureTopology.
/**
 * Captures the structure, configuration, and measured emit-rate/latency statistics of a
 * running topology from Nimbus, and converts them into a {@code TopologyLoadConf} that
 * the load-generation tooling can replay.
 *
 * @param client          nimbus client used to fetch topology metadata and stats
 * @param topologySummary summary identifying the topology to capture
 * @return a TopologyLoadConf describing the captured topology
 * @throws Exception if any of the nimbus RPC calls fail
 */
static TopologyLoadConf captureTopology(Nimbus.Iface client, TopologySummary topologySummary) throws Exception {
String topologyName = topologySummary.get_name();
LOG.info("Capturing {}...", topologyName);
String topologyId = topologySummary.get_id();
// Executor-level stats (per-stream latencies, emit counts).
TopologyInfo info = client.getTopologyInfo(topologyId);
// Worker-level page info; ":all-time" selects the all-time stats window.
TopologyPageInfo tpinfo = client.getTopologyPageInfo(topologyId, ":all-time", false);
@SuppressWarnings("checkstyle:VariableDeclarationUsageDistance") StormTopology topo = client.getUserTopology(topologyId);
// Done capturing topology information...
// Keep only the conf keys that matter for replay, logging each one kept.
Map<String, Object> savedTopoConf = new HashMap<>();
Map<String, Object> topoConf = (Map<String, Object>) JSONValue.parse(client.getTopologyConf(topologyId));
for (String key : TopologyLoadConf.IMPORTANT_CONF_KEYS) {
Object o = topoConf.get(key);
if (o != null) {
savedTopoConf.put(key, o);
LOG.info("with config {}: {}", key, o);
}
}
// Lets use the number of actually scheduled workers as a way to bridge RAS and non-RAS
int numWorkers = tpinfo.get_num_workers();
if (savedTopoConf.containsKey(Config.TOPOLOGY_WORKERS)) {
numWorkers = Math.max(numWorkers, ((Number) savedTopoConf.get(Config.TOPOLOGY_WORKERS)).intValue());
}
savedTopoConf.put(Config.TOPOLOGY_WORKERS, numWorkers);
Map<String, LoadCompConf.Builder> boltBuilders = new HashMap<>();
Map<String, LoadCompConf.Builder> spoutBuilders = new HashMap<>();
List<InputStream.Builder> inputStreams = new ArrayList<>();
Map<GlobalStreamId, OutputStream.Builder> outStreams = new HashMap<>();
// Bolts
if (topo.get_bolts() != null) {
for (Map.Entry<String, Bolt> boltSpec : topo.get_bolts().entrySet()) {
String boltComp = boltSpec.getKey();
LOG.info("Found bolt {}...", boltComp);
Bolt bolt = boltSpec.getValue();
ComponentCommon common = bolt.get_common();
// Record every input stream (who feeds this bolt, and with what grouping).
Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
if (inputs != null) {
for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
GlobalStreamId id = input.getKey();
LOG.info("with input {}...", id);
Grouping grouping = input.getValue();
InputStream.Builder builder = new InputStream.Builder().withId(id.get_streamId()).withFromComponent(id.get_componentId()).withToComponent(boltComp).withGroupingType(grouping);
inputStreams.add(builder);
}
}
// Record every declared output stream; stats are filled in later.
Map<String, StreamInfo> outputs = common.get_streams();
if (outputs != null) {
for (String name : outputs.keySet()) {
GlobalStreamId id = new GlobalStreamId(boltComp, name);
LOG.info("and output {}...", id);
OutputStream.Builder builder = new OutputStream.Builder().withId(name);
outStreams.put(id, builder);
}
}
LoadCompConf.Builder builder = new LoadCompConf.Builder().withParallelism(common.get_parallelism_hint()).withId(boltComp);
boltBuilders.put(boltComp, builder);
}
// Attach RAS CPU/memory settings to the bolts that declared them.
Map<String, Map<String, Double>> boltResources = getBoltsResources(topo, topoConf);
for (Map.Entry<String, Map<String, Double>> entry : boltResources.entrySet()) {
LoadCompConf.Builder bd = boltBuilders.get(entry.getKey());
if (bd != null) {
Map<String, Double> resources = entry.getValue();
Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
if (cpu != null) {
bd.withCpuLoad(cpu);
}
Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
if (mem != null) {
bd.withMemoryLoad(mem);
}
}
}
}
// Spouts
// Same as the bolt pass, minus inputs (spouts only declare output streams).
if (topo.get_spouts() != null) {
for (Map.Entry<String, SpoutSpec> spoutSpec : topo.get_spouts().entrySet()) {
String spoutComp = spoutSpec.getKey();
LOG.info("Found Spout {}...", spoutComp);
SpoutSpec spout = spoutSpec.getValue();
ComponentCommon common = spout.get_common();
Map<String, StreamInfo> outputs = common.get_streams();
if (outputs != null) {
for (String name : outputs.keySet()) {
GlobalStreamId id = new GlobalStreamId(spoutComp, name);
LOG.info("with output {}...", id);
OutputStream.Builder builder = new OutputStream.Builder().withId(name);
outStreams.put(id, builder);
}
}
LoadCompConf.Builder builder = new LoadCompConf.Builder().withParallelism(common.get_parallelism_hint()).withId(spoutComp);
spoutBuilders.put(spoutComp, builder);
}
Map<String, Map<String, Double>> spoutResources = getSpoutsResources(topo, topoConf);
for (Map.Entry<String, Map<String, Double>> entry : spoutResources.entrySet()) {
LoadCompConf.Builder sd = spoutBuilders.get(entry.getKey());
if (sd != null) {
Map<String, Double> resources = entry.getValue();
Double cpu = resources.get(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT);
if (cpu != null) {
sd.withCpuLoad(cpu);
}
Double mem = resources.get(Config.TOPOLOGY_COMPONENT_RESOURCES_ONHEAP_MEMORY_MB);
if (mem != null) {
sd.withMemoryLoad(mem);
}
}
}
}
// Stats...
// Group executor summaries by component id so per-stream stats can be looked up below.
Map<String, List<ExecutorSummary>> byComponent = new HashMap<>();
for (ExecutorSummary executor : info.get_executors()) {
String component = executor.get_component_id();
List<ExecutorSummary> list = byComponent.get(component);
if (list == null) {
list = new ArrayList<>();
byComponent.put(component, list);
}
list.add(executor);
}
List<InputStream> streams = new ArrayList<>(inputStreams.size());
// Compute the stats for the different input streams
for (InputStream.Builder builder : inputStreams) {
GlobalStreamId streamId = new GlobalStreamId(builder.getFromComponent(), builder.getId());
List<ExecutorSummary> summaries = byComponent.get(builder.getToComponent());
// Execute and process latency...
builder.withProcessTime(new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_process_ms_avg)));
builder.withExecTime(new NormalDistStats(extractBoltValues(summaries, streamId, BoltStats::get_execute_ms_avg)));
// InputStream is done
streams.add(builder.build());
}
// There is a bug in some versions that returns 0 for the uptime.
// To work around it we should get it an alternative (working) way.
Map<String, Integer> workerToUptime = new HashMap<>();
for (WorkerSummary ws : tpinfo.get_workers()) {
workerToUptime.put(ws.get_supervisor_id() + ":" + ws.get_port(), ws.get_uptime_secs());
}
LOG.debug("WORKER TO UPTIME {}", workerToUptime);
// Derive an emitted tuples/sec rate for every output stream from the raw emit counts.
for (Map.Entry<GlobalStreamId, OutputStream.Builder> entry : outStreams.entrySet()) {
OutputStream.Builder builder = entry.getValue();
GlobalStreamId id = entry.getKey();
List<Double> emittedRate = new ArrayList<>();
List<ExecutorSummary> summaries = byComponent.get(id.get_componentId());
if (summaries != null) {
for (ExecutorSummary summary : summaries) {
if (summary.is_set_stats()) {
int uptime = summary.get_uptime_secs();
LOG.debug("UPTIME {}", uptime);
if (uptime <= 0) {
// Likely it is because of a bug, so try to get it another way
String key = summary.get_host() + ":" + summary.get_port();
uptime = workerToUptime.getOrDefault(key, 1);
LOG.debug("Getting uptime for worker {}, {}", key, uptime);
}
for (Map.Entry<String, Map<String, Long>> statEntry : summary.get_stats().get_emitted().entrySet()) {
String timeWindow = statEntry.getKey();
long timeSecs = uptime;
try {
// Window keys are usually numeric seconds ("600", "10800", ...);
// non-numeric keys like ":all-time" fall back to the uptime.
timeSecs = Long.valueOf(timeWindow);
} catch (NumberFormatException e) {
// Ignored...
}
// A window longer than the uptime would understate the rate; clamp it.
timeSecs = Math.min(timeSecs, uptime);
Long count = statEntry.getValue().get(id.get_streamId());
if (count != null) {
// NOTE(review): if the fallback map supplied an uptime of 0 this divides by
// zero (yielding Infinity) — confirm worker uptimes are always positive.
LOG.debug("{} emitted {} for {} secs or {} tuples/sec", id, count, timeSecs, count.doubleValue() / timeSecs);
emittedRate.add(count.doubleValue() / timeSecs);
}
}
}
}
}
builder.withRate(new NormalDistStats(emittedRate));
// The OutputStream is done
// Attach the finished stream to its owning component (bolt first, then spout).
LoadCompConf.Builder comp = boltBuilders.get(id.get_componentId());
if (comp == null) {
comp = spoutBuilders.get(id.get_componentId());
}
// NOTE(review): comp is not null-checked here; a stream owned by a component in
// neither builder map would NPE — presumably impossible by construction, but verify.
comp.withStream(builder.build());
}
List<LoadCompConf> spouts = spoutBuilders.values().stream().map((b) -> b.build()).collect(Collectors.toList());
List<LoadCompConf> bolts = boltBuilders.values().stream().map((b) -> b.build()).collect(Collectors.toList());
return new TopologyLoadConf(topologyName, savedTopoConf, spouts, bolts, streams);
}
Use of org.apache.storm.generated.SpoutSpec in project storm by apache, in the class StreamBuilderTest, method testRepartition.
@Test
public void testRepartition() throws Exception {
    // A stream with two repartition() calls: each repartition introduces a new bolt
    // whose parallelism hint matches the requested partition count, and downstream
    // bolts inherit the parallelism of the last repartition.
    Stream<String> stream = streamBuilder.newStream(newSpout(Utils.DEFAULT_STREAM_ID), new ValueMapper<>(0));
    stream.repartition(3).filter(x -> true).repartition(2).filter(x -> true).aggregate(new Count<>());
    StormTopology topology = streamBuilder.build();
    assertEquals(1, topology.get_spouts_size());
    SpoutSpec theSpout = topology.get_spouts().get("spout1");
    assertEquals(4, topology.get_bolts_size());
    Map<String, Bolt> bolts = topology.get_bolts();
    // Spout and first bolt run at the default parallelism of 1.
    assertEquals(1, theSpout.get_common().get_parallelism_hint());
    assertEquals(1, bolts.get("bolt1").get_common().get_parallelism_hint());
    assertEquals(3, bolts.get("bolt2").get_common().get_parallelism_hint());
    assertEquals(2, bolts.get("bolt3").get_common().get_parallelism_hint());
    assertEquals(2, bolts.get("bolt4").get_common().get_parallelism_hint());
}
Use of org.apache.storm.generated.SpoutSpec in project incubator-atlas by apache, in the class StormAtlasHook, method addSpouts.
/**
 * Creates an Atlas Referenceable for each spout and registers it in the
 * node-entity map keyed by spout name.
 *
 * @param spouts       spout specs from the topology, keyed by spout name
 * @param nodeEntities map to populate with one Referenceable per spout
 * @throws IllegalAccessException if building a spout instance fails reflectively
 */
private void addSpouts(Map<String, SpoutSpec> spouts, Map<String, Referenceable> nodeEntities) throws IllegalAccessException {
    for (String spoutName : spouts.keySet()) {
        nodeEntities.put(spoutName, createSpoutInstance(spoutName, spouts.get(spoutName)));
    }
}
Use of org.apache.storm.generated.SpoutSpec in project storm by apache, in the class TopologyDetails, method getComponents.
/**
 * Returns a representation of the non-system components of the topology graph.
 * Each Component object in the returning map is populated with the list of its
 * parents, children and execs assigned to that component.
 * @return a map of components
 */
public Map<String, Component> getComponents() {
    Map<String, Component> all_comp = new HashMap<>();
    StormTopology storm_topo = this.topology;
    // spouts
    if (storm_topo.get_spouts() != null) {
        for (Map.Entry<String, SpoutSpec> spoutEntry : storm_topo.get_spouts().entrySet()) {
            if (!Utils.isSystemId(spoutEntry.getKey())) {
                addComponent(all_comp, spoutEntry.getKey(), Component.ComponentType.SPOUT,
                        spoutEntry.getValue().get_common().get_inputs());
            }
        }
    }
    // bolts
    if (storm_topo.get_bolts() != null) {
        for (Map.Entry<String, Bolt> boltEntry : storm_topo.get_bolts().entrySet()) {
            if (!Utils.isSystemId(boltEntry.getKey())) {
                addComponent(all_comp, boltEntry.getKey(), Component.ComponentType.BOLT,
                        boltEntry.getValue().get_common().get_inputs());
            }
        }
    }
    return all_comp;
}

/**
 * Registers (or updates) one component in the accumulator map and wires the
 * parent/child links implied by its declared input streams.
 * This factors out the logic that was previously duplicated verbatim between
 * the spout and bolt branches of getComponents (including a redundant execs
 * assignment repeated in both arms of each if/else).
 *
 * @param all_comp accumulator map of component id to Component being built
 * @param compId   id of the component to add or update
 * @param type     whether the component is a spout or a bolt
 * @param inputs   the component's declared inputs (stream id to grouping)
 */
private void addComponent(Map<String, Component> all_comp, String compId,
        Component.ComponentType type, Map<GlobalStreamId, Grouping> inputs) {
    // A placeholder Component may already exist if a child registered this id
    // as its parent before we visited it.
    Component comp = all_comp.get(compId);
    if (comp == null) {
        comp = new Component(compId);
        all_comp.put(compId, comp);
    }
    comp.execs = componentToExecs(comp.id);
    comp.type = type;
    for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
        String parentId = input.getKey().get_componentId();
        comp.parents.add(parentId);
        // Create a placeholder for parents not yet visited; the full details are
        // filled in when their own entry is processed.
        if (!all_comp.containsKey(parentId)) {
            all_comp.put(parentId, new Component(parentId));
        }
        all_comp.get(parentId).children.add(compId);
    }
}
Aggregations