Example 71 with StormTopology

Use of org.apache.storm.generated.StormTopology in project storm by apache.

Class BasicContainerTest, method testLaunch.

@Test
public void testLaunch() throws Exception {
    final String topoId = "test_topology_current";
    final int supervisorPort = 6628;
    final int port = 8080;
    final String stormHome = ContainerTest.asAbsPath("tmp", "storm-home");
    final String stormLogDir = ContainerTest.asFile(".", "target").getCanonicalPath();
    final String workerId = "worker-id";
    final String stormLocal = ContainerTest.asAbsPath("tmp", "storm-local");
    final String distRoot = ContainerTest.asAbsPath(stormLocal, "supervisor", "stormdist", topoId);
    final File stormcode = new File(distRoot, "stormcode.ser");
    final File stormjar = new File(distRoot, "stormjar.jar");
    final String log4jdir = ContainerTest.asAbsPath(stormHome, "conf");
    final String workerConf = ContainerTest.asAbsPath(log4jdir, "worker.xml");
    final String workerRoot = ContainerTest.asAbsPath(stormLocal, "workers", workerId);
    final String workerTmpDir = ContainerTest.asAbsPath(workerRoot, "tmp");
    final StormTopology st = new StormTopology();
    st.set_spouts(new HashMap<>());
    st.set_bolts(new HashMap<>());
    st.set_state_spouts(new HashMap<>());
    byte[] serializedState = Utils.gzip(Utils.thriftSerialize(st));
    final Map<String, Object> superConf = new HashMap<>();
    superConf.put(Config.STORM_LOCAL_DIR, stormLocal);
    superConf.put(Config.STORM_WORKERS_ARTIFACTS_DIR, stormLocal);
    superConf.put(DaemonConfig.STORM_LOG4J2_CONF_DIR, log4jdir);
    superConf.put(Config.WORKER_CHILDOPTS, " -Dtesting=true");
    LocalAssignment la = new LocalAssignment();
    la.set_topology_id(topoId);
    AdvancedFSOps ops = mock(AdvancedFSOps.class);
    when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true);
    when(ops.slurp(stormcode)).thenReturn(serializedState);
    LocalState ls = mock(LocalState.class);
    MockResourceIsolationManager iso = new MockResourceIsolationManager();
    checkpoint(() -> {
        MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", supervisorPort, port, la, iso, ls, workerId, new StormMetricsRegistry(), new HashMap<>(), ops, "profile");
        mc.launch();
        assertEquals(1, iso.workerCmds.size());
        CommandRun cmd = iso.workerCmds.get(0);
        iso.workerCmds.clear();
        assertListEquals(Arrays.asList("java", "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "-Dworker.memory_limit_mb=768", "-Dlog4j.configurationFile=" + workerConf, "org.apache.storm.LogWriter", "java", "-server", "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "-Dworker.memory_limit_mb=768", "-Dlog4j.configurationFile=" + workerConf, "-Dtesting=true", "-Djava.library.path=JLP", "-Dstorm.conf.file=", "-Dstorm.options=", "-Djava.io.tmpdir=" + workerTmpDir, "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "org.apache.storm.daemon.worker.Worker", topoId, "SUPERVISOR", String.valueOf(supervisorPort), String.valueOf(port), workerId), cmd.cmd);
        assertEquals(new File(workerRoot), cmd.pwd);
    }, ConfigUtils.STORM_HOME, stormHome, "storm.log.dir", stormLogDir);
}
Also used: MockResourceIsolationManager (org.apache.storm.daemon.supervisor.ContainerTest.MockResourceIsolationManager), HashMap (java.util.HashMap), StormTopology (org.apache.storm.generated.StormTopology), StormMetricsRegistry (org.apache.storm.metric.StormMetricsRegistry), LocalAssignment (org.apache.storm.generated.LocalAssignment), LocalState (org.apache.storm.utils.LocalState), File (java.io.File), Test (org.junit.Test)
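
For context on what the mocked ops.slurp(stormcode) hands back: the test gzips a thrift-serialized StormTopology, and the container is expected to reverse that when it launches the worker. Below is a minimal round-trip sketch, assuming the gzip/gunzip and thriftSerialize/thriftDeserialize helpers in org.apache.storm.utils.Utils act as byte-array inverses; it is an illustration, not the BasicContainer code itself.

import java.util.HashMap;

import org.apache.storm.generated.StormTopology;
import org.apache.storm.utils.Utils;

public class TopologyRoundTripSketch {
    public static void main(String[] args) {
        // Build the same empty topology the test serializes.
        StormTopology st = new StormTopology();
        st.set_spouts(new HashMap<>());
        st.set_bolts(new HashMap<>());
        st.set_state_spouts(new HashMap<>());

        // What the test stores as serializedState and returns from ops.slurp(stormcode).
        byte[] serializedState = Utils.gzip(Utils.thriftSerialize(st));

        // Presumably what the container does with stormcode.ser on launch:
        // gunzip, then thrift-deserialize back into a StormTopology.
        StormTopology readBack =
            Utils.thriftDeserialize(StormTopology.class, Utils.gunzip(serializedState));
        System.out.println("spouts=" + readBack.get_spouts_size()
            + " bolts=" + readBack.get_bolts_size());
    }
}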

Example 72 with StormTopology

Use of org.apache.storm.generated.StormTopology in project storm by apache.

Class BasicContainerTest, method testLaunchStorm1version.

@Test
public void testLaunchStorm1version() throws Exception {
    final String topoId = "test_topology_storm_1.x";
    final int supervisorPort = 6628;
    final int port = 8080;
    final String stormHome = ContainerTest.asAbsPath("tmp", "storm-home");
    final String stormLogDir = ContainerTest.asFile(".", "target").getCanonicalPath();
    final String workerId = "worker-id";
    final String stormLocal = ContainerTest.asAbsPath("tmp", "storm-local");
    final String distRoot = ContainerTest.asAbsPath(stormLocal, "supervisor", "stormdist", topoId);
    final File stormcode = new File(distRoot, "stormcode.ser");
    final File stormjar = new File(distRoot, "stormjar.jar");
    final String log4jdir = ContainerTest.asAbsPath(stormHome, "conf");
    final String workerConf = ContainerTest.asAbsPath(log4jdir, "worker.xml");
    final String workerRoot = ContainerTest.asAbsPath(stormLocal, "workers", workerId);
    final String workerTmpDir = ContainerTest.asAbsPath(workerRoot, "tmp");
    final StormTopology st = new StormTopology();
    st.set_spouts(new HashMap<>());
    st.set_bolts(new HashMap<>());
    st.set_state_spouts(new HashMap<>());
    // minimum 1.x version of supporting STORM-2448 would be 1.0.4
    st.set_storm_version("1.0.4");
    byte[] serializedState = Utils.gzip(Utils.thriftSerialize(st));
    final Map<String, Object> superConf = new HashMap<>();
    superConf.put(Config.STORM_LOCAL_DIR, stormLocal);
    superConf.put(Config.STORM_WORKERS_ARTIFACTS_DIR, stormLocal);
    superConf.put(DaemonConfig.STORM_LOG4J2_CONF_DIR, log4jdir);
    superConf.put(Config.WORKER_CHILDOPTS, " -Dtesting=true");
    LocalAssignment la = new LocalAssignment();
    la.set_topology_id(topoId);
    AdvancedFSOps ops = mock(AdvancedFSOps.class);
    when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true);
    when(ops.slurp(stormcode)).thenReturn(serializedState);
    LocalState ls = mock(LocalState.class);
    MockResourceIsolationManager iso = new MockResourceIsolationManager();
    checkpoint(() -> {
        MockBasicContainer mc = new MockBasicContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", supervisorPort, port, la, iso, ls, workerId, new StormMetricsRegistry(), new HashMap<>(), ops, "profile");
        mc.launch();
        assertEquals(1, iso.workerCmds.size());
        CommandRun cmd = iso.workerCmds.get(0);
        iso.workerCmds.clear();
        assertListEquals(Arrays.asList("java", "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "-Dworker.memory_limit_mb=768", "-Dlog4j.configurationFile=" + workerConf, "org.apache.storm.LogWriter", "java", "-server", "-Dlogging.sensitivity=S3", "-Dlogfile.name=worker.log", "-Dstorm.home=" + stormHome, "-Dworkers.artifacts=" + stormLocal, "-Dstorm.id=" + topoId, "-Dworker.id=" + workerId, "-Dworker.port=" + port, "-Dstorm.log.dir=" + stormLogDir, "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector", "-Dstorm.local.dir=" + stormLocal, "-Dworker.memory_limit_mb=768", "-Dlog4j.configurationFile=" + workerConf, "-Dtesting=true", "-Djava.library.path=JLP", "-Dstorm.conf.file=", "-Dstorm.options=", "-Djava.io.tmpdir=" + workerTmpDir, "-cp", "FRAMEWORK_CP:" + stormjar.getAbsolutePath(), "org.apache.storm.daemon.worker", topoId, "SUPERVISOR", String.valueOf(port), workerId), cmd.cmd);
        assertEquals(new File(workerRoot), cmd.pwd);
    }, ConfigUtils.STORM_HOME, stormHome, "storm.log.dir", stormLogDir);
}
Also used: MockResourceIsolationManager (org.apache.storm.daemon.supervisor.ContainerTest.MockResourceIsolationManager), HashMap (java.util.HashMap), StormTopology (org.apache.storm.generated.StormTopology), StormMetricsRegistry (org.apache.storm.metric.StormMetricsRegistry), LocalAssignment (org.apache.storm.generated.LocalAssignment), LocalState (org.apache.storm.utils.LocalState), File (java.io.File), Test (org.junit.Test)
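
Compared with Example 71, the only input change is the set_storm_version("1.0.4") call, yet the asserted command swaps the worker entry point to the 1.x name org.apache.storm.daemon.worker and drops the supervisor port argument. The version-based selection could look roughly like the hypothetical helper below; the real decision in BasicContainer is more involved, so treat this only as a sketch.

import org.apache.storm.generated.StormTopology;

final class WorkerMainClassSketch {

    // Hypothetical helper, not the actual BasicContainer logic: pick the worker
    // entry point from the storm_version declared in the serialized topology.
    static String workerMainClass(StormTopology topology) {
        // Assume a current (2.x) worker when the topology declares no version.
        String version = topology.is_set_storm_version() ? topology.get_storm_version() : "2.0.0";
        return version.startsWith("1.")
            ? "org.apache.storm.daemon.worker"         // 1.x worker entry point
            : "org.apache.storm.daemon.worker.Worker"; // 2.x worker entry point
    }
}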

Example 73 with StormTopology

Use of org.apache.storm.generated.StormTopology in project metron by apache.

Class FluxTopologyComponent, method startTopology.

private void startTopology(String topologyName, File topologyLoc, File templateFile, Properties properties) throws IOException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException, TException, NoSuchFieldException {
    TopologyDef topologyDef = loadYaml(topologyName, topologyLoc, templateFile, properties);
    Config conf = FluxBuilder.buildConfig(topologyDef);
    ExecutionContext context = new ExecutionContext(topologyDef, conf);
    StormTopology topology = FluxBuilder.buildTopology(context);
    assertNotNull(topology);
    topology.validate();
    try {
        stormCluster.submitTopology(topologyName, conf, topology);
    } catch (Exception nne) {
        // The first submission can fail transiently while the in-memory cluster is
        // still starting up, so back off briefly and retry once.
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
        stormCluster.submitTopology(topologyName, conf, topology);
    }
}
Also used: TopologyDef (org.apache.storm.flux.model.TopologyDef), ExecutionContext (org.apache.storm.flux.model.ExecutionContext), Config (org.apache.storm.Config), StormTopology (org.apache.storm.generated.StormTopology), UnableToStartException (org.apache.metron.integration.UnableToStartException), TException (org.apache.storm.thrift.TException), InvocationTargetException (java.lang.reflect.InvocationTargetException), TProtocolException (org.apache.storm.thrift.protocol.TProtocolException)
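
The retry in startTopology is a single fixed back-off. If the pattern were extracted, a bounded, interrupt-aware variant could be sketched as below; Submission and submitWithRetry are hypothetical names, not part of the Metron or Storm APIs.

final class SubmitRetrySketch {

    @FunctionalInterface
    interface Submission {
        void run() throws Exception;
    }

    // Hypothetical helper: retry a submission a fixed number of times, pausing
    // between attempts and preserving the caller's interrupt status.
    static void submitWithRetry(Submission submission, int attempts, long sleepMs) throws Exception {
        for (int attempt = 1; attempt <= attempts; attempt++) {
            try {
                submission.run();
                return;
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // never swallow interrupts
                throw e;
            } catch (Exception e) {
                if (attempt == attempts) {
                    throw e; // out of attempts, surface the last failure
                }
                Thread.sleep(sleepMs); // brief pause before the next attempt
            }
        }
    }
}

With such a helper, the call site above would reduce to something like submitWithRetry(() -> stormCluster.submitTopology(topologyName, conf, topology), 2, 2000L).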

Example 74 with StormTopology

Use of org.apache.storm.generated.StormTopology in project open-kilda by telstra.

Class ControllerToSpeakerProxyBoltTest, method setUp.

@Before
public void setUp() {
    Set<String> regions = new HashSet<>();
    regions.add(REGION_ONE);
    regions.add(REGION_TWO);
    subject = new ControllerToSpeakerProxyBolt(TARGET_TOPIC, regions, Duration.ofSeconds(900));
    when(topologyContext.getThisTaskId()).thenReturn(1);
    subject.prepare(topologyConfig, topologyContext, outputCollector);
    StormTopology topology = mock(StormTopology.class);
    Map<Integer, String> taskToComponent = ImmutableMap.of(TASK_ID_SPOUT, ComponentType.SPEAKER_KAFKA_SPOUT, SWITCH_MONITOR_BOLT, SwitchMonitorBolt.BOLT_ID, ZOOKEEPER_SPOUT, ZooKeeperSpout.SPOUT_ID);
    Map<String, Map<String, Fields>> componentToFields = ImmutableMap.of(ComponentType.SPEAKER_KAFKA_SPOUT, ImmutableMap.of(Utils.DEFAULT_STREAM_ID, new Fields(KafkaRecordTranslator.FIELD_ID_KEY, KafkaRecordTranslator.FIELD_ID_PAYLOAD, AbstractBolt.FIELD_ID_CONTEXT)), SwitchMonitorBolt.BOLT_ID, ImmutableMap.of(SwitchMonitorBolt.STREAM_REGION_MAPPING_ID, SwitchMonitorBolt.STREAM_REGION_MAPPING_FIELDS), ZooKeeperSpout.SPOUT_ID, ImmutableMap.of(Utils.DEFAULT_STREAM_ID, new Fields(ZooKeeperSpout.FIELD_ID_LIFECYCLE_EVENT, ZooKeeperSpout.FIELD_ID_CONTEXT)));
    generalTopologyContext = new GeneralTopologyContext(topology, topologyConfig, taskToComponent, Collections.emptyMap(), componentToFields, "dummy");
}
Also used: Fields (org.apache.storm.tuple.Fields), GeneralTopologyContext (org.apache.storm.task.GeneralTopologyContext), StormTopology (org.apache.storm.generated.StormTopology), ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), HashSet (java.util.HashSet), Before (org.junit.Before)

Example 75 with StormTopology

Use of org.apache.storm.generated.StormTopology in project open-kilda by telstra.

Class OpenTsdbTopologyTest, method shouldSuccessfulSendDatapoint.

@Test
public void shouldSuccessfulSendDatapoint() {
    Datapoint datapoint = new Datapoint("metric", timestamp, Collections.emptyMap(), 123);
    MockedSources sources = new MockedSources();
    Testing.withTrackedCluster(clusterParam, (cluster) -> {
        OpenTsdbTopology topology = new OpenTsdbTopology(makeLaunchEnvironment(getProperties()));
        sources.addMockData(ZooKeeperSpout.SPOUT_ID, new Values(LifecycleEvent.builder().signal(Signal.NONE).build(), null));
        sources.addMockData(OpenTsdbTopology.OTSDB_SPOUT_ID, new Values(null, datapoint));
        completeTopologyParam.setMockedSources(sources);
        StormTopology stormTopology = topology.createTopology();
        stormTopology.get_bolts().remove(ZooKeeperBolt.BOLT_ID);
        activateDatapointParserBolt(stormTopology);
        // Run the topology to completion; the result map is not inspected here because
        // the assertion below goes through the mocked OpenTSDB server instead.
        Map result = Testing.completeTopology(cluster, stormTopology, completeTopologyParam);
    });
    // verify that request is sent to OpenTSDB server
    mockServer.verify(REQUEST, VerificationTimes.exactly(1));
}
Also used: Datapoint (org.openkilda.messaging.info.Datapoint), MockedSources (org.apache.storm.testing.MockedSources), StormTopology (org.apache.storm.generated.StormTopology), Values (org.apache.storm.tuple.Values), Map (java.util.Map), StableAbstractStormTest (org.openkilda.wfm.StableAbstractStormTest), Test (org.junit.Test)
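
The stormTopology.get_bolts().remove(ZooKeeperBolt.BOLT_ID) call works because StormTopology is a thrift-generated struct whose accessors expose the live component maps. The sketch below shows the same pre-submission pruning on a throwaway topology; it assumes the TestWordSpout and TestWordCounter helpers from Storm's testing package are on the classpath and is not taken from OpenTsdbTopologyTest.

import org.apache.storm.generated.StormTopology;
import org.apache.storm.testing.TestWordCounter;
import org.apache.storm.testing.TestWordSpout;
import org.apache.storm.topology.TopologyBuilder;

public class PruneBoltSketch {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new TestWordSpout());
        builder.setBolt("counter", new TestWordCounter()).shuffleGrouping("words");
        builder.setBolt("side-effect", new TestWordCounter()).shuffleGrouping("words");

        StormTopology topology = builder.createTopology();

        // get_bolts() returns the underlying bolt map, so a test can drop a component
        // it does not want to exercise before handing the topology to the cluster.
        topology.get_bolts().remove("side-effect");
        System.out.println("remaining bolts: " + topology.get_bolts().keySet());
    }
}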

Aggregations

StormTopology (org.apache.storm.generated.StormTopology): 162
Config (org.apache.storm.Config): 72
HashMap (java.util.HashMap): 67
Test (org.junit.Test): 59
TopologyBuilder (org.apache.storm.topology.TopologyBuilder): 44
Map (java.util.Map): 35
ArrayList (java.util.ArrayList): 29
TopologyDetails (org.apache.storm.scheduler.TopologyDetails): 27
Test (org.junit.jupiter.api.Test): 26
List (java.util.List): 24
Bolt (org.apache.storm.generated.Bolt): 23
Values (org.apache.storm.tuple.Values): 23
StormMetricsRegistry (org.apache.storm.metric.StormMetricsRegistry): 22
Cluster (org.apache.storm.scheduler.Cluster): 22
SupervisorDetails (org.apache.storm.scheduler.SupervisorDetails): 22
Topologies (org.apache.storm.scheduler.Topologies): 22
Fields (org.apache.storm.tuple.Fields): 22
INimbus (org.apache.storm.scheduler.INimbus): 21
TopologyDef (org.apache.storm.flux.model.TopologyDef): 20
TestUtilsForResourceAwareScheduler (org.apache.storm.scheduler.resource.TestUtilsForResourceAwareScheduler): 20