
Example 1 with SimpleVersion

Use of org.apache.storm.utils.SimpleVersion in project storm by apache.

In the class Nimbus, method submitTopologyWithOpts:

@Override
public void submitTopologyWithOpts(String topoName, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException, TException {
    try {
        submitTopologyWithOptsCalls.mark();
        assertIsLeader();
        assert (options != null);
        validateTopologyName(topoName);
        checkAuthorization(topoName, null, "submitTopology");
        assertTopoActive(topoName, false);
        @SuppressWarnings("unchecked") Map<String, Object> topoConf = (Map<String, Object>) JSONValue.parse(jsonConf);
        try {
            ConfigValidation.validateTopoConf(topoConf);
        } catch (IllegalArgumentException ex) {
            throw new WrappedInvalidTopologyException(ex.getMessage());
        }
        validator.validate(topoName, topoConf, topology);
        if ((boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false)) {
            @SuppressWarnings("unchecked") Map<String, Object> blobMap = (Map<String, Object>) topoConf.get(Config.TOPOLOGY_BLOBSTORE_MAP);
            if (blobMap != null && !blobMap.isEmpty()) {
                throw new WrappedInvalidTopologyException("symlinks are disabled so blobs are not supported but " + Config.TOPOLOGY_BLOBSTORE_MAP + " = " + blobMap);
            }
        }
        ServerUtils.validateTopologyWorkerMaxHeapSizeConfigs(topoConf, topology, ObjectReader.getDouble(conf.get(Config.TOPOLOGY_WORKER_MAX_HEAP_SIZE_MB)));
        Utils.validateTopologyBlobStoreMap(topoConf, blobStore);
        long uniqueNum = submittedCount.incrementAndGet();
        String topoId = topoName + "-" + uniqueNum + "-" + Time.currentTimeSecs();
        Map<String, String> creds = null;
        if (options.is_set_creds()) {
            creds = options.get_creds().get_creds();
        }
        topoConf.put(Config.STORM_ID, topoId);
        topoConf.put(Config.TOPOLOGY_NAME, topoName);
        topoConf = normalizeConf(conf, topoConf, topology);
        OciUtils.adjustImageConfigForTopo(conf, topoConf, topoId);
        ReqContext req = ReqContext.context();
        Principal principal = req.principal();
        String submitterPrincipal = principal == null ? null : principal.toString();
        Set<String> topoAcl = new HashSet<>(ObjectReader.getStrings(topoConf.get(Config.TOPOLOGY_USERS)));
        topoAcl.add(submitterPrincipal);
        String submitterUser = principalToLocal.toLocal(principal);
        topoAcl.add(submitterUser);
        String topologyPrincipal = Utils.OR(submitterPrincipal, "");
        topoConf.put(Config.TOPOLOGY_SUBMITTER_PRINCIPAL, topologyPrincipal);
        String systemUser = System.getProperty("user.name");
        String topologyOwner = Utils.OR(submitterUser, systemUser);
        // Don't let the user set who we launch as
        topoConf.put(Config.TOPOLOGY_SUBMITTER_USER, topologyOwner);
        topoConf.put(Config.TOPOLOGY_USERS, new ArrayList<>(topoAcl));
        topoConf.put(Config.STORM_ZOOKEEPER_SUPERACL, conf.get(Config.STORM_ZOOKEEPER_SUPERACL));
        if (!Utils.isZkAuthenticationConfiguredStormServer(conf)) {
            topoConf.remove(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME);
            topoConf.remove(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD);
        }
        if (!(Boolean) conf.getOrDefault(DaemonConfig.STORM_TOPOLOGY_CLASSPATH_BEGINNING_ENABLED, false)) {
            topoConf.remove(Config.TOPOLOGY_CLASSPATH_BEGINNING);
        }
        String topoVersionString = topology.get_storm_version();
        if (topoVersionString == null) {
            topoVersionString = (String) conf.getOrDefault(Config.SUPERVISOR_WORKER_DEFAULT_VERSION, VersionInfo.getVersion());
        }
        // Check if we can run a topology with that version of storm.
        SimpleVersion topoVersion = new SimpleVersion(topoVersionString);
        List<String> cp = Utils.getCompatibleVersion(supervisorClasspaths, topoVersion, "classpath", null);
        if (cp == null) {
            throw new WrappedInvalidTopologyException("Topology submitted with storm version " + topoVersionString + " but could not find a configured compatible version to use " + supervisorClasspaths.keySet());
        }
        Map<String, Object> otherConf = Utils.getConfigFromClasspath(cp, conf);
        Map<String, Object> totalConfToSave = Utils.merge(otherConf, topoConf);
        Map<String, Object> totalConf = Utils.merge(conf, totalConfToSave);
        // When reading the conf in nimbus we want to fall back to our own settings
        // if the other config does not have it set.
        topology = normalizeTopology(totalConf, topology);
        // we might need to set the number of acker executors and eventlogger executors to be the estimated number of workers.
        if (ServerUtils.isRas(conf)) {
            int estimatedNumWorker = ServerUtils.getEstimatedWorkerCountForRasTopo(totalConf, topology);
            setUpAckerExecutorConfigs(topoName, totalConfToSave, totalConf, estimatedNumWorker);
            ServerUtils.validateTopologyAckerBundleResource(totalConfToSave, topology, topoName);
            int numEventLoggerExecs = ObjectReader.getInt(totalConf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS), estimatedNumWorker);
            totalConfToSave.put(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS, numEventLoggerExecs);
            LOG.debug("Config {} set to: {} for topology: {}", Config.TOPOLOGY_EVENTLOGGER_EXECUTORS, numEventLoggerExecs, topoName);
        }
        // Remove any configs that are specific to a host that might mess with the running topology.
        // Don't override the host name, or everything looks like it is on nimbus
        totalConfToSave.remove(Config.STORM_LOCAL_HOSTNAME);
        IStormClusterState state = stormClusterState;
        if (creds == null && workerTokenManager != null) {
            // Make sure we can store the worker tokens even if no creds are provided.
            creds = new HashMap<>();
        }
        if (creds != null) {
            Map<String, Object> finalConf = Collections.unmodifiableMap(topoConf);
            for (INimbusCredentialPlugin autocred : nimbusAutocredPlugins) {
                autocred.populateCredentials(creds, finalConf);
            }
            upsertWorkerTokensInCreds(creds, topologyPrincipal, topoId);
        }
        if (ObjectReader.getBoolean(conf.get(Config.SUPERVISOR_RUN_WORKER_AS_USER), false) && (submitterUser == null || submitterUser.isEmpty())) {
            throw new WrappedAuthorizationException("Could not determine the user to run this topology as.");
        }
        // this validates the structure of the topology
        StormCommon.systemTopology(totalConf, topology);
        validateTopologySize(topoConf, conf, topology);
        if (Utils.isZkAuthenticationConfiguredStormServer(conf) && !Utils.isZkAuthenticationConfiguredTopology(topoConf)) {
            throw new IllegalArgumentException("The cluster is configured for zookeeper authentication, but no payload was provided.");
        }
        LOG.info("Received topology submission for {} (storm-{} JDK-{}) with conf {}", topoName, topoVersionString, topology.get_jdk_version(), ConfigUtils.maskPasswords(topoConf));
        // guard against the cleanup thread killing the topology between assignment and topology startup
        synchronized (submitLock) {
            assertTopoActive(topoName, false);
            // cred-update-lock is not needed here because creds are being added for the first time.
            if (creds != null) {
                state.setCredentials(topoId, new Credentials(creds), topoConf);
            }
            LOG.info("uploadedJar {} for {}", uploadedJarLocation, topoName);
            setupStormCode(conf, topoId, uploadedJarLocation, totalConfToSave, topology);
            waitForDesiredCodeReplication(totalConf, topoId);
            state.setupHeatbeats(topoId, topoConf);
            state.setupErrors(topoId, topoConf);
            if (ObjectReader.getBoolean(totalConf.get(Config.TOPOLOGY_BACKPRESSURE_ENABLE), false)) {
                state.setupBackpressure(topoId, topoConf);
            }
            notifyTopologyActionListener(topoName, "submitTopology");
            TopologyStatus status = null;
            switch(options.get_initial_status()) {
                case INACTIVE:
                    status = TopologyStatus.INACTIVE;
                    break;
                case ACTIVE:
                    status = TopologyStatus.ACTIVE;
                    break;
                default:
                    throw new IllegalArgumentException("Initial Status of " + options.get_initial_status() + " is not allowed.");
            }
            startTopology(topoName, topoId, status, topologyOwner, topologyPrincipal, totalConfToSave, topology);
        }
    } catch (Exception e) {
        LOG.warn("Topology submission exception. (topology name='{}')", topoName, e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
Also used : INimbusCredentialPlugin(org.apache.storm.security.INimbusCredentialPlugin) TException(org.apache.storm.thrift.TException) WrappedAuthorizationException(org.apache.storm.utils.WrappedAuthorizationException) ReqContext(org.apache.storm.security.auth.ReqContext) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) SimpleVersion(org.apache.storm.utils.SimpleVersion) IStormClusterState(org.apache.storm.cluster.IStormClusterState) TopologyStatus(org.apache.storm.generated.TopologyStatus) HashSet(java.util.HashSet) WorkerMetricPoint(org.apache.storm.generated.WorkerMetricPoint) DataPoint(org.apache.storm.metric.api.DataPoint) WrappedAuthorizationException(org.apache.storm.utils.WrappedAuthorizationException) IOException(java.io.IOException) IllegalStateException(org.apache.storm.generated.IllegalStateException) AlreadyAliveException(org.apache.storm.generated.AlreadyAliveException) WrappedNotAliveException(org.apache.storm.utils.WrappedNotAliveException) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) AuthorizationException(org.apache.storm.generated.AuthorizationException) NotAliveException(org.apache.storm.generated.NotAliveException) WrappedAlreadyAliveException(org.apache.storm.utils.WrappedAlreadyAliveException) InterruptedIOException(java.io.InterruptedIOException) KeyAlreadyExistsException(org.apache.storm.generated.KeyAlreadyExistsException) TException(org.apache.storm.thrift.TException) WrappedIllegalStateException(org.apache.storm.utils.WrappedIllegalStateException) KeyNotFoundException(org.apache.storm.generated.KeyNotFoundException) InvalidTopologyException(org.apache.storm.generated.InvalidTopologyException) BindException(java.net.BindException) Map(java.util.Map) NavigableMap(java.util.NavigableMap) RotatingMap(org.apache.storm.utils.RotatingMap) ImmutableMap(org.apache.storm.shade.com.google.common.collect.ImmutableMap) TimeCacheMap(org.apache.storm.utils.TimeCacheMap) HashMap(java.util.HashMap) NimbusPrincipal(org.apache.storm.security.auth.NimbusPrincipal) Principal(java.security.Principal) Credentials(org.apache.storm.generated.Credentials)
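
Both this method and BasicContainer.mkLaunchCommand (Example 3) resolve the effective Storm version the same way: the version declared on the topology, falling back to Config.SUPERVISOR_WORKER_DEFAULT_VERSION, and finally the daemon's own version. A minimal standalone sketch of that fallback (the class name and the daemonVersion parameter are illustrative, not part of the Storm API):

import java.util.Map;

import org.apache.storm.Config;
import org.apache.storm.utils.SimpleVersion;

public class TopoVersionResolver {

    // Prefer the version declared by the topology, then the cluster-wide default,
    // then the daemon's own version (passed in so the sketch stays self-contained).
    static SimpleVersion resolve(String declaredVersion, Map<String, Object> conf, String daemonVersion) {
        String versionString = declaredVersion;
        if (versionString == null) {
            versionString = (String) conf.getOrDefault(Config.SUPERVISOR_WORKER_DEFAULT_VERSION, daemonVersion);
        }
        return new SimpleVersion(versionString);
    }
}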

Example 2 with SimpleVersion

Use of org.apache.storm.utils.SimpleVersion in project storm by apache.

In the class Nimbus, method supportRpcHeartbeat:

private boolean supportRpcHeartbeat(TopologyDetails topo) {
    if (stormClusterState.isPacemakerStateStore()) {
        // While using PacemakerStateStorage, ignore RPC heartbeat.
        return false;
    }
    if (!topo.getTopology().is_set_storm_version()) {
        // current version supports RPC heartbeat
        return true;
    }
    String stormVersionStr = topo.getTopology().get_storm_version();
    SimpleVersion stormVersion = new SimpleVersion(stormVersionStr);
    return stormVersion.compareTo(MIN_VERSION_SUPPORT_RPC_HEARTBEAT) >= 0;
}
Also used : SimpleVersion(org.apache.storm.utils.SimpleVersion)
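
supportRpcHeartbeat reduces to a single SimpleVersion comparison against a minimum supported version. A small self-contained sketch of that gate, assuming (as the comment in Example 3 suggests) that an unknown version parses to a major/minor of -1 and should be treated as the current version; the class name and the 2.0.0 threshold are illustrative, not the real MIN_VERSION_SUPPORT_RPC_HEARTBEAT:

import org.apache.storm.utils.SimpleVersion;

public class VersionGate {

    // Illustrative minimum; the real constant lives in the Storm daemons.
    private static final SimpleVersion MIN_SUPPORTED = new SimpleVersion("2.0.0");

    // True when the declared version is at least the minimum, or when the
    // version is unknown (-1.-1) and therefore treated as the current version.
    static boolean supportsRpcHeartbeat(String declaredVersion) {
        SimpleVersion v = new SimpleVersion(declaredVersion);
        boolean unknown = v.getMajor() == -1 && v.getMinor() == -1;
        return unknown || v.compareTo(MIN_SUPPORTED) >= 0;
    }

    public static void main(String[] args) {
        System.out.println(supportsRpcHeartbeat("2.4.0")); // true
        System.out.println(supportsRpcHeartbeat("1.2.3")); // false with a 2.0.0 threshold
    }
}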

Example 3 with SimpleVersion

Use of org.apache.storm.utils.SimpleVersion in project storm by apache.

In the class BasicContainer, method mkLaunchCommand:

/**
 * Create the command to launch the worker process.
 *
 * @param memOnheap  the on heap memory for the worker
 * @param memOffheap the off heap memory for the worker
 * @param stormRoot  the root dist dir for the topology
 * @param jlp        java library path for the topology
 * @param numaId     the NUMA zone id to bind the worker to, or null if none
 * @return the command to run
 *
 * @throws IOException on any error.
 */
private List<String> mkLaunchCommand(final int memOnheap, final int memOffheap, final String stormRoot, final String jlp, final String numaId) throws IOException {
    final String javaCmd = javaCmd("java");
    final String stormOptions = ConfigUtils.concatIfNotNull(System.getProperty("storm.options"));
    final String topoConfFile = ConfigUtils.concatIfNotNull(System.getProperty("storm.conf.file"));
    final String workerTmpDir = ConfigUtils.workerTmpRoot(conf, workerId);
    String topoVersionString = getStormVersionFor(conf, topologyId, ops, stormRoot);
    if (topoVersionString == null) {
        topoVersionString = (String) conf.getOrDefault(Config.SUPERVISOR_WORKER_DEFAULT_VERSION, VersionInfo.getVersion());
    }
    final SimpleVersion topoVersion = new SimpleVersion(topoVersionString);
    List<String> classPathParams = getClassPathParams(stormRoot, topoVersion);
    List<String> commonParams = getCommonParams();
    String log4jConfigurationFile = getWorkerLoggingConfigFile();
    String workerLog4jConfig = log4jConfigurationFile;
    if (topoConf.get(Config.TOPOLOGY_LOGGING_CONFIG_FILE) != null) {
        workerLog4jConfig = workerLog4jConfig + "," + topoConf.get(Config.TOPOLOGY_LOGGING_CONFIG_FILE);
    }
    List<String> commandList = new ArrayList<>();
    String logWriter = getWorkerLogWriter(topoVersion);
    if (logWriter != null) {
        // Log Writer Command...
        commandList.add(javaCmd);
        commandList.addAll(classPathParams);
        commandList.addAll(substituteChildopts(topoConf.get(Config.TOPOLOGY_WORKER_LOGWRITER_CHILDOPTS)));
        commandList.addAll(commonParams);
        commandList.add("-Dlog4j.configurationFile=" + log4jConfigurationFile);
        // The LogWriter in turn launches the actual worker.
        commandList.add(logWriter);
    }
    // Worker Command...
    commandList.add(javaCmd);
    commandList.add("-server");
    commandList.addAll(commonParams);
    commandList.add("-Dlog4j.configurationFile=" + workerLog4jConfig);
    commandList.addAll(substituteChildopts(conf.get(Config.WORKER_CHILDOPTS), memOnheap, memOffheap));
    commandList.addAll(substituteChildopts(topoConf.get(Config.TOPOLOGY_WORKER_CHILDOPTS), memOnheap, memOffheap));
    commandList.addAll(substituteChildopts(Utils.OR(topoConf.get(Config.TOPOLOGY_WORKER_GC_CHILDOPTS), conf.get(Config.WORKER_GC_CHILDOPTS)), memOnheap, memOffheap));
    commandList.addAll(getWorkerProfilerChildOpts(memOnheap, memOffheap));
    commandList.add("-Djava.library.path=" + jlp);
    commandList.add("-Dstorm.conf.file=" + topoConfFile);
    commandList.add("-Dstorm.options=" + stormOptions);
    commandList.add("-Djava.io.tmpdir=" + workerTmpDir);
    commandList.addAll(classPathParams);
    commandList.add(getWorkerMain(topoVersion));
    commandList.add(topologyId);
    String supervisorId = this.supervisorId;
    if (numaId != null) {
        supervisorId += ServerConstants.NUMA_ID_SEPARATOR + numaId;
    }
    commandList.add(supervisorId);
    // unknown version should be treated as "current version", which supports RPC heartbeat
    if ((topoVersion.getMajor() == -1 && topoVersion.getMinor() == -1) || topoVersion.compareTo(MIN_VERSION_SUPPORT_RPC_HEARTBEAT) >= 0) {
        commandList.add(String.valueOf(supervisorPort));
    }
    commandList.add(String.valueOf(port));
    commandList.add(workerId);
    return commandList;
}
Also used : ArrayList(java.util.ArrayList) SimpleVersion(org.apache.storm.utils.SimpleVersion)

Example 4 with SimpleVersion

Use of org.apache.storm.utils.SimpleVersion in project storm by apache.

In the class BasicContainer, method frameworkClasspath:

protected List<String> frameworkClasspath(SimpleVersion topoVersion) {
    File stormWorkerLibDir = new File(stormHome, "lib-worker");
    String topoConfDir = System.getenv("STORM_CONF_DIR") != null ? System.getenv("STORM_CONF_DIR") : new File(stormHome, "conf").getAbsolutePath();
    File stormExtlibDir = new File(stormHome, "extlib");
    String extcp = System.getenv("STORM_EXT_CLASSPATH");
    List<String> pathElements = new LinkedList<>();
    pathElements.add(getWildcardDir(stormWorkerLibDir));
    pathElements.add(getWildcardDir(stormExtlibDir));
    pathElements.add(extcp);
    pathElements.add(topoConfDir);
    NavigableMap<SimpleVersion, List<String>> classpaths = Utils.getConfiguredClasspathVersions(conf, pathElements);
    return Utils.getCompatibleVersion(classpaths, topoVersion, "classpath", pathElements);
}
Also used : SimpleVersion(org.apache.storm.utils.SimpleVersion) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) List(java.util.List) File(java.io.File) LinkedList(java.util.LinkedList)
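
frameworkClasspath delegates the actual selection to Utils.getConfiguredClasspathVersions and Utils.getCompatibleVersion. As a rough, simplified stand-in for that lookup (not the real Utils implementation), a version-keyed NavigableMap can be searched with floorEntry; the map contents and paths below are made up for illustration, and the sketch assumes SimpleVersion can serve as a TreeMap key, as the NavigableMap above implies:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

import org.apache.storm.utils.SimpleVersion;

public class ClasspathLookup {

    // Pick the entry with the highest version not exceeding topoVersion,
    // or null when nothing compatible is configured.
    static List<String> pickCompatible(NavigableMap<SimpleVersion, List<String>> classpaths, SimpleVersion topoVersion) {
        Map.Entry<SimpleVersion, List<String>> entry = classpaths.floorEntry(topoVersion);
        return entry == null ? null : entry.getValue();
    }

    public static void main(String[] args) {
        NavigableMap<SimpleVersion, List<String>> classpaths = new TreeMap<>();
        classpaths.put(new SimpleVersion("1.0.0"), Arrays.asList("/opt/storm-1/lib-worker/*"));
        classpaths.put(new SimpleVersion("2.0.0"), Arrays.asList("/opt/storm-2/lib-worker/*"));

        System.out.println(pickCompatible(classpaths, new SimpleVersion("2.4.0")));
        // prints the 2.0.0 entry: [/opt/storm-2/lib-worker/*]
    }
}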

Aggregations

SimpleVersion (org.apache.storm.utils.SimpleVersion)4 ArrayList (java.util.ArrayList)2 File (java.io.File)1 IOException (java.io.IOException)1 InterruptedIOException (java.io.InterruptedIOException)1 BindException (java.net.BindException)1 Principal (java.security.Principal)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 LinkedList (java.util.LinkedList)1 List (java.util.List)1 Map (java.util.Map)1 NavigableMap (java.util.NavigableMap)1 IStormClusterState (org.apache.storm.cluster.IStormClusterState)1 AlreadyAliveException (org.apache.storm.generated.AlreadyAliveException)1 AuthorizationException (org.apache.storm.generated.AuthorizationException)1 Credentials (org.apache.storm.generated.Credentials)1 IllegalStateException (org.apache.storm.generated.IllegalStateException)1 InvalidTopologyException (org.apache.storm.generated.InvalidTopologyException)1 KeyAlreadyExistsException (org.apache.storm.generated.KeyAlreadyExistsException)1