Example 1 with WrappedInvalidTopologyException

Use of org.apache.storm.utils.WrappedInvalidTopologyException in the apache/storm project.

The class OciUtils, method adjustImageConfigForTopo.

/**
 * Adjust the image config for the topology.
 * If OCI container is not supported, remove the oci image setting from the topoConf;
 * otherwise, set it to the default image if it's null.
 * @param conf the daemon conf
 * @param topoConf the topology conf
 * @param topoId the topology Id
 * @throws InvalidTopologyException if image config is invalid
 */
public static void adjustImageConfigForTopo(Map<String, Object> conf, Map<String, Object> topoConf, String topoId) throws InvalidTopologyException {
    // don't need sanity check here as we assume it's already done during daemon startup
    List<String> allowedImages = getAllowedImages(conf, false);
    String topoImage = (String) topoConf.get(Config.TOPOLOGY_OCI_IMAGE);
    if (allowedImages.isEmpty()) {
        if (topoImage != null) {
            LOG.warn("{} is not configured; this indicates OCI container is not supported; " + "{} config for topology {} will be removed", DaemonConfig.STORM_OCI_ALLOWED_IMAGES, Config.TOPOLOGY_OCI_IMAGE, topoId);
            topoConf.remove(Config.TOPOLOGY_OCI_IMAGE);
        }
    } else {
        if (topoImage == null) {
            // we assume the default image is already validated during daemon startup
            String defaultImage = (String) conf.get(DaemonConfig.STORM_OCI_IMAGE);
            topoImage = defaultImage;
            topoConf.put(Config.TOPOLOGY_OCI_IMAGE, topoImage);
            LOG.info("{} is not set for topology {}; set it to the default image {} configured in {}", Config.TOPOLOGY_OCI_IMAGE, topoId, defaultImage, DaemonConfig.STORM_OCI_IMAGE);
        } else {
            try {
                validateImage(allowedImages, topoImage, Config.TOPOLOGY_OCI_IMAGE);
            } catch (IllegalArgumentException e) {
                throw new WrappedInvalidTopologyException(e.getMessage());
            }
        }
    }
}
Also used : WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException)
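
For orientation, here is a minimal, self-contained sketch (not from the project) of how a daemon-side caller could exercise this method. The OciUtils import path, the image names, and the class name are assumptions for illustration; the config keys are the real Storm constants used above.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.storm.Config;
import org.apache.storm.DaemonConfig;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.utils.OciUtils;   // package assumed

public class OciImageCheckSketch {        // hypothetical class, for illustration only
    public static void main(String[] args) {
        Map<String, Object> daemonConf = new HashMap<>();
        // Hypothetical allow list and default image; only the config keys are real Storm constants.
        daemonConf.put(DaemonConfig.STORM_OCI_ALLOWED_IMAGES, Arrays.asList("storm/rhel8:latest"));
        daemonConf.put(DaemonConfig.STORM_OCI_IMAGE, "storm/rhel8:latest");

        Map<String, Object> topoConf = new HashMap<>();
        topoConf.put(Config.TOPOLOGY_OCI_IMAGE, "unlisted/image:1");   // not on the allow list

        try {
            OciUtils.adjustImageConfigForTopo(daemonConf, topoConf, "demo-topo-1");
        } catch (InvalidTopologyException e) {
            // The WrappedInvalidTopologyException thrown above is a subtype of the
            // Thrift-generated InvalidTopologyException, so this catch handles it.
            System.err.println("Rejected OCI image for topology: " + e.get_msg());
        }
    }
}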

Example 2 with WrappedInvalidTopologyException

Use of org.apache.storm.utils.WrappedInvalidTopologyException in the apache/storm project.

The class Nimbus, method submitTopologyWithOpts.

@Override
public void submitTopologyWithOpts(String topoName, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException, TException {
    try {
        submitTopologyWithOptsCalls.mark();
        assertIsLeader();
        assert (options != null);
        validateTopologyName(topoName);
        checkAuthorization(topoName, null, "submitTopology");
        assertTopoActive(topoName, false);
        @SuppressWarnings("unchecked") Map<String, Object> topoConf = (Map<String, Object>) JSONValue.parse(jsonConf);
        try {
            ConfigValidation.validateTopoConf(topoConf);
        } catch (IllegalArgumentException ex) {
            throw new WrappedInvalidTopologyException(ex.getMessage());
        }
        validator.validate(topoName, topoConf, topology);
        if ((boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false)) {
            @SuppressWarnings("unchecked") Map<String, Object> blobMap = (Map<String, Object>) topoConf.get(Config.TOPOLOGY_BLOBSTORE_MAP);
            if (blobMap != null && !blobMap.isEmpty()) {
                throw new WrappedInvalidTopologyException("symlinks are disabled so blobs are not supported but " + Config.TOPOLOGY_BLOBSTORE_MAP + " = " + blobMap);
            }
        }
        ServerUtils.validateTopologyWorkerMaxHeapSizeConfigs(topoConf, topology, ObjectReader.getDouble(conf.get(Config.TOPOLOGY_WORKER_MAX_HEAP_SIZE_MB)));
        Utils.validateTopologyBlobStoreMap(topoConf, blobStore);
        long uniqueNum = submittedCount.incrementAndGet();
        String topoId = topoName + "-" + uniqueNum + "-" + Time.currentTimeSecs();
        Map<String, String> creds = null;
        if (options.is_set_creds()) {
            creds = options.get_creds().get_creds();
        }
        topoConf.put(Config.STORM_ID, topoId);
        topoConf.put(Config.TOPOLOGY_NAME, topoName);
        topoConf = normalizeConf(conf, topoConf, topology);
        OciUtils.adjustImageConfigForTopo(conf, topoConf, topoId);
        ReqContext req = ReqContext.context();
        Principal principal = req.principal();
        String submitterPrincipal = principal == null ? null : principal.toString();
        Set<String> topoAcl = new HashSet<>(ObjectReader.getStrings(topoConf.get(Config.TOPOLOGY_USERS)));
        topoAcl.add(submitterPrincipal);
        String submitterUser = principalToLocal.toLocal(principal);
        topoAcl.add(submitterUser);
        String topologyPrincipal = Utils.OR(submitterPrincipal, "");
        topoConf.put(Config.TOPOLOGY_SUBMITTER_PRINCIPAL, topologyPrincipal);
        String systemUser = System.getProperty("user.name");
        String topologyOwner = Utils.OR(submitterUser, systemUser);
        // Don't let the user set who we launch as
        topoConf.put(Config.TOPOLOGY_SUBMITTER_USER, topologyOwner);
        topoConf.put(Config.TOPOLOGY_USERS, new ArrayList<>(topoAcl));
        topoConf.put(Config.STORM_ZOOKEEPER_SUPERACL, conf.get(Config.STORM_ZOOKEEPER_SUPERACL));
        if (!Utils.isZkAuthenticationConfiguredStormServer(conf)) {
            topoConf.remove(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME);
            topoConf.remove(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD);
        }
        if (!(Boolean) conf.getOrDefault(DaemonConfig.STORM_TOPOLOGY_CLASSPATH_BEGINNING_ENABLED, false)) {
            topoConf.remove(Config.TOPOLOGY_CLASSPATH_BEGINNING);
        }
        String topoVersionString = topology.get_storm_version();
        if (topoVersionString == null) {
            topoVersionString = (String) conf.getOrDefault(Config.SUPERVISOR_WORKER_DEFAULT_VERSION, VersionInfo.getVersion());
        }
        // Check if we can run a topology with that version of storm.
        SimpleVersion topoVersion = new SimpleVersion(topoVersionString);
        List<String> cp = Utils.getCompatibleVersion(supervisorClasspaths, topoVersion, "classpath", null);
        if (cp == null) {
            throw new WrappedInvalidTopologyException("Topology submitted with storm version " + topoVersionString + " but could not find a configured compatible version to use " + supervisorClasspaths.keySet());
        }
        Map<String, Object> otherConf = Utils.getConfigFromClasspath(cp, conf);
        Map<String, Object> totalConfToSave = Utils.merge(otherConf, topoConf);
        Map<String, Object> totalConf = Utils.merge(conf, totalConfToSave);
        // When reading the conf in nimbus we want to fall back to our own settings
        // if the other config does not have it set.
        topology = normalizeTopology(totalConf, topology);
        // we might need to set the number of acker executors and eventlogger executors to be the estimated number of workers.
        if (ServerUtils.isRas(conf)) {
            int estimatedNumWorker = ServerUtils.getEstimatedWorkerCountForRasTopo(totalConf, topology);
            setUpAckerExecutorConfigs(topoName, totalConfToSave, totalConf, estimatedNumWorker);
            ServerUtils.validateTopologyAckerBundleResource(totalConfToSave, topology, topoName);
            int numEventLoggerExecs = ObjectReader.getInt(totalConf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS), estimatedNumWorker);
            totalConfToSave.put(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS, numEventLoggerExecs);
            LOG.debug("Config {} set to: {} for topology: {}", Config.TOPOLOGY_EVENTLOGGER_EXECUTORS, numEventLoggerExecs, topoName);
        }
        // Remove any configs that are specific to a host that might mess with the running topology.
        // Don't override the host name, or everything looks like it is on nimbus
        totalConfToSave.remove(Config.STORM_LOCAL_HOSTNAME);
        IStormClusterState state = stormClusterState;
        if (creds == null && workerTokenManager != null) {
            // Make sure we can store the worker tokens even if no creds are provided.
            creds = new HashMap<>();
        }
        if (creds != null) {
            Map<String, Object> finalConf = Collections.unmodifiableMap(topoConf);
            for (INimbusCredentialPlugin autocred : nimbusAutocredPlugins) {
                autocred.populateCredentials(creds, finalConf);
            }
            upsertWorkerTokensInCreds(creds, topologyPrincipal, topoId);
        }
        if (ObjectReader.getBoolean(conf.get(Config.SUPERVISOR_RUN_WORKER_AS_USER), false) && (submitterUser == null || submitterUser.isEmpty())) {
            throw new WrappedAuthorizationException("Could not determine the user to run this topology as.");
        }
        // this validates the structure of the topology
        StormCommon.systemTopology(totalConf, topology);
        validateTopologySize(topoConf, conf, topology);
        if (Utils.isZkAuthenticationConfiguredStormServer(conf) && !Utils.isZkAuthenticationConfiguredTopology(topoConf)) {
            throw new IllegalArgumentException("The cluster is configured for zookeeper authentication, but no payload was provided.");
        }
        LOG.info("Received topology submission for {} (storm-{} JDK-{}) with conf {}", topoName, topoVersionString, topology.get_jdk_version(), ConfigUtils.maskPasswords(topoConf));
        // cleanup thread killing topology in b/w assignment and starting the topology
        synchronized (submitLock) {
            assertTopoActive(topoName, false);
            // cred-update-lock is not needed here because creds are being added for the first time.
            if (creds != null) {
                state.setCredentials(topoId, new Credentials(creds), topoConf);
            }
            LOG.info("uploadedJar {} for {}", uploadedJarLocation, topoName);
            setupStormCode(conf, topoId, uploadedJarLocation, totalConfToSave, topology);
            waitForDesiredCodeReplication(totalConf, topoId);
            state.setupHeatbeats(topoId, topoConf);
            state.setupErrors(topoId, topoConf);
            if (ObjectReader.getBoolean(totalConf.get(Config.TOPOLOGY_BACKPRESSURE_ENABLE), false)) {
                state.setupBackpressure(topoId, topoConf);
            }
            notifyTopologyActionListener(topoName, "submitTopology");
            TopologyStatus status = null;
            switch(options.get_initial_status()) {
                case INACTIVE:
                    status = TopologyStatus.INACTIVE;
                    break;
                case ACTIVE:
                    status = TopologyStatus.ACTIVE;
                    break;
                default:
                    throw new IllegalArgumentException("Inital Status of " + options.get_initial_status() + " is not allowed.");
            }
            startTopology(topoName, topoId, status, topologyOwner, topologyPrincipal, totalConfToSave, topology);
        }
    } catch (Exception e) {
        LOG.warn("Topology submission exception. (topology name='{}')", topoName, e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
Also used : INimbusCredentialPlugin(org.apache.storm.security.INimbusCredentialPlugin) TException(org.apache.storm.thrift.TException) WrappedAuthorizationException(org.apache.storm.utils.WrappedAuthorizationException) ReqContext(org.apache.storm.security.auth.ReqContext) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) SimpleVersion(org.apache.storm.utils.SimpleVersion) IStormClusterState(org.apache.storm.cluster.IStormClusterState) TopologyStatus(org.apache.storm.generated.TopologyStatus) HashSet(java.util.HashSet) WorkerMetricPoint(org.apache.storm.generated.WorkerMetricPoint) DataPoint(org.apache.storm.metric.api.DataPoint) WrappedAuthorizationException(org.apache.storm.utils.WrappedAuthorizationException) IOException(java.io.IOException) IllegalStateException(org.apache.storm.generated.IllegalStateException) AlreadyAliveException(org.apache.storm.generated.AlreadyAliveException) WrappedNotAliveException(org.apache.storm.utils.WrappedNotAliveException) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) AuthorizationException(org.apache.storm.generated.AuthorizationException) NotAliveException(org.apache.storm.generated.NotAliveException) WrappedAlreadyAliveException(org.apache.storm.utils.WrappedAlreadyAliveException) InterruptedIOException(java.io.InterruptedIOException) KeyAlreadyExistsException(org.apache.storm.generated.KeyAlreadyExistsException) TException(org.apache.storm.thrift.TException) WrappedIllegalStateException(org.apache.storm.utils.WrappedIllegalStateException) KeyNotFoundException(org.apache.storm.generated.KeyNotFoundException) InvalidTopologyException(org.apache.storm.generated.InvalidTopologyException) BindException(java.net.BindException) Map(java.util.Map) NavigableMap(java.util.NavigableMap) RotatingMap(org.apache.storm.utils.RotatingMap) ImmutableMap(org.apache.storm.shade.com.google.common.collect.ImmutableMap) TimeCacheMap(org.apache.storm.utils.TimeCacheMap) HashMap(java.util.HashMap) NimbusPrincipal(org.apache.storm.security.auth.NimbusPrincipal) Principal(java.security.Principal) Credentials(org.apache.storm.generated.Credentials)
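
To place this method in context: it is the Nimbus-side handler behind a normal client submission. The sketch below uses hypothetical DemoWordSpout and DemoCountBolt classes but otherwise only the standard StormSubmitter API; it shows the client call that reaches this code over Thrift and where a WrappedInvalidTopologyException raised by any of the checks above comes back to the submitter.

import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;

public class SubmitSketch {   // illustration only
    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new DemoWordSpout());                           // hypothetical spout
        builder.setBolt("counts", new DemoCountBolt()).shuffleGrouping("words");  // hypothetical bolt

        Config conf = new Config();
        conf.setNumWorkers(2);
        try {
            // Nimbus runs submitTopologyWithOpts on the server: conf validation,
            // blobstore-map and storm-version checks, then the structure checks.
            StormSubmitter.submitTopology("demo-topo", conf, builder.createTopology());
        } catch (InvalidTopologyException e) {
            // A WrappedInvalidTopologyException thrown inside Nimbus crosses Thrift
            // as an InvalidTopologyException; the msg field carries the reason.
            System.err.println("Submission rejected: " + e.get_msg());
        }
    }
}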

Example 3 with WrappedInvalidTopologyException

Use of org.apache.storm.utils.WrappedInvalidTopologyException in the apache/storm project.

The class StormCommon, method validateStructure.

public static void validateStructure(StormTopology topology) throws InvalidTopologyException {
    Map<String, Object> componentMap = allComponents(topology);
    for (Map.Entry<String, Object> entry : componentMap.entrySet()) {
        String componentId = entry.getKey();
        ComponentCommon common = getComponentCommon(entry.getValue());
        Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
        for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
            String sourceStreamId = input.getKey().get_streamId();
            String sourceComponentId = input.getKey().get_componentId();
            if (!componentMap.keySet().contains(sourceComponentId)) {
                throw new WrappedInvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent component [" + sourceComponentId + "]");
            }
            ComponentCommon sourceComponent = getComponentCommon(componentMap.get(sourceComponentId));
            if (!sourceComponent.get_streams().containsKey(sourceStreamId)) {
                throw new WrappedInvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent stream: " + "[" + sourceStreamId + "] of component [" + sourceComponentId + "]");
            }
            Grouping grouping = input.getValue();
            if (Thrift.groupingType(grouping) == Grouping._Fields.FIELDS) {
                List<String> fields = new ArrayList<>(grouping.get_fields());
                Map<String, StreamInfo> streams = sourceComponent.get_streams();
                Set<String> sourceOutputFields = getStreamOutputFields(streams);
                fields.removeAll(sourceOutputFields);
                if (fields.size() != 0) {
                    throw new WrappedInvalidTopologyException("Component: [" + componentId + "] subscribes from stream: [" + sourceStreamId + "] of component " + "[" + sourceComponentId + "] + with non-existent fields: " + fields);
                }
            }
        }
    }
}
Also used : ComponentCommon(org.apache.storm.generated.ComponentCommon) ArrayList(java.util.ArrayList) Grouping(org.apache.storm.generated.Grouping) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) GlobalStreamId(org.apache.storm.generated.GlobalStreamId) StreamInfo(org.apache.storm.generated.StreamInfo) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)
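
As a concrete illustration, the sketch below wires a bolt to a component id that is never declared, which is the first case validateStructure rejects. The spout and bolt classes are hypothetical, and the org.apache.storm.daemon package for StormCommon is assumed.

import org.apache.storm.daemon.StormCommon;            // package assumed
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;

public class StructureCheckSketch {   // illustration only
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new DemoWordSpout());       // hypothetical spout
        builder.setBolt("counts", new DemoCountBolt())        // hypothetical bolt
               .shuffleGrouping("speller");                   // "speller" is never declared
        StormTopology topology = builder.createTopology();
        try {
            StormCommon.validateStructure(topology);
        } catch (InvalidTopologyException e) {
            // e.g. "Component: [counts] subscribes from non-existent component [speller]"
            System.err.println(e.get_msg());
        }
    }
}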

Example 4 with WrappedInvalidTopologyException

Use of org.apache.storm.utils.WrappedInvalidTopologyException in the apache/storm project.

The class StormCommon, method validateBasic.

@SuppressWarnings("unchecked")
public static void validateBasic(StormTopology topology) throws InvalidTopologyException {
    validateIds(topology);
    for (StormTopology._Fields field : Thrift.getSpoutFields()) {
        Map<String, Object> spoutComponents = (Map<String, Object>) topology.getFieldValue(field);
        if (spoutComponents != null) {
            for (Object obj : spoutComponents.values()) {
                ComponentCommon common = getComponentCommon(obj);
                if (!isEmptyInputs(common)) {
                    throw new WrappedInvalidTopologyException("May not declare inputs for a spout");
                }
            }
        }
    }
    Map<String, Object> componentMap = allComponents(topology);
    for (Object componentObj : componentMap.values()) {
        Map<String, Object> conf = componentConf(componentObj);
        ComponentCommon common = getComponentCommon(componentObj);
        int parallelismHintNum = Thrift.getParallelismHint(common);
        Integer taskNum = ObjectReader.getInt(conf.get(Config.TOPOLOGY_TASKS), 0);
        if (taskNum > 0 && parallelismHintNum <= 0) {
            throw new WrappedInvalidTopologyException("Number of executors must be greater than 0 when number of tasks is greater than 0");
        }
    }
}
Also used : ComponentCommon(org.apache.storm.generated.ComponentCommon) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException) StormTopology(org.apache.storm.generated.StormTopology) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)
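
A minimal sketch of the executor/task check: the topology declares a positive task count while the parallelism hint is set to zero, here directly on the generated Thrift struct purely for the sake of the example. The spout class is hypothetical and the StormCommon package is assumed.

import org.apache.storm.daemon.StormCommon;            // package assumed
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;

public class BasicCheckSketch {   // illustration only
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new DemoWordSpout())   // hypothetical spout
               .setNumTasks(4);                          // a positive task count
        StormTopology topology = builder.createTopology();
        // Force an executor count of 0 on the Thrift struct so the check fires.
        topology.get_spouts().get("words").get_common().set_parallelism_hint(0);
        try {
            StormCommon.validateBasic(topology);
        } catch (InvalidTopologyException e) {
            // "Number of executors must be greater than 0 when number of tasks is greater than 0"
            System.err.println(e.get_msg());
        }
    }
}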

Example 5 with WrappedInvalidTopologyException

Use of org.apache.storm.utils.WrappedInvalidTopologyException in the apache/storm project.

The class StormCommon, method validateIds.

private static Set<String> validateIds(Map<String, ?> componentMap) throws InvalidTopologyException {
    Set<String> keys = componentMap.keySet();
    for (String id : keys) {
        if (Utils.isSystemId(id)) {
            throw new WrappedInvalidTopologyException(id + " is not a valid component id.");
        }
    }
    for (Object componentObj : componentMap.values()) {
        ComponentCommon common = getComponentCommon(componentObj);
        Set<String> streamIds = common.get_streams().keySet();
        for (String id : streamIds) {
            if (Utils.isSystemId(id)) {
                throw new WrappedInvalidTopologyException(id + " is not a valid stream id.");
            }
        }
    }
    return keys;
}
Also used : ComponentCommon(org.apache.storm.generated.ComponentCommon) WrappedInvalidTopologyException(org.apache.storm.utils.WrappedInvalidTopologyException)
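
Utils.isSystemId flags ids reserved for Storm's internal components (the "__" prefix). Since validateIds is private, the sketch below reaches it through validateBasic, registering a hypothetical spout under a reserved-looking id; the StormCommon package is assumed.

import org.apache.storm.daemon.StormCommon;            // package assumed
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.topology.TopologyBuilder;

public class IdCheckSketch {   // illustration only
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("__words", new DemoWordSpout());   // hypothetical spout; "__" prefix is reserved
        try {
            StormCommon.validateBasic(builder.createTopology());
        } catch (InvalidTopologyException e) {
            // "__words is not a valid component id."
            System.err.println(e.get_msg());
        }
    }
}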

Aggregations

WrappedInvalidTopologyException (org.apache.storm.utils.WrappedInvalidTopologyException): 9
HashMap (java.util.HashMap): 5
Map (java.util.Map): 4
AlreadyAliveException (org.apache.storm.generated.AlreadyAliveException): 3
AuthorizationException (org.apache.storm.generated.AuthorizationException): 3
ComponentCommon (org.apache.storm.generated.ComponentCommon): 3
InvalidTopologyException (org.apache.storm.generated.InvalidTopologyException): 3
TException (org.apache.storm.thrift.TException): 3
IOException (java.io.IOException): 2
InterruptedIOException (java.io.InterruptedIOException): 2
BindException (java.net.BindException): 2
NavigableMap (java.util.NavigableMap): 2
TreeMap (java.util.TreeMap): 2
Credentials (org.apache.storm.generated.Credentials): 2
IllegalStateException (org.apache.storm.generated.IllegalStateException): 2
KeyAlreadyExistsException (org.apache.storm.generated.KeyAlreadyExistsException): 2
KeyNotFoundException (org.apache.storm.generated.KeyNotFoundException): 2
NotAliveException (org.apache.storm.generated.NotAliveException): 2
StormTopology (org.apache.storm.generated.StormTopology): 2
WorkerMetricPoint (org.apache.storm.generated.WorkerMetricPoint): 2