Use of org.apache.storm.generated.ComponentCommon in project storm by apache.
The class StormCommon, method validateStructure.
public static void validateStructure(StormTopology topology) throws InvalidTopologyException {
    Map<String, Object> componentMap = allComponents(topology);
    for (Map.Entry<String, Object> entry : componentMap.entrySet()) {
        String componentId = entry.getKey();
        ComponentCommon common = getComponentCommon(entry.getValue());
        Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
        for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
            String sourceStreamId = input.getKey().get_streamId();
            String sourceComponentId = input.getKey().get_componentId();
            if (!componentMap.containsKey(sourceComponentId)) {
                throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent component [" + sourceComponentId + "]");
            }
            ComponentCommon sourceComponent = getComponentCommon(componentMap.get(sourceComponentId));
            if (!sourceComponent.get_streams().containsKey(sourceStreamId)) {
                throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from non-existent stream: [" + sourceStreamId + "] of component [" + sourceComponentId + "]");
            }
            Grouping grouping = input.getValue();
            if (Thrift.groupingType(grouping) == Grouping._Fields.FIELDS) {
                List<String> fields = new ArrayList<>(grouping.get_fields());
                Map<String, StreamInfo> streams = sourceComponent.get_streams();
                Set<String> sourceOutputFields = getStreamOutputFields(streams);
                fields.removeAll(sourceOutputFields);
                if (!fields.isEmpty()) {
                    throw new InvalidTopologyException("Component: [" + componentId + "] subscribes from stream: [" + sourceStreamId + "] of component [" + sourceComponentId + "] with non-existent fields: " + fields);
                }
            }
        }
    }
}
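A minimal usage sketch of the check above, assuming hypothetical MySpout/MyBolt implementations of IRichSpout/IRichBolt; wiring a bolt to a stream the spout never declares should trip the non-existent-stream branch:

import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;

public class ValidateStructureSketch {
    public static void main(String[] args) throws InvalidTopologyException {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new MySpout()); // hypothetical spout
        // "words" declares no stream by this name, so this subscription is invalid.
        builder.setBolt("counter", new MyBolt()) // hypothetical bolt
               .fieldsGrouping("words", "no-such-stream", new Fields("word"));
        StormTopology topology = builder.createTopology();
        // Throws InvalidTopologyException: subscribes from non-existent stream.
        StormCommon.validateStructure(topology);
    }
}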
Use of org.apache.storm.generated.ComponentCommon in project storm by apache.
The class StormCommon, method componentConf.
@SuppressWarnings("unchecked")
public static Map<String, Object> componentConf(Object component) {
    try {
        Map<String, Object> conf = new HashMap<>();
        ComponentCommon common = getComponentCommon(component);
        String jconf = common.get_json_conf();
        if (jconf != null) {
            conf.putAll((Map<String, Object>) JSONValue.parseWithException(jconf));
        }
        return conf;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
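As a hedged usage sketch (MyBolt is a placeholder bolt): per-component settings added through the declarer's addConfiguration are serialized into the component's json_conf, and componentConf parses them back into a map:

import java.util.Map;
import org.apache.storm.Config;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;

public class ComponentConfSketch {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setBolt("counter", new MyBolt()) // hypothetical bolt
               .addConfiguration(Config.TOPOLOGY_TASKS, 4);
        StormTopology topology = builder.createTopology();
        // The declarer stored {"topology.tasks":4} as this bolt's JSON conf.
        Map<String, Object> conf = StormCommon.componentConf(topology.get_bolts().get("counter"));
        System.out.println(conf.get(Config.TOPOLOGY_TASKS)); // 4
    }
}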
Use of org.apache.storm.generated.ComponentCommon in project storm by apache.
The class StormCommon, method validateBasic.
@SuppressWarnings("unchecked")
public static void validateBasic(StormTopology topology) throws InvalidTopologyException {
    validateIds(topology);
    for (StormTopology._Fields field : Thrift.getSpoutFields()) {
        Map<String, Object> spoutComponents = (Map<String, Object>) topology.getFieldValue(field);
        if (spoutComponents != null) {
            for (Object obj : spoutComponents.values()) {
                ComponentCommon common = getComponentCommon(obj);
                if (!isEmptyInputs(common)) {
                    throw new InvalidTopologyException("May not declare inputs for a spout");
                }
            }
        }
    }
    Map<String, Object> componentMap = allComponents(topology);
    for (Object componentObj : componentMap.values()) {
        Map<String, Object> conf = componentConf(componentObj);
        ComponentCommon common = getComponentCommon(componentObj);
        int parallelismHintNum = Thrift.getParallelismHint(common);
        Integer taskNum = Utils.getInt(conf.get(Config.TOPOLOGY_TASKS), 0);
        if (taskNum > 0 && parallelismHintNum <= 0) {
            throw new InvalidTopologyException("Number of executors must be greater than 0 when number of tasks is greater than 0");
        }
    }
}
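One hypothetical sketch of making the executor/task check fire, mutating the generated thrift structs directly (TopologyBuilder itself rejects a non-positive parallelism hint at build time, so the hint is zeroed after createTopology):

import org.apache.storm.daemon.StormCommon;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.topology.TopologyBuilder;

public class ValidateBasicSketch {
    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new MySpout()); // hypothetical spout
        builder.setBolt("counter", new MyBolt()).shuffleGrouping("words"); // hypothetical bolt
        StormTopology topology = builder.createTopology();
        Bolt bolt = topology.get_bolts().get("counter");
        bolt.get_common().set_parallelism_hint(0); // 0 executors...
        bolt.get_common().set_json_conf("{\"topology.tasks\": 2}"); // ...but 2 tasks
        // Throws InvalidTopologyException: executors must be > 0 when tasks > 0.
        StormCommon.validateBasic(topology);
    }
}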
Use of org.apache.storm.generated.ComponentCommon in project storm by apache.
The class TopologySpoutLag, method getLagResultForKafka.
private static Map<String, Object> getLagResultForKafka(String spoutId, SpoutSpec spoutSpec, Map topologyConf, boolean old) throws IOException {
    ComponentCommon componentCommon = spoutSpec.get_common();
    String json = componentCommon.get_json_conf();
    Map<String, Object> result = null;
    String errorMsg = "Offset lags for kafka not supported for older versions. Please update kafka spout to latest version.";
    if (json != null && !json.isEmpty()) {
        List<String> commands = new ArrayList<>();
        String stormHomeDir = System.getenv("STORM_BASE_DIR");
        if (stormHomeDir != null && !stormHomeDir.endsWith("/")) {
            stormHomeDir += File.separator;
        }
        commands.add(stormHomeDir != null ? stormHomeDir + "bin" + File.separator + "storm-kafka-monitor" : "storm-kafka-monitor");
        Map<String, Object> jsonMap = null;
        try {
            jsonMap = (Map<String, Object>) JSONValue.parseWithException(json);
        } catch (ParseException e) {
            throw new IOException(e);
        }
        commands.addAll(old ? getCommandLineOptionsForOldKafkaSpout(jsonMap, topologyConf) : getCommandLineOptionsForNewKafkaSpout(jsonMap));
        logger.debug("Command to run: {}", commands);
        // If commands contains any null values, the spout was compiled against an older storm-kafka / storm-kafka-client.
        if (!commands.contains(null)) {
            String resultFromMonitor = ShellUtils.execCommand(commands.toArray(new String[0]));
            try {
                result = (Map<String, Object>) JSONValue.parseWithException(resultFromMonitor);
            } catch (ParseException e) {
                logger.debug("JSON parsing failed, assuming message as error message: {}", resultFromMonitor);
                // JSON parsing failed, so treat the monitor's output as an error message.
                errorMsg = resultFromMonitor;
            }
        }
    }
    Map<String, Object> kafkaSpoutLagInfo = new HashMap<>();
    kafkaSpoutLagInfo.put(SPOUT_ID, spoutId);
    kafkaSpoutLagInfo.put(SPOUT_TYPE, "KAFKA");
    if (result != null) {
        kafkaSpoutLagInfo.put(SPOUT_LAG_RESULT, result);
    } else {
        kafkaSpoutLagInfo.put(ERROR_INFO, errorMsg);
    }
    return kafkaSpoutLagInfo;
}
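In isolation, the shell-out-and-parse pattern this method relies on looks roughly like the sketch below; the binary path is a hypothetical install location, and the real option list comes from the getCommandLineOptionsFor*KafkaSpout helpers, not from this sketch:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.storm.utils.ShellUtils;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;

public class KafkaLagShellSketch {
    @SuppressWarnings("unchecked")
    public static Map<String, Object> run() throws IOException {
        List<String> commands = new ArrayList<>();
        commands.add("/opt/storm/bin/storm-kafka-monitor"); // hypothetical STORM_BASE_DIR
        // ...options derived from the spout's json_conf would be appended here...
        String out = ShellUtils.execCommand(commands.toArray(new String[0]));
        try {
            return (Map<String, Object>) JSONValue.parseWithException(out);
        } catch (ParseException e) {
            // Non-JSON output means the monitor reported an error; surface it instead.
            System.err.println(out);
            return null;
        }
    }
}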