Example use of org.apache.atlas.typesystem.Referenceable in project incubator-atlas (Apache): class StormAtlasHook, method addBolts.
/**
 * Creates an Atlas entity for every bolt in the topology and registers it
 * in the node-entity map under the bolt's name.
 *
 * @param bolts        bolt name -> bolt spec, taken from the storm topology
 * @param nodeEntities output map of node name -> entity; bolt entries are added here
 * @throws IllegalAccessException propagated from reflective inspection in createBoltInstance
 */
private void addBolts(Map<String, Bolt> bolts, Map<String, Referenceable> nodeEntities) throws IllegalAccessException {
    for (Map.Entry<String, Bolt> boltEntry : bolts.entrySet()) {
        final String boltName = boltEntry.getKey();
        nodeEntities.put(boltName, createBoltInstance(boltName, boltEntry.getValue()));
    }
}
Example use of org.apache.atlas.typesystem.Referenceable in project incubator-atlas (Apache): class StormAtlasHook, method addSpouts.
/**
 * Creates an Atlas entity for every spout in the topology and registers it
 * in the node-entity map under the spout's name.
 *
 * @param spouts       spout name -> spout spec, taken from the storm topology
 * @param nodeEntities output map of node name -> entity; spout entries are added here
 * @throws IllegalAccessException propagated from reflective inspection in createSpoutInstance
 */
private void addSpouts(Map<String, SpoutSpec> spouts, Map<String, Referenceable> nodeEntities) throws IllegalAccessException {
    for (Map.Entry<String, SpoutSpec> spoutEntry : spouts.entrySet()) {
        nodeEntities.put(spoutEntry.getKey(), createSpoutInstance(spoutEntry.getKey(), spoutEntry.getValue()));
    }
}
Example use of org.apache.atlas.typesystem.Referenceable in project incubator-atlas (Apache): class StormAtlasHook, method createDataSet.
/**
 * Maps a storm spout/bolt component to the Atlas data-set entity it reads from
 * or writes to (Kafka topic, HBase table, HDFS path, or Hive table). The
 * component's configuration is discovered reflectively via StormTopologyUtil
 * and flattened into "outer.inner" -> value string keys.
 *
 * @param name              simple class name of the component (e.g. "KafkaSpout", "HiveBolt")
 * @param topologyOwner     owner of the topology; used as the Kafka topic owner
 * @param instance          the spout/bolt instance whose fields are inspected reflectively
 * @param stormConf         raw storm configuration map (cluster name, kerberos principals, ...)
 * @param dependentEntities output list; the created data-set entity (and, for HiveBolt,
 *                          also its hive_db entity) is appended here
 * @return the data-set Referenceable, or null for unrecognized component types
 * @throws IllegalAccessException if reflective field inspection fails
 */
private Referenceable createDataSet(String name, String topologyOwner, Serializable instance, Map stormConf, List<Referenceable> dependentEntities) throws IllegalAccessException {
// flatten the component instance's (nested) fields into string key/value pairs
Map<String, String> config = StormTopologyUtil.getFieldValues(instance, true, null);
String clusterName = null;
Referenceable dataSetReferenceable;
// todo: need to redo this with a config driven approach
switch(name) {
case "KafkaSpout":
dataSetReferenceable = new Referenceable(StormDataTypes.KAFKA_TOPIC.getName());
// topic and broker info come from the spout's reflected SpoutConfig fields
final String topicName = config.get("KafkaSpout._spoutConfig.topic");
dataSetReferenceable.set("topic", topicName);
dataSetReferenceable.set("uri", config.get("KafkaSpout._spoutConfig.hosts.brokerZkStr"));
// fall back to a placeholder owner when the topology has none
if (StringUtils.isEmpty(topologyOwner)) {
topologyOwner = ANONYMOUS_OWNER;
}
dataSetReferenceable.set(AtlasClient.OWNER, topologyOwner);
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getKafkaTopicQualifiedName(getClusterName(stormConf), topicName));
dataSetReferenceable.set(AtlasClient.NAME, topicName);
break;
case "HBaseBolt":
dataSetReferenceable = new Referenceable(StormDataTypes.HBASE_TABLE.getName());
final String hbaseTableName = config.get("HBaseBolt.tableName");
dataSetReferenceable.set("uri", stormConf.get("hbase.rootdir"));
dataSetReferenceable.set(AtlasClient.NAME, hbaseTableName);
// NOTE(review): owner is taken from storm's kerberos principal, not the topology owner — confirm intended
dataSetReferenceable.set(AtlasClient.OWNER, stormConf.get("storm.kerberos.principal"));
// cluster name may come from the HBase config rather than storm's own cluster setting
clusterName = extractComponentClusterName(HBaseConfiguration.create(), stormConf);
//TODO - Hbase Namespace is hardcoded to 'default'. need to check how to get this or is it already part of tableName
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, getHbaseTableQualifiedName(clusterName, HBASE_NAMESPACE_DEFAULT, hbaseTableName));
break;
case "HdfsBolt":
dataSetReferenceable = new Referenceable(HiveMetaStoreBridge.HDFS_PATH);
// NOTE(review): when rotationActions is present its VALUE is used as the path suffix,
// otherwise fileNameFormat.path is used — this looks suspicious; confirm against HdfsBolt's config layout
String hdfsUri = config.get("HdfsBolt.rotationActions") == null ? config.get("HdfsBolt.fileNameFormat.path") : config.get("HdfsBolt.rotationActions");
// qualified name for an hdfs_path entity is the full fsUrl + path string
final String hdfsPathStr = config.get("HdfsBolt.fsUrl") + hdfsUri;
dataSetReferenceable.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, hdfsPathStr);
dataSetReferenceable.set("path", hdfsPathStr);
dataSetReferenceable.set(AtlasClient.OWNER, stormConf.get("hdfs.kerberos.principal"));
final Path hdfsPath = new Path(hdfsPathStr);
// display name is the scheme/authority-stripped path, lower-cased
dataSetReferenceable.set(AtlasClient.NAME, Path.getPathWithoutSchemeAndAuthority(hdfsPath).toString().toLowerCase());
break;
case "HiveBolt":
// todo: verify if hive table has everything needed to retrieve existing table
// the owning database is registered as its own dependent entity
Referenceable dbReferenceable = new Referenceable("hive_db");
String databaseName = config.get("HiveBolt.options.databaseName");
dbReferenceable.set(AtlasClient.NAME, databaseName);
dbReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getDBQualifiedName(getClusterName(stormConf), databaseName));
dbReferenceable.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
dependentEntities.add(dbReferenceable);
// NOTE(review): the table's qualified name uses the HiveConf-derived cluster while the db
// uses getClusterName(stormConf) — these could differ; confirm they are meant to match
clusterName = extractComponentClusterName(new HiveConf(), stormConf);
final String hiveTableName = config.get("HiveBolt.options.tableName");
dataSetReferenceable = new Referenceable("hive_table");
final String tableQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(clusterName, databaseName, hiveTableName);
dataSetReferenceable.set(AtlasClient.NAME, hiveTableName);
dataSetReferenceable.set(HiveMetaStoreBridge.DB, dbReferenceable);
dataSetReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
break;
default:
//TODO - What should we do for custom data sets. Not sure what name we can set here?
return null;
}
// every recognized data set is also reported as a dependent entity
dependentEntities.add(dataSetReferenceable);
return dataSetReferenceable;
}
Example use of org.apache.atlas.typesystem.Referenceable in project incubator-atlas (Apache): class StormAtlasHook, method createTopologyInstance.
/**
 * Builds the Atlas entity describing the storm topology itself: id, name,
 * qualified name, owner (falling back to an anonymous placeholder), start
 * time, and cluster name.
 *
 * @param topologyInfo storm's description of the submitted topology
 * @param stormConf    raw storm configuration map, used to resolve the cluster name
 * @return the populated storm_topology Referenceable
 * @throws Exception propagated from cluster-name resolution
 */
private Referenceable createTopologyInstance(TopologyInfo topologyInfo, Map stormConf) throws Exception {
    final Referenceable topology = new Referenceable(StormDataTypes.STORM_TOPOLOGY.getName());
    topology.set("id", topologyInfo.get_id());
    topology.set(AtlasClient.NAME, topologyInfo.get_name());
    topology.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, topologyInfo.get_name());
    // topologies submitted without an owner are attributed to the anonymous placeholder
    final String owner = StringUtils.isEmpty(topologyInfo.get_owner())
            ? ANONYMOUS_OWNER
            : topologyInfo.get_owner();
    topology.set(AtlasClient.OWNER, owner);
    topology.set("startTime", new Date(System.currentTimeMillis()));
    topology.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, getClusterName(stormConf));
    return topology;
}
Example use of org.apache.atlas.typesystem.Referenceable in project incubator-atlas (Apache): class StormAtlasHook, method notify.
/**
 * This is the client-side hook that storm fires when a topology is added.
 * Builds Atlas entities for the topology, its data sets, and its processing
 * graph, then notifies Atlas on behalf of the topology owner.
 *
 * @param topologyInfo topology info
 * @param stormConf configuration
 * @param stormTopology a storm topology
 * @throws IllegalAccessException declared by the hook contract; in practice any
 *         failure is wrapped in a RuntimeException so storm surfaces the error
 */
@Override
public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology) throws IllegalAccessException {
    LOG.info("Collecting metadata for a new storm topology: {}", topologyInfo.get_name());
    try {
        ArrayList<Referenceable> entities = new ArrayList<>();
        Referenceable topologyReferenceable = createTopologyInstance(topologyInfo, stormConf);
        // data sets the topology reads/writes; registered before the topology itself
        List<Referenceable> dependentEntities = addTopologyDataSets(stormTopology, topologyReferenceable, topologyInfo.get_owner(), stormConf);
        if (!dependentEntities.isEmpty()) {
            entities.addAll(dependentEntities);
        }
        // create the graph for the topology
        ArrayList<Referenceable> graphNodes = createTopologyGraph(stormTopology, stormTopology.get_spouts(), stormTopology.get_bolts());
        // add the connection from topology to the graph
        topologyReferenceable.set("nodes", graphNodes);
        entities.add(topologyReferenceable);
        LOG.debug("notifying entities, size = {}", entities.size());
        String user = getUser(topologyInfo.get_owner(), null);
        notifyEntities(user, entities);
    } catch (Exception e) {
        // any failure (including checked exceptions from entity creation) is fatal for the hook
        throw new RuntimeException("Atlas hook is unable to process the topology.", e);
    }
}
Aggregations