Use of co.cask.cdap.api.flow.FlowletConnection in project cdap by caskdata.
From the class DeletedProgramHandlerStage, method process:
@Override
public void process(ApplicationDeployable appSpec) throws Exception {
  // Programs that existed in the previously deployed version but are absent from the new spec.
  List<ProgramSpecification> removedSpecs =
    store.getDeletedProgramSpecifications(appSpec.getApplicationId(), appSpec.getSpecification());
  // TODO: this should also delete logs and run records (or not?), and do it for all program types [CDAP-2187]
  List<String> removedFlowNames = Lists.newArrayList();
  for (ProgramSpecification removedSpec : removedSpecs) {
    ProgramType programType = ProgramTypes.fromSpecification(removedSpec);
    final ProgramId programId = appSpec.getApplicationId().program(programType, removedSpec.getName());
    // Ensure the program is stopped before any state is cleaned up.
    programTerminator.stop(programId);
    // Revoke all privileges granted on the removed program.
    privilegesManager.revoke(programId);
    // A removed flow additionally owns queues and stream-consumer states that must be dropped.
    if (programType == ProgramType.FLOW) {
      FlowSpecification flowSpec = (FlowSpecification) removedSpec;
      // Map each source stream to every consumer group id that was reading from it.
      final Multimap<String, Long> groupsByStream = HashMultimap.create();
      for (FlowletConnection conn : flowSpec.getConnections()) {
        if (conn.getSourceType() == FlowletConnection.Type.STREAM) {
          groupsByStream.put(conn.getSourceName(),
                             FlowUtils.generateConsumerGroupId(programId, conn.getTargetName()));
        }
      }
      // Consumer-state namespace is "<app>.<program>".
      final String consumerNamespace = String.format("%s.%s", programId.getApplication(), programId.getProgram());
      final NamespaceId namespaceId = appSpec.getApplicationId().getParent();
      // The cleanup has to run as the application owner.
      impersonator.doAs(appSpec.getApplicationId(), new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          for (Map.Entry<String, Collection<Long>> entry : groupsByStream.asMap().entrySet()) {
            streamConsumerFactory.dropAll(namespaceId.stream(entry.getKey()), consumerNamespace, entry.getValue());
          }
          queueAdmin.dropAllForFlow(programId.getParent().flow(programId.getEntityName()));
          return null;
        }
      });
      removedFlowNames.add(programId.getEntityName());
    }
    // Metadata of a removed program is no longer needed.
    metadataStore.removeMetadata(programId);
  }
  if (!removedFlowNames.isEmpty()) {
    deleteMetrics(appSpec.getApplicationId(), removedFlowNames);
  }
  emit(appSpec);
}
Use of co.cask.cdap.api.flow.FlowletConnection in project cdap by caskdata.
From the class SimpleQueueSpecificationGenerator, method create:
/**
 * Builds the queue specifications implied by the connections of the given {@link FlowSpecification}.
 *
 * @param input the flow specification whose connections are examined
 * @return a {@link Table} keyed by source {@link Node} (row) and target flowlet name (column),
 *         holding the set of {@link QueueSpecification}s for each source/target pair
 */
@Override
public Table<Node, String, Set<QueueSpecification>> create(FlowSpecification input) {
  Table<Node, String, Set<QueueSpecification>> result = HashBasedTable.create();
  String flowName = input.getName();
  Map<String, FlowletDefinition> flowletDefs = input.getFlowlets();
  // Derive the queue specification for the endpoints of every connection in the flow.
  for (FlowletConnection conn : input.getConnections()) {
    final String from = conn.getSourceName();
    final String to = conn.getTargetName();
    Node fromNode;
    Set<QueueSpecification> specs;
    if (conn.getSourceType() == FlowletConnection.Type.FLOWLET) {
      // Flowlet-to-flowlet: match the target's declared inputs against the source's outputs.
      fromNode = new Node(conn.getSourceType(), from);
      specs = generateQueueSpecification(appId, flowName, conn,
                                         flowletDefs.get(to).getInputs(), flowletDefs.get(from).getOutputs());
    } else {
      // Stream source: it may live in another namespace, and its output is always a stream event.
      String streamNamespace = conn.getSourceNamespace() == null ? appId.getNamespace() : conn.getSourceNamespace();
      fromNode = new Node(conn.getSourceType(), streamNamespace, from);
      specs = generateQueueSpecification(appId, flowName, conn, flowletDefs.get(to).getInputs(),
                                         ImmutableMap.<String, Set<Schema>>of(conn.getSourceName(),
                                                                              ImmutableSet.of(STREAM_EVENT_SCHEMA)));
    }
    // Accumulate into the existing cell, creating it on first use.
    Set<QueueSpecification> cell = result.get(fromNode, to);
    if (cell == null) {
      cell = Sets.newHashSet();
      result.put(fromNode, to, cell);
    }
    cell.addAll(specs);
  }
  return result;
}
Use of co.cask.cdap.api.flow.FlowletConnection in project cdap by caskdata.
From the class ApplicationRegistrationStage, method registerDatasets:
// Register dataset usage, based upon the program specifications.
// Note that worker specifications' datasets are not registered upon app deploy because the useDataset of the
// WorkerConfigurer is deprecated. Workers' access to datasets is aimed to be completely dynamic. Other programs are
// moving in this direction.
// Also, SparkSpecifications are the same in that a Spark program's dataset access is completely dynamic.
private void registerDatasets(ApplicationWithPrograms input) {
  ApplicationSpecification appSpec = input.getSpecification();
  ApplicationId appId = input.getApplicationId();
  NamespaceId namespaceId = appId.getParent();

  // Flows: register the streams they consume and the datasets their flowlets declare.
  for (FlowSpecification flow : appSpec.getFlows().values()) {
    ProgramId programId = appId.flow(flow.getName());
    for (FlowletConnection connection : flow.getConnections()) {
      // Compare enum constants with == (null-safe and consistent with the rest of the codebase)
      // rather than equals(), which would NPE if the source type were ever null.
      if (connection.getSourceType() == FlowletConnection.Type.STREAM) {
        usageRegistry.register(programId, namespaceId.stream(connection.getSourceName()));
      }
    }
    for (FlowletDefinition flowlet : flow.getFlowlets().values()) {
      for (String dataset : flowlet.getDatasets()) {
        usageRegistry.register(programId, namespaceId.dataset(dataset));
      }
    }
  }

  // MapReduce: register each program's declared datasets.
  for (MapReduceSpecification program : appSpec.getMapReduce().values()) {
    ProgramId programId = appId.mr(program.getName());
    for (String dataset : program.getDataSets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }

  // Spark: register each program's declared datasets.
  for (SparkSpecification sparkSpec : appSpec.getSpark().values()) {
    ProgramId programId = appId.spark(sparkSpec.getName());
    for (String dataset : sparkSpec.getDatasets()) {
      usageRegistry.register(programId, namespaceId.dataset(dataset));
    }
  }

  // Services: register the datasets declared by every HTTP handler.
  for (ServiceSpecification serviceSpecification : appSpec.getServices().values()) {
    ProgramId programId = appId.service(serviceSpecification.getName());
    for (HttpServiceHandlerSpecification handlerSpecification : serviceSpecification.getHandlers().values()) {
      for (String dataset : handlerSpecification.getDatasets()) {
        usageRegistry.register(programId, namespaceId.dataset(dataset));
      }
    }
  }
}
Aggregations