Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.
From the class PhysicalPlan, the method removeLogicalStream:
/**
 * Removes the physical representation of the given stream. Operators that are
 * affected in the execution layer are added to the undeploy/deploy sets. This
 * method does not automatically remove operators from the plan.
 *
 * @param sm the logical stream to remove
 */
public void removeLogicalStream(StreamMeta sm) {
  // remove incoming connections for logical stream
  for (InputPortMeta ipm : sm.getSinks()) {
    OperatorMeta om = ipm.getOperatorMeta();
    PMapping m = this.logicalToPTOperator.get(om);
    if (m == null) {
      throw new AssertionError("Unknown operator " + om);
    }
    for (PTOperator oper : m.partitions) {
      List<PTInput> inputsCopy = Lists.newArrayList(oper.inputs);
      for (PTInput input : oper.inputs) {
        if (input.logicalStream == sm) {
          input.source.sinks.remove(input);
          inputsCopy.remove(input);
          undeployOpers.add(oper);
          deployOpers.add(oper);
        }
      }
      oper.inputs = inputsCopy;
    }
  }
  // remove outgoing connections for logical stream
  PMapping m = this.logicalToPTOperator.get(sm.getSource().getOperatorMeta());
  for (PTOperator oper : m.partitions) {
    List<PTOutput> outputsCopy = Lists.newArrayList(oper.outputs);
    for (PTOutput out : oper.outputs) {
      if (out.logicalStream == sm) {
        for (PTInput input : out.sinks) {
          PTOperator downstreamOper = input.source.source;
          downstreamOper.inputs.remove(input);
          Set<PTOperator> deps = this.getDependents(Collections.singletonList(downstreamOper));
          undeployOpers.addAll(deps);
          deployOpers.addAll(deps);
        }
        outputsCopy.remove(out);
        undeployOpers.add(oper);
        deployOpers.add(oper);
      }
    }
    oper.outputs = outputsCopy;
  }
}
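Note the pattern used in both halves of the method: the code never removes elements from the list it is iterating. It takes a copy, removes matches from the copy while walking the original, and swaps the pruned copy in afterwards, which avoids a ConcurrentModificationException. A minimal, self-contained sketch of the same pattern (the strings below stand in for PTInput objects and are not apex-core types):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CopyAndSwapDemo {
  public static void main(String[] args) {
    List<String> inputs = new ArrayList<>(Arrays.asList("s1", "s2", "s1"));
    // iterate the original list; remove matches from the copy only
    List<String> inputsCopy = new ArrayList<>(inputs);
    for (String input : inputs) {
      if (input.equals("s1")) { // stands in for input.logicalStream == sm
        inputsCopy.remove(input);
      }
    }
    // swap in the pruned copy once iteration is complete
    inputs = inputsCopy;
    System.out.println(inputs); // prints [s2]
  }
}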
Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.
From the class StreamCodecWrapperForPersistance, the method shouldCaptureEvent:
public boolean shouldCaptureEvent(T o) {
  for (Entry<InputPortMeta, Collection<PartitionKeys>> entry : inputPortToPartitionMap.entrySet()) {
    StreamCodec<Object> codec = codecsToMerge.get(entry.getKey());
    Collection<PartitionKeys> partitionKeysList = entry.getValue();
    for (PartitionKeys keys : partitionKeysList) {
      if (keys.partitions != null && keys.partitions.contains(keys.mask & codec.getPartition(o))) {
        // the event maps to one of the sink's partitions, so send it to the persist operator
        return true;
      }
    }
  }
  return false;
}
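The check mirrors how the engine routes tuples: the codec maps the tuple to an integer partition, the mask keeps only the relevant low bits, and the result is tested against the partition values assigned to a physical sink. A hedged, self-contained illustration; PartitionKeys here is a simplified stand-in for com.datatorrent.api.Partitioner.PartitionKeys, and the hashCode-based partition function is only an assumption about what a typical codec does:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class PartitionMaskDemo {
  // stand-in for Partitioner.PartitionKeys: a bit mask plus the partition values it selects
  static class PartitionKeys {
    final int mask;
    final Set<Integer> partitions;
    PartitionKeys(int mask, Set<Integer> partitions) {
      this.mask = mask;
      this.partitions = partitions;
    }
  }

  // stand-in for StreamCodec.getPartition(o); real codecs often delegate to hashCode()
  static int getPartition(Object o) {
    return o.hashCode();
  }

  public static void main(String[] args) {
    // with mask 0x03 there are four possible partitions (0..3); this sink owns 0 and 1
    PartitionKeys keys = new PartitionKeys(0x03, new HashSet<>(Arrays.asList(0, 1)));
    for (String tuple : Arrays.asList("a", "b", "c", "d")) {
      boolean capture = keys.partitions.contains(keys.mask & getPartition(tuple));
      System.out.println(tuple + " -> " + capture);
    }
  }
}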
Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.
From the class StreamingContainerManager, the method deploy:
@Override
public void deploy(Set<PTContainer> releaseContainers, Collection<PTOperator> undeploy, Set<PTContainer> startContainers, Collection<PTOperator> deploy) {
  try {
    this.deployChangeInProgress.set(true);
    Map<PTContainer, List<PTOperator>> undeployGroups = groupByContainer(undeploy);
    // order does not matter, remove all operators in each container in one sweep
    for (Map.Entry<PTContainer, List<PTOperator>> e : undeployGroups.entrySet()) {
      // container may already be in failed or pending deploy state, notified by RM or timed out
      PTContainer c = e.getKey();
      if (!startContainers.contains(c) && !releaseContainers.contains(c) && c.getState() != PTContainer.State.KILLED) {
        LOG.debug("scheduling undeploy {} {}", e.getKey().getExternalId(), e.getValue());
        for (PTOperator oper : e.getValue()) {
          oper.setState(PTOperator.State.PENDING_UNDEPLOY);
        }
      }
    }
    // start new containers
    for (PTContainer c : startContainers) {
      requestContainer(c);
    }
    // (re)deploy affected operators
    // can happen in parallel after buffer server for recovered publishers is reset
    Map<PTContainer, List<PTOperator>> deployGroups = groupByContainer(deploy);
    for (Map.Entry<PTContainer, List<PTOperator>> e : deployGroups.entrySet()) {
      if (!startContainers.contains(e.getKey())) {
        // to reset publishers, clean buffer server past checkpoint so subscribers don't read stale data (including end of stream)
        for (PTOperator operator : e.getValue()) {
          for (PTOperator.PTOutput out : operator.getOutputs()) {
            if (!out.isDownStreamInline()) {
              for (InputPortMeta ipm : out.logicalStream.getSinks()) {
                StreamCodec<?> streamCodec = ipm.getStreamCodec();
                Integer codecId = plan.getStreamCodecIdentifier(streamCodec);
                // the following needs to match the concat logic in StreamingContainer
                String sourceIdentifier = Integer.toString(operator.getId())
                    .concat(Component.CONCAT_SEPARATOR).concat(out.portName)
                    .concat(Component.CONCAT_SEPARATOR).concat(codecId.toString());
                if (operator.getContainer().getState() == PTContainer.State.ACTIVE) {
                  // TODO: unit test - find way to mock this when testing rest of logic
                  if (operator.getContainer().bufferServerAddress.getPort() != 0) {
                    BufferServerController bsc = getBufferServerClient(operator);
                    // ensures a new subscriber starting to read from the checkpoint will wait until the publisher redeploy cycle is complete
                    try {
                      bsc.reset(null, sourceIdentifier, 0);
                    } catch (Exception ex) {
                      LOG.error("Failed to reset buffer server {} {}", sourceIdentifier, ex);
                    }
                  }
                }
              }
            }
          }
        }
      }
      // add to operators that we expect to deploy
      LOG.debug("scheduling deploy {} {}", e.getKey().getExternalId(), e.getValue());
      for (PTOperator oper : e.getValue()) {
        // operator will be deployed after it has been undeployed, if still referenced by the container
        if (oper.getState() != PTOperator.State.PENDING_UNDEPLOY) {
          oper.setState(PTOperator.State.PENDING_DEPLOY);
        }
      }
    }
    // stop containers that are no longer used
    for (PTContainer c : releaseContainers) {
      if (c.getExternalId() == null) {
        continue;
      }
      StreamingContainerAgent sca = containers.get(c.getExternalId());
      if (sca != null) {
        LOG.debug("Container marked for shutdown: {}", c);
        // container already removed from plan
        // TODO: monitor soft shutdown
        sca.requestShutDown(ShutdownType.ABORT);
      }
    }
  } finally {
    this.deployChangeCnt++;
    this.deployChangeInProgress.set(false);
  }
}
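The buffer-server stream name computed above is just the operator id, output port name, and stream codec id joined by Component.CONCAT_SEPARATOR. A minimal sketch of that identifier construction, assuming the separator is "." as defined in com.datatorrent.api.Component (the concrete values are illustrative only):

public class SourceIdentifierDemo {
  // assumption: Component.CONCAT_SEPARATOR is "." in com.datatorrent.api.Component
  static final String CONCAT_SEPARATOR = ".";

  // must match the concat logic used by StreamingContainer when it publishes
  static String sourceIdentifier(int operatorId, String portName, int codecId) {
    return Integer.toString(operatorId)
        .concat(CONCAT_SEPARATOR).concat(portName)
        .concat(CONCAT_SEPARATOR).concat(Integer.toString(codecId));
  }

  public static void main(String[] args) {
    // e.g. operator 3, output port "out1", stream codec id 0 -> "3.out1.0"
    System.out.println(sourceIdentifier(3, "out1", 0));
  }
}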
Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.
From the class LogicalPlanSerializer, the method convertToProperties:
public static PropertiesConfiguration convertToProperties(LogicalPlan dag) {
  PropertiesConfiguration props = new PropertiesConfiguration();
  Collection<OperatorMeta> allOperators = dag.getAllOperators();
  for (OperatorMeta operatorMeta : allOperators) {
    String operatorKey = LogicalPlanConfiguration.OPERATOR_PREFIX + operatorMeta.getName();
    Operator operator = operatorMeta.getOperator();
    props.setProperty(operatorKey + "." + LogicalPlanConfiguration.OPERATOR_CLASSNAME, operator.getClass().getName());
    BeanMap operatorProperties = LogicalPlanConfiguration.getObjectProperties(operator);
    @SuppressWarnings("rawtypes")
    Iterator entryIterator = operatorProperties.entryIterator();
    while (entryIterator.hasNext()) {
      try {
        @SuppressWarnings("unchecked")
        Map.Entry<String, Object> entry = (Map.Entry<String, Object>)entryIterator.next();
        if (!entry.getKey().equals("class") && !entry.getKey().equals("name") && entry.getValue() != null) {
          props.setProperty(operatorKey + "." + entry.getKey(), entry.getValue());
        }
      } catch (Exception ex) {
        LOG.warn("Error trying to get a property of operator {}", operatorMeta.getName(), ex);
      }
    }
  }
  Collection<StreamMeta> allStreams = dag.getAllStreams();
  for (StreamMeta streamMeta : allStreams) {
    String streamKey = LogicalPlanConfiguration.STREAM_PREFIX + streamMeta.getName();
    OutputPortMeta source = streamMeta.getSource();
    Collection<InputPortMeta> sinks = streamMeta.getSinks();
    props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_SOURCE, source.getOperatorMeta().getName() + "." + source.getPortName());
    String sinksValue = "";
    for (InputPortMeta sink : sinks) {
      if (!sinksValue.isEmpty()) {
        sinksValue += ",";
      }
      sinksValue += sink.getOperatorMeta().getName() + "." + sink.getPortName();
    }
    props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_SINKS, sinksValue);
    if (streamMeta.getLocality() != null) {
      props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_LOCALITY, streamMeta.getLocality().name());
    }
  }
  return props;
}
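A hedged usage sketch showing the shape of the output. The key prefixes assume apex-core's LogicalPlanConfiguration constants (OPERATOR_PREFIX as "dt.operator." and STREAM_PREFIX as "dt.stream."); the operator and stream names in the comments are illustrative, not from the source:

// a sketch, assuming a populated LogicalPlan instance
static void dumpAsProperties(LogicalPlan dag) {
  PropertiesConfiguration props = LogicalPlanSerializer.convertToProperties(dag);
  for (Iterator<?> it = props.getKeys(); it.hasNext();) {
    String key = (String)it.next();
    System.out.println(key + "=" + props.getProperty(key));
  }
  // expected key shapes (names illustrative):
  //   dt.operator.wordCounter.classname=com.example.WordCounter
  //   dt.stream.wordsToCounter.source=lineReader.output
  //   dt.stream.wordsToCounter.sinks=wordCounter.input
}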
Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.
From the class LogicalPlanSerializer, the method convertToMap:
/**
 * Converts the logical plan to a map of plain Java collections suitable for
 * JSON serialization.
 *
 * @param dag the logical plan to convert
 * @param includeModules whether to include module details in the result
 * @return a map describing the operators, streams and attributes of the DAG
 */
public static Map<String, Object> convertToMap(LogicalPlan dag, boolean includeModules) {
  HashMap<String, Object> result = new HashMap<>();
  ArrayList<Object> operatorArray = new ArrayList<>();
  ArrayList<Object> streamMap = new ArrayList<>();
  //result.put("applicationName", appConfig.getName());
  result.put("operators", operatorArray);
  result.put("streams", streamMap);
  //LogicalPlan dag = StramAppLauncher.prepareDAG(appConfig, StreamingApplication.LAUNCHMODE_YARN);
  // should we put the DAGContext info here?
  Map<String, Object> dagAttrs = new HashMap<>();
  for (Map.Entry<Attribute<Object>, Object> e : Attribute.AttributeMap.AttributeInitializer.getAllAttributes(dag, Context.DAGContext.class).entrySet()) {
    dagAttrs.put(e.getKey().getSimpleName(), e.getValue());
  }
  result.put("attributes", dagAttrs);
  Collection<OperatorMeta> allOperators = dag.getAllOperators();
  ObjectMapper propertyObjectMapper = new ObjectMapper();
  propertyObjectMapper.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, true);
  propertyObjectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
  StdTypeResolverBuilder typer = new PropertyTypeResolverBuilder();
  typer.init(JsonTypeInfo.Id.CLASS, null);
  typer = typer.inclusion(JsonTypeInfo.As.PROPERTY);
  propertyObjectMapper.setDefaultTyping(typer);
  for (OperatorMeta operatorMeta : allOperators) {
    HashMap<String, Object> operatorDetailMap = new HashMap<>();
    ArrayList<Map<String, Object>> portList = new ArrayList<>();
    Map<String, Object> attributeMap = new HashMap<>();
    String operatorName = operatorMeta.getName();
    operatorArray.add(operatorDetailMap);
    operatorDetailMap.put("name", operatorName);
    operatorDetailMap.put("ports", portList);
    operatorDetailMap.put("class", operatorMeta.getOperator().getClass().getName());
    operatorDetailMap.put("attributes", attributeMap);
    Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(operatorMeta, Context.OperatorContext.class);
    for (Map.Entry<Attribute<Object>, Object> entry : rawAttributes.entrySet()) {
      attributeMap.put(entry.getKey().getSimpleName(), entry.getValue());
    }
    ObjectMapperString str;
    try {
      str = new ObjectMapperString(propertyObjectMapper.writeValueAsString(operatorMeta.getOperator()));
    } catch (Throwable ex) {
      LOG.error("Got exception when trying to get properties for operator {}", operatorMeta.getName(), ex);
      str = null;
    }
    operatorDetailMap.put("properties", str);
    Operators.PortMappingDescriptor pmd = new Operators.PortMappingDescriptor();
    Operators.describe(operatorMeta.getOperator(), pmd);
    for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : pmd.inputPorts.entrySet()) {
      HashMap<String, Object> portDetailMap = new HashMap<>();
      HashMap<String, Object> portAttributeMap = new HashMap<>();
      InputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
      String portName = portMeta.getPortName();
      portDetailMap.put("name", portName);
      portDetailMap.put("type", "input");
      portDetailMap.put("attributes", portAttributeMap);
      rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
      for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
        portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
      }
      portList.add(portDetailMap);
    }
    for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : pmd.outputPorts.entrySet()) {
      HashMap<String, Object> portDetailMap = new HashMap<>();
      HashMap<String, Object> portAttributeMap = new HashMap<>();
      OutputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
      String portName = portMeta.getPortName();
      portDetailMap.put("name", portName);
      portDetailMap.put("type", "output");
      portDetailMap.put("attributes", portAttributeMap);
      rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
      for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
        portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
      }
      portList.add(portDetailMap);
    }
  }
  Collection<StreamMeta> allStreams = dag.getAllStreams();
  for (StreamMeta streamMeta : allStreams) {
    HashMap<String, Object> streamDetailMap = new HashMap<>();
    String streamName = streamMeta.getName();
    streamMap.add(streamDetailMap);
    String sourcePortName = streamMeta.getSource().getPortName();
    OperatorMeta operatorMeta = streamMeta.getSource().getOperatorMeta();
    HashMap<String, Object> sourcePortDetailMap = new HashMap<>();
    sourcePortDetailMap.put("operatorName", operatorMeta.getName());
    sourcePortDetailMap.put("portName", sourcePortName);
    streamDetailMap.put("name", streamName);
    streamDetailMap.put("source", sourcePortDetailMap);
    Collection<InputPortMeta> sinks = streamMeta.getSinks();
    ArrayList<HashMap<String, Object>> sinkPortList = new ArrayList<>();
    for (InputPortMeta sinkPort : sinks) {
      HashMap<String, Object> sinkPortDetailMap = new HashMap<>();
      sinkPortDetailMap.put("operatorName", sinkPort.getOperatorMeta().getName());
      sinkPortDetailMap.put("portName", sinkPort.getPortName());
      sinkPortList.add(sinkPortDetailMap);
    }
    streamDetailMap.put("sinks", sinkPortList);
    if (streamMeta.getLocality() != null) {
      streamDetailMap.put("locality", streamMeta.getLocality().name());
    }
  }
  if (includeModules) {
    ArrayList<Map<String, Object>> modulesArray = new ArrayList<>();
    result.put("modules", modulesArray);
    for (LogicalPlan.ModuleMeta meta : dag.getAllModules()) {
      modulesArray.add(getLogicalModuleDetails(dag, meta));
    }
  }
  return result;
}
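Since the returned structure is plain maps and lists, it can be serialized with a stock ObjectMapper. A hedged usage sketch, assuming Jackson 1.x (org.codehaus.jackson, matching the JsonGenerator/SerializationConfig imports the method itself uses) is on the classpath:

// a sketch, assuming a populated LogicalPlan instance
static void dumpAsJson(LogicalPlan dag) throws Exception {
  Map<String, Object> planMap = LogicalPlanSerializer.convertToMap(dag, false);
  org.codehaus.jackson.map.ObjectMapper mapper = new org.codehaus.jackson.map.ObjectMapper();
  System.out.println(mapper.writeValueAsString(planMap));
  // expected top-level keys: "operators", "streams", "attributes"
  // (plus "modules" when includeModules is true)
}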