Use of org.drools.core.common.InternalWorkingMemory in project drools by kiegroup.
In class ProtobufOutputMarshaller, method writeAgenda:
private static void writeAgenda(MarshallerWriteContext context, ProtobufMessages.RuleData.Builder _ksb) throws IOException {
    InternalWorkingMemory wm = context.wm;
    InternalAgenda agenda = wm.getAgenda();
    org.drools.core.marshalling.impl.ProtobufMessages.Agenda.Builder _ab = ProtobufMessages.Agenda.newBuilder();
    AgendaGroup[] agendaGroups = agenda.getAgendaGroupsMap().values().toArray(new AgendaGroup[agenda.getAgendaGroupsMap().size()]);
    Arrays.sort(agendaGroups, AgendaGroupSorter.instance);
    for (AgendaGroup ag : agendaGroups) {
        AgendaGroupQueueImpl group = (AgendaGroupQueueImpl) ag;
        org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup.Builder _agb = ProtobufMessages.Agenda.AgendaGroup.newBuilder();
        _agb.setName(group.getName())
            .setIsActive(group.isActive())
            .setIsAutoDeactivate(group.isAutoDeactivate())
            .setClearedForRecency(group.getClearedForRecency())
            .setHasRuleFlowLister(group.isRuleFlowListener())
            .setActivatedForRecency(group.getActivatedForRecency());
        Map<Long, String> nodeInstances = group.getNodeInstances();
        for (Map.Entry<Long, String> entry : nodeInstances.entrySet()) {
            org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup.NodeInstance.Builder _nib = ProtobufMessages.Agenda.AgendaGroup.NodeInstance.newBuilder();
            _nib.setProcessInstanceId(entry.getKey());
            _nib.setNodeInstanceId(entry.getValue());
            _agb.addNodeInstance(_nib.build());
        }
        _ab.addAgendaGroup(_agb.build());
    }
    org.drools.core.marshalling.impl.ProtobufMessages.Agenda.FocusStack.Builder _fsb = ProtobufMessages.Agenda.FocusStack.newBuilder();
    LinkedList<AgendaGroup> focusStack = agenda.getStackList();
    for (AgendaGroup group : focusStack) {
        _fsb.addGroupName(group.getName());
    }
    _ab.setFocusStack(_fsb.build());
    // serialize all dormant activations
    org.drools.core.util.Iterator it = ActivationIterator.iterator(wm);
    List<org.drools.core.spi.Activation> dormant = new ArrayList<org.drools.core.spi.Activation>();
    for (org.drools.core.spi.Activation item = (org.drools.core.spi.Activation) it.next(); item != null; item = (org.drools.core.spi.Activation) it.next()) {
        if (!item.isQueued()) {
            dormant.add(item);
        }
    }
    Collections.sort(dormant, ActivationsSorter.INSTANCE);
    for (org.drools.core.spi.Activation activation : dormant) {
        _ab.addMatch(writeActivation(context, (AgendaItem) activation));
    }
    // serialize all network evaluator activations
    for (Activation activation : agenda.getActivations()) {
        if (activation.isRuleAgendaItem()) {
            // serialize it
            _ab.addRuleActivation(writeActivation(context, (AgendaItem) activation));
        }
    }
    _ksb.setAgenda(_ab.build());
}
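The marshaller receives its InternalWorkingMemory through the MarshallerWriteContext, but the same agenda state can be inspected directly from a session. A minimal sketch, assuming a Drools 7.x classpath container and assuming that the default KieSession implementation (StatefulKnowledgeSessionImpl) also implements InternalWorkingMemory; the class and method names below are illustrative, not part of the marshaller:

import org.drools.core.common.InternalAgenda;
import org.drools.core.common.InternalWorkingMemory;
import org.kie.api.KieServices;
import org.kie.api.runtime.KieSession;

public class AgendaInspection {
    public static void dumpAgendaGroupNames() {
        // assumption: the classpath KieContainer has a default KieSession configured
        KieSession ksession = KieServices.Factory.get().getKieClasspathContainer().newKieSession();
        // assumption: the session implementation is also an InternalWorkingMemory (true for StatefulKnowledgeSessionImpl)
        InternalWorkingMemory wm = (InternalWorkingMemory) ksession;
        InternalAgenda agenda = wm.getAgenda();
        // the agenda groups map is keyed by group name, as used by writeAgenda above
        for (String groupName : agenda.getAgendaGroupsMap().keySet()) {
            System.out.println("agenda group: " + groupName);
        }
        ksession.dispose();
    }
}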
Use of org.drools.core.common.InternalWorkingMemory in project drools by kiegroup.
In class ProtobufOutputMarshaller, method writeNodeMemories:
private static void writeNodeMemories(MarshallerWriteContext context, ProtobufMessages.RuleData.Builder _ksb) throws IOException {
    InternalWorkingMemory wm = context.wm;
    NodeMemories memories = wm.getNodeMemories();
    // not every node memory requires serialization, so we iterate over all of them
    // and process only those that require it
    for (BaseNode baseNode : context.sinks.values()) {
        Memory memory = memories.peekNodeMemory(baseNode);
        if (memory != null) {
            ProtobufMessages.NodeMemory _node = null;
            switch (memory.getNodeType()) {
                case NodeTypeEnums.AccumulateNode: {
                    _node = writeAccumulateNodeMemory(baseNode.getId(), memory);
                    break;
                }
                case NodeTypeEnums.RightInputAdaterNode: {
                    _node = writeRIANodeMemory(baseNode.getId(), baseNode, memories);
                    break;
                }
                case NodeTypeEnums.FromNode:
                case NodeTypeEnums.ReactiveFromNode: {
                    _node = writeFromNodeMemory(baseNode.getId(), memory);
                    break;
                }
                case NodeTypeEnums.QueryElementNode: {
                    _node = writeQueryElementNodeMemory(baseNode.getId(), memory, wm);
                    break;
                }
            }
            if (_node != null) {
                // not all node memories require serialization
                _ksb.addNodeMemory(_node);
            }
        }
    }
}
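peekNodeMemory returns null for nodes whose memory has never been created, so only nodes that actually hold state reach the switch above. A small helper in the same spirit, written as a fragment; the method name and the counting logic are illustrative and not part of ProtobufOutputMarshaller:

// illustrative helper: counts how many candidate sink nodes actually have memory allocated
private static int countAllocatedNodeMemories(MarshallerWriteContext context) {
    InternalWorkingMemory wm = context.wm;
    NodeMemories memories = wm.getNodeMemories();
    int allocated = 0;
    for (BaseNode baseNode : context.sinks.values()) {
        // peekNodeMemory does not lazily create memory, so unused nodes are skipped cheaply
        if (memories.peekNodeMemory(baseNode) != null) {
            allocated++;
        }
    }
    return allocated;
}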
Use of org.drools.core.common.InternalWorkingMemory in project drools by kiegroup.
In class AddRemoveRule, method flushStagedTuples:
private static void flushStagedTuples(TerminalNode tn, PathMemory pmem, PathEndNodes pathEndNodes, InternalWorkingMemory wm) {
    // first flush the subject rule, then flush any staging lists that are part of a merge
    if (pmem.isInitialized()) {
        new RuleNetworkEvaluator().evaluateNetwork(pmem, pmem.getRuleAgendaItem().getRuleExecutor(), wm);
    }
    // With the rule being removed now flushed, check any splits that will be merged to see if they also need flushing.
    // Beware that flushing a node higher up might again cause lower nodes to receive more staged items,
    // so track flushed items in case they need to be re-flushed.
    List<Flushed> flushed = new ArrayList<Flushed>();
    for (LeftTupleNode node : pathEndNodes.subjectSplits) {
        if (!isSplit(node, tn)) {
            // check if the split is there even without the processed rule
            Memory mem = wm.getNodeMemories().peekNodeMemory(node);
            if (mem != null) {
                SegmentMemory smem = mem.getSegmentMemory();
                if (!smem.isEmpty()) {
                    for (SegmentMemory childSmem = smem.getFirst(); childSmem != null; childSmem = childSmem.getNext()) {
                        if (!childSmem.getStagedLeftTuples().isEmpty()) {
                            PathMemory childPmem = childSmem.getPathMemories().get(0);
                            flushed.add(new Flushed(childSmem, childPmem));
                            forceFlushLeftTuple(childPmem, childSmem, wm, childSmem.getStagedLeftTuples().takeAll());
                        }
                    }
                }
            }
        }
    }
    // ensure there is one full iteration without any flushing,
    // to avoid one flush re-populating another, already flushed, segment
    int flushCount = 1;
    while (!flushed.isEmpty() && flushCount != 0) {
        flushCount = 0;
        for (Flushed path : flushed) {
            if (!path.segmentMemory.getStagedLeftTuples().isEmpty()) {
                flushCount++;
                forceFlushLeftTuple(pmem, path.segmentMemory, wm, path.segmentMemory.getStagedLeftTuples().takeAll());
            }
        }
    }
}
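The Flushed holder used above is not shown in this excerpt. From its usage here (the two-argument constructor and the path.segmentMemory field access) it simply pairs a SegmentMemory with its PathMemory. A plausible reconstruction, hedged; the second field name is inferred, not confirmed by this excerpt:

// reconstructed from the usage in flushStagedTuples; the real class is a nested helper in AddRemoveRule
private static class Flushed {
    SegmentMemory segmentMemory;
    PathMemory pathMemory; // assumption: field name inferred from the constructor arguments

    Flushed(SegmentMemory segmentMemory, PathMemory pathMemory) {
        this.segmentMemory = segmentMemory;
        this.pathMemory = pathMemory;
    }
}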
Use of org.drools.core.common.InternalWorkingMemory in project drools by kiegroup.
In class AddRemoveRule, method removeRule:
/**
 * This method is called before the rule nodes are removed from the network.
 * On remove, tuples are processed before the segments and pmems have been adjusted.
 */
public static void removeRule(TerminalNode tn, InternalWorkingMemory[] wms, InternalKnowledgeBase kBase) {
    if (log.isTraceEnabled()) {
        log.trace("Removing Rule {}", tn.getRule().getName());
    }
    boolean hasProtos = kBase.hasSegmentPrototypes();
    boolean hasWms = wms.length > 0;
    if (!hasProtos && !hasWms) {
        return;
    }
    RuleImpl rule = tn.getRule();
    LeftTupleNode firstSplit = getNetworkSplitPoint(tn);
    PathEndNodes pathEndNodes = getPathEndNodes(kBase, firstSplit, tn, rule, hasProtos, hasWms);
    for (InternalWorkingMemory wm : wms) {
        wm.flushPropagations();
        PathEndNodeMemories tnms = getPathEndMemories(wm, pathEndNodes);
        if (!tnms.subjectPmems.isEmpty()) {
            if (NodeTypeEnums.LeftInputAdapterNode == firstSplit.getType() && firstSplit.getAssociationsSize() == 1) {
                if (tnms.subjectPmem != null) {
                    flushStagedTuples(firstSplit, tnms.subjectPmem, wm);
                }
                processLeftTuples(firstSplit, wm, false, tn.getRule());
                removeNewPaths(wm, tnms.subjectPmems);
            } else {
                flushStagedTuples(tn, tnms.subjectPmem, pathEndNodes, wm);
                processLeftTuples(firstSplit, wm, false, tn.getRule());
                removeNewPaths(wm, tnms.subjectPmems);
                Map<PathMemory, SegmentMemory[]> prevSmemsLookup = reInitPathMemories(tnms.otherPmems, tn);
                // must collect all visited SegmentMemories, for link notification
                Set<SegmentMemory> smemsToNotify = handleExistingPaths(tn, prevSmemsLookup, tnms.otherPmems, wm, ExistingPathStrategy.REMOVE_STRATEGY);
                notifySegments(smemsToNotify, wm);
            }
        }
        // subjectPmem can be null if it was never initialized
        if (tnms.subjectPmem != null && tnms.subjectPmem.isInitialized() && tnms.subjectPmem.getRuleAgendaItem().isQueued()) {
            tnms.subjectPmem.getRuleAgendaItem().dequeue();
        }
    }
}
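In application code this path is normally reached through the public KieBase API rather than by calling AddRemoveRule directly; removing a rule from a KieBase with live sessions is what populates the wms array with every attached InternalWorkingMemory. A minimal sketch, assuming a classpath container and a deployed rule; the package and rule name are placeholders:

import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.runtime.KieSession;

public class RemoveRuleExample {
    public static void main(String[] args) {
        KieBase kieBase = KieServices.Factory.get().getKieClasspathContainer().getKieBase();
        KieSession ksession = kieBase.newKieSession();
        // removing a rule from a KieBase with active sessions eventually reaches AddRemoveRule.removeRule
        kieBase.removeRule("org.example.rules", "MyRule"); // placeholder package and rule name
        ksession.dispose();
    }
}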
Use of org.drools.core.common.InternalWorkingMemory in project drools by kiegroup.
In class AddRemoveRule, method removeNewPaths:
private static void removeNewPaths(InternalWorkingMemory wm, List<PathMemory> pmems) {
    Set<Integer> visitedNodes = new HashSet<Integer>();
    for (PathMemory pmem : pmems) {
        LeftTupleSink tipNode = (LeftTupleSink) pmem.getPathEndNode();
        LeftTupleNode child = tipNode;
        LeftTupleNode parent = tipNode.getLeftTupleSource();
        while (true) {
            if (child.getAssociationsSize() == 1 && NodeTypeEnums.isBetaNode(child)) {
                // If this is a beta node, it'll delete all the right input data
                deleteRightInputData((LeftTupleSink) child, wm);
            }
            if (parent != null && parent.getAssociationsSize() != 1 && child.getAssociationsSize() == 1) {
                // all right input data must be propagated
                if (!visitedNodes.contains(child.getId())) {
                    Memory mem = wm.getNodeMemories().peekNodeMemory(parent);
                    if (mem != null && mem.getSegmentMemory() != null) {
                        SegmentMemory sm = mem.getSegmentMemory();
                        if (sm.getFirst() != null) {
                            SegmentMemory childSm = wm.getNodeMemories().peekNodeMemory(child).getSegmentMemory();
                            sm.remove(childSm);
                        }
                    }
                }
            } else {
                Memory mem = wm.getNodeMemories().peekNodeMemory(child);
                // the root of each segment
                if (mem != null) {
                    SegmentMemory sm = mem.getSegmentMemory();
                    if (sm != null && sm.getPathMemories().contains(pmem)) {
                        mem.getSegmentMemory().removePathMemory(pmem);
                    }
                }
            }
            if (parent == null) {
                break;
            }
            visitedNodes.add(child.getId());
            child = parent;
            parent = parent.getLeftTupleSource();
        }
    }
}
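The loop above walks each path from its end node back towards the root via getLeftTupleSource(), stopping when the source is null. A stripped-down sketch of that traversal alone, written as a fragment; the helper name and the id-collecting purpose are illustrative only:

// illustrative traversal: collects the ids of every node on a path,
// walking from the path end node towards the root exactly as removeNewPaths does
private static List<Integer> collectPathNodeIds(PathMemory pmem) {
    List<Integer> ids = new ArrayList<Integer>();
    LeftTupleNode node = (LeftTupleSink) pmem.getPathEndNode();
    while (node != null) {
        ids.add(node.getId());
        node = node.getLeftTupleSource(); // null once the left input adapter node is passed
    }
    return ids;
}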