Use of won.bot.framework.eventbot.event.impl.crawlconnection.CrawlConnectionCommandEvent in project webofneeds by researchstudio-sat.
The class DebugBotIncomingMessageToEventMappingAction, method referToEarlierMessages:
private void referToEarlierMessages(EventListenerContext ctx, EventBus bus, Connection con, String crawlAnnouncement,
        MessageFinder messageFinder, MessageReferrer messageReferrer, TextMessageMaker textMessageMaker) {
    // announce the crawl to the conversation partner
    Model messageModel = WonRdfUtils.MessageUtils.textMessage(crawlAnnouncement);
    bus.publish(new ConnectionMessageCommandEvent(con, messageModel));
    // initiate crawl behaviour
    CrawlConnectionCommandEvent command = new CrawlConnectionCommandEvent(con.getNeedURI(), con.getConnectionURI());
    CrawlConnectionDataBehaviour crawlConnectionDataBehaviour =
            new CrawlConnectionDataBehaviour(ctx, command, Duration.ofSeconds(60));
    final StopWatch crawlStopWatch = new StopWatch();
    crawlStopWatch.start("crawl");
    // fetch the agreement protocol state synchronously and time it
    AgreementProtocolState state =
            WonConversationUtils.getAgreementProtocolState(con.getConnectionURI(), ctx.getLinkedDataSource());
    crawlStopWatch.stop();
    Duration crawlDuration = Duration.ofMillis(crawlStopWatch.getLastTaskTimeMillis());
    messageModel = WonRdfUtils.MessageUtils.textMessage("Finished crawl in " + getDurationString(crawlDuration)
            + " seconds. The dataset has " + state.getConversationDataset().asDatasetGraph().size() + " rdf graphs.");
    getEventListenerContext().getEventBus().publish(new ConnectionMessageCommandEvent(con, messageModel));
    // build and send the message that refers to earlier messages found in the conversation
    messageModel = makeReferringMessage(state, messageFinder, messageReferrer, textMessageMaker);
    getEventListenerContext().getEventBus().publish(new ConnectionMessageCommandEvent(con, messageModel));
    crawlConnectionDataBehaviour.activate();
}
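Note that referToEarlierMessages activates the behaviour without registering a result handler; the crawled dataset is delivered asynchronously through the events shown in CrawlConnectionDataBehaviour.onActivate below. The following is a minimal sketch of consuming that result via onResult, modeled on the validate example further down; the anonymous BaseEventBotAction handler, and the assumption that onResult accepts it, are illustrative rather than code from the project.

CrawlConnectionCommandEvent command = new CrawlConnectionCommandEvent(con.getNeedURI(), con.getConnectionURI());
CrawlConnectionDataBehaviour crawlBehaviour = new CrawlConnectionDataBehaviour(ctx, command, Duration.ofSeconds(60));
// assumption: onResult accepts any event bot action and runs it with the terminal crawl event
crawlBehaviour.onResult(new BaseEventBotAction(ctx) {
    @Override
    protected void doRun(Event event, EventListener executingListener) throws Exception {
        if (event instanceof CrawlConnectionCommandSuccessEvent) {
            Dataset crawledData = ((CrawlConnectionCommandSuccessEvent) event).getCrawledData();
            logger.debug("crawl of connection {} finished, dataset has {} rdf graphs",
                    command.getConnectionURI(), crawledData.asDatasetGraph().size());
        } else {
            logger.debug("crawl of connection {} did not succeed", command.getConnectionURI());
        }
    }
});
crawlBehaviour.activate();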
Use of won.bot.framework.eventbot.event.impl.crawlconnection.CrawlConnectionCommandEvent in project webofneeds by researchstudio-sat.
The class CrawlConnectionDataBehaviour, method onActivate:
@Override
protected void onActivate(Optional<Object> message) {
    logger.debug("activating crawling connection data for connection {}", command.getConnectionURI());
    logger.debug("will deactivate automatically after " + abortTimeout);
    // invalidate cached linked data for both sides of the connection so the crawl sees fresh data
    LinkedDataSource linkedDataSource = context.getLinkedDataSource();
    if (linkedDataSource instanceof CachingLinkedDataSource) {
        URI toInvalidate = WonLinkedDataUtils.getEventContainerURIforConnectionURI(command.getConnectionURI(), linkedDataSource);
        ((CachingLinkedDataSource) linkedDataSource).invalidate(toInvalidate);
        ((CachingLinkedDataSource) linkedDataSource).invalidate(toInvalidate, command.getNeedURI());
        URI remoteConnectionUri = WonLinkedDataUtils.getRemoteConnectionURIforConnectionURI(command.getConnectionURI(), linkedDataSource);
        toInvalidate = WonLinkedDataUtils.getEventContainerURIforConnectionURI(remoteConnectionUri, linkedDataSource);
        ((CachingLinkedDataSource) linkedDataSource).invalidate(toInvalidate);
        ((CachingLinkedDataSource) linkedDataSource).invalidate(toInvalidate, command.getNeedURI());
    }
    // deactivate automatically when the abort timeout is reached
    context.getTaskScheduler().schedule(new Runnable() {
        @Override
        public void run() {
            deactivate();
        }
    }, new Date(System.currentTimeMillis() + abortTimeout.toMillis()));
    // property paths for crawling the need: its event container and the events it contains
    List<Path> propertyPaths = new ArrayList<>();
    PrefixMapping pmap = new PrefixMappingImpl();
    pmap.withDefaultMappings(PrefixMapping.Standard);
    pmap.setNsPrefix("won", WON.getURI());
    pmap.setNsPrefix("msg", WONMSG.getURI());
    propertyPaths.add(PathParser.parse("won:hasEventContainer", pmap));
    propertyPaths.add(PathParser.parse("won:hasEventContainer/rdfs:member", pmap));
    CrawlCommandEvent crawlNeedCommandEvent = new CrawlCommandEvent(command.getNeedURI(), command.getNeedURI(), propertyPaths, 10000, 5);
    // property paths for crawling the connection: local and remote event containers, messages and their remote counterparts
    propertyPaths = new ArrayList<>();
    propertyPaths.add(PathParser.parse("won:hasEventContainer", pmap));
    propertyPaths.add(PathParser.parse("won:hasEventContainer/rdfs:member", pmap));
    propertyPaths.add(PathParser.parse("won:hasEventContainer/rdfs:member/msg:hasCorrespondingRemoteMessage", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteNeed", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteNeed/won:hasEventContainer", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteNeed/won:hasEventContainer/rdfs:member", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteConnection", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteConnection/won:hasEventContainer", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteConnection/won:hasEventContainer/rdfs:member", pmap));
    propertyPaths.add(PathParser.parse("won:hasRemoteConnection/won:hasEventContainer/rdfs:member/msg:hasCorrespondingRemoteMessage", pmap));
    CrawlCommandEvent crawlConnectionCommandEvent = new CrawlCommandEvent(command.getNeedURI(), command.getConnectionURI(), propertyPaths, 10000, 5);
    Dataset crawledData = DatasetFactory.createGeneral();
    // add crawlcommand listener
    this.subscribeWithAutoCleanup(CrawlCommandEvent.class,
            new ActionOnEventListener(context,
                    new OrFilter(new SameEventFilter(crawlNeedCommandEvent), new SameEventFilter(crawlConnectionCommandEvent)),
                    new CrawlAction(context)));
    // when the first crawl succeeds, start the second
    this.subscribeWithAutoCleanup(CrawlCommandSuccessEvent.class,
            new ActionOnEventListener(context, new CommandResultFilter(crawlNeedCommandEvent), new BaseEventBotAction(context) {
                @Override
                protected void doRun(Event event, EventListener executingListener) throws Exception {
                    logger.debug("finished crawling need data.");
                    Dataset dataset = ((CrawlCommandSuccessEvent) event).getCrawledData();
                    RdfUtils.addDatasetToDataset(crawledData, dataset);
                    // now crawl connection data
                    context.getEventBus().publish(crawlConnectionCommandEvent);
                }
            }));
    // when we're done crawling, report success and deactivate
    this.subscribeWithAutoCleanup(CrawlCommandSuccessEvent.class,
            new ActionOnEventListener(context, new CommandResultFilter(crawlConnectionCommandEvent), new BaseEventBotAction(context) {
                @Override
                protected void doRun(Event event, EventListener executingListener) throws Exception {
                    logger.debug("finished crawling connection data for connection {}", command.getConnectionURI());
                    Dataset dataset = ((CrawlCommandSuccessEvent) event).getCrawledData();
                    RdfUtils.addDatasetToDataset(crawledData, dataset);
                    context.getEventBus().publish(new CrawlConnectionCommandSuccessEvent(command, crawledData));
                    deactivate();
                }
            }));
    // when something goes wrong, abort
    this.subscribeWithAutoCleanup(CrawlCommandFailureEvent.class,
            new ActionOnFirstEventListener(context,
                    new OrFilter(new CommandResultFilter(crawlConnectionCommandEvent), new CommandResultFilter(crawlNeedCommandEvent)),
                    new BaseEventBotAction(context) {
                        @Override
                        protected void doRun(Event event, EventListener executingListener) throws Exception {
                            CrawlCommandFailureEvent failureEvent = (CrawlCommandFailureEvent) event;
                            logger.debug("crawling failed for connection {}, message: {}", command.getConnectionURI(), failureEvent.getMessage());
                            context.getEventBus().publish(new CrawlConnectionCommandFailureEvent(failureEvent.getMessage(), command));
                            deactivate();
                        }
                    }));
    // start crawling the need - the connection will be crawled when need crawling is done
    context.getEventBus().publish(crawlNeedCommandEvent);
}
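The behaviour reports its outcome by publishing a CrawlConnectionCommandSuccessEvent or CrawlConnectionCommandFailureEvent for the original CrawlConnectionCommandEvent. Besides onResult (used in the DebugBot examples), a caller could in principle subscribe to those terminal events on the event bus directly, mirroring the CommandResultFilter wiring above. The sketch below is illustrative only: it assumes EventBus.subscribe and CommandResultFilter work for these event types the same way they do for the inner CrawlCommandEvents, and that needUri, connectionUri, ctx and logger are available in the calling code.

CrawlConnectionCommandEvent command = new CrawlConnectionCommandEvent(needUri, connectionUri); // needUri/connectionUri assumed given
CrawlConnectionDataBehaviour behaviour = new CrawlConnectionDataBehaviour(ctx, command, Duration.ofSeconds(60));
// assumption: CommandResultFilter matches the success event against the originating command
ctx.getEventBus().subscribe(CrawlConnectionCommandSuccessEvent.class,
        new ActionOnFirstEventListener(ctx, new CommandResultFilter(command), new BaseEventBotAction(ctx) {
            @Override
            protected void doRun(Event event, EventListener executingListener) throws Exception {
                Dataset crawledData = ((CrawlConnectionCommandSuccessEvent) event).getCrawledData();
                logger.debug("connection {} crawled, dataset has {} rdf graphs",
                        command.getConnectionURI(), crawledData.asDatasetGraph().size());
            }
        }));
behaviour.activate();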
Use of won.bot.framework.eventbot.event.impl.crawlconnection.CrawlConnectionCommandEvent in project webofneeds by researchstudio-sat.
The class DebugBotIncomingMessageToEventMappingAction, method validate:
private void validate(EventListenerContext ctx, EventBus bus, Connection con) {
    Model messageModel = WonRdfUtils.MessageUtils.textMessage(
            "ok, I'll validate the connection - but I'll need to crawl the connection data first, please be patient.");
    bus.publish(new ConnectionMessageCommandEvent(con, messageModel));
    // initiate crawl behaviour
    CrawlConnectionCommandEvent command = new CrawlConnectionCommandEvent(con.getNeedURI(), con.getConnectionURI());
    CrawlConnectionDataBehaviour crawlConnectionDataBehaviour =
            new CrawlConnectionDataBehaviour(ctx, command, Duration.ofSeconds(60));
    final StopWatch crawlStopWatch = new StopWatch();
    crawlStopWatch.start("crawl");
    crawlConnectionDataBehaviour.onResult(new SendMessageReportingCrawlResultAction(ctx, con, crawlStopWatch));
    crawlConnectionDataBehaviour.onResult(new SendMessageOnCrawlResultAction(ctx, con) {
        @Override
        protected Model makeSuccessMessage(CrawlConnectionCommandSuccessEvent successEvent) {
            try {
                logger.debug("validating data of connection {}", command.getConnectionURI());
                // TODO: use one validator for all invocations
                WonConnectionValidator validator = new WonConnectionValidator();
                StringBuilder message = new StringBuilder();
                boolean valid = validator.validate(successEvent.getCrawledData(), message);
                String successMessage = "Connection " + command.getConnectionURI() + " is valid: " + valid + " " + message.toString();
                return WonRdfUtils.MessageUtils.textMessage(successMessage);
            } catch (Exception e) {
                return WonRdfUtils.MessageUtils.textMessage("Caught exception during validation: " + e);
            }
        }
    });
    crawlConnectionDataBehaviour.activate();
}
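Once a crawled dataset is at hand, the validator call from makeSuccessMessage can also be used on its own. A small sketch reusing only the calls shown above, where crawledData, bus and con are assumed to come from the surrounding bot code (e.g. crawledData taken from a CrawlConnectionCommandSuccessEvent):

// crawledData is assumed to be the Dataset taken from a CrawlConnectionCommandSuccessEvent
WonConnectionValidator validator = new WonConnectionValidator();
StringBuilder report = new StringBuilder();
boolean valid = validator.validate(crawledData, report);
Model reply = WonRdfUtils.MessageUtils.textMessage("Connection is valid: " + valid + " " + report);
bus.publish(new ConnectionMessageCommandEvent(con, reply));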