use of org.wso2.siddhi.annotation.Example in project siddhi by wso2.
the class SiddhiAnnotationProcessor method process.
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
    // Iterate over all @Extension annotated elements.
    for (Element element : roundEnv.getElementsAnnotatedWith(Extension.class)) {
        // Check if a class has been annotated with @Extension.
        if (element.getKind() == ElementKind.CLASS) {
            String superClass = getMatchingSuperClass(element, new String[]{
                    AnnotationConstants.SINK_MAPPER_SUPER_CLASS,
                    AnnotationConstants.SINK_SUPER_CLASS,
                    AnnotationConstants.FUNCTION_EXECUTOR_SUPER_CLASS,
                    AnnotationConstants.AGGREGATION_ATTRIBUTE_SUPER_CLASS,
                    AnnotationConstants.DISTRIBUTION_STRATEGY_SUPER_CLASS,
                    AnnotationConstants.STREAM_PROCESSOR_SUPER_CLASS,
                    AnnotationConstants.STREAM_FUNCTION_PROCESSOR_SUPER_CLASS,
                    AnnotationConstants.STORE_SUPER_CLASS,
                    AnnotationConstants.SOURCE_SUPER_CLASS,
                    AnnotationConstants.SOURCE_MAPPER_SUPER_CLASS,
                    AnnotationConstants.WINDOW_PROCESSOR_CLASS,
                    AnnotationConstants.SCRIPT_SUPER_CLASS,
                    AnnotationConstants.INCREMENTAL_ATTRIBUTE_AGGREGATOR_SUPER_CLASS});
            AbstractAnnotationProcessor abstractAnnotationProcessor = null;
            Extension annotation = element.getAnnotation(Extension.class);
            String name = annotation.name();
            String description = annotation.description();
            String namespace = annotation.namespace();
            Parameter[] parameters = annotation.parameters();
            ReturnAttribute[] returnAttributes = annotation.returnAttributes();
            SystemParameter[] systemParameters = annotation.systemParameter();
            Example[] examples = annotation.examples();
            String extensionClassFullName = element.asType().toString();
            if (superClass != null) {
                switch (superClass) {
                    case AnnotationConstants.DISTRIBUTION_STRATEGY_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new DistributionStrategyValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.SINK_MAPPER_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new SinkMapperValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.SINK_SUPER_CLASS:
                        abstractAnnotationProcessor = new SinkValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.FUNCTION_EXECUTOR_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new FunctionExecutorValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.AGGREGATION_ATTRIBUTE_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new AggregationAttributeValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.STREAM_PROCESSOR_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new StreamProcessorValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.SOURCE_SUPER_CLASS:
                        abstractAnnotationProcessor = new SourceValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.SOURCE_MAPPER_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new SourceMapperValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.STORE_SUPER_CLASS:
                        abstractAnnotationProcessor = new StoreValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.STREAM_FUNCTION_PROCESSOR_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new StreamFunctionProcessorValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.WINDOW_PROCESSOR_CLASS:
                        abstractAnnotationProcessor =
                                new WindowProcessorValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.SCRIPT_SUPER_CLASS:
                        abstractAnnotationProcessor = new ScriptValidationAnnotationProcessor(extensionClassFullName);
                        break;
                    case AnnotationConstants.INCREMENTAL_ATTRIBUTE_AGGREGATOR_SUPER_CLASS:
                        abstractAnnotationProcessor =
                                new IncrementalAggregationAttributeValidationAnnotationProcessor(
                                        extensionClassFullName);
                        break;
                    default:
                        // Throw error if no matching super class.
                        showBuildError(MessageFormat.format("Default switch case executed as there is no "
                                + "matching super class option for {0}.", superClass), element);
                        break;
                }
                if (abstractAnnotationProcessor != null) {
                    try {
                        abstractAnnotationProcessor.basicParameterValidation(name, description, namespace);
                        abstractAnnotationProcessor.parameterValidation(parameters);
                        abstractAnnotationProcessor.returnAttributesValidation(returnAttributes);
                        abstractAnnotationProcessor.systemParametersValidation(systemParameters);
                        abstractAnnotationProcessor.examplesValidation(examples);
                    } catch (AnnotationValidationException e) {
                        showBuildError(e.getMessage(), element);
                    }
                } else {
                    showBuildError(MessageFormat.format("Error while validating the extension with super class "
                            + "{0}: abstractAnnotationProcessor cannot be null.", superClass), element);
                }
            } else {
                // Throw error if no matching super class.
                showBuildError("Class does not have a matching Siddhi Extension super class.", element);
            }
        } else {
            // Throw error if the annotated element is a method or a package rather than a class.
            showBuildError(MessageFormat.format("Only classes can be annotated with @{0}.",
                    Extension.class.getCanonicalName()), element);
        }
    }
    // Returning false since this processor only validates.
    return false;
}
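For context, below is a minimal sketch of the kind of class this processor validates. The class, its package, and the parameter details are hypothetical and invented for illustration only; what is taken from the code above is the set of @Extension members being validated (name, namespace, description, parameters, returnAttributes, examples) and the requirement to extend one of the supported super classes, here FunctionExecutor.

// Hypothetical @Extension usage, for illustration only; the extension name, values,
// and the exact package of DataType are assumptions, not taken from the Siddhi code base.
import org.wso2.siddhi.annotation.Example;
import org.wso2.siddhi.annotation.Extension;
import org.wso2.siddhi.annotation.Parameter;
import org.wso2.siddhi.annotation.ReturnAttribute;
import org.wso2.siddhi.annotation.util.DataType;
import org.wso2.siddhi.core.executor.function.FunctionExecutor;

@Extension(
        name = "strlen",
        namespace = "custom",
        description = "Returns the length of the given string.",
        parameters = {
                @Parameter(name = "arg", description = "The input string.", type = {DataType.STRING})
        },
        returnAttributes = {
                @ReturnAttribute(description = "The length of the input string.", type = {DataType.INT})
        },
        examples = {
                @Example(syntax = "custom:strlen(symbol)",
                        description = "Returns the number of characters in the symbol attribute.")
        }
)
// Declared abstract here only so the sketch compiles without the FunctionExecutor method bodies.
public abstract class StringLengthFunctionExtension extends FunctionExecutor {
}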
use of org.wso2.siddhi.annotation.Example in project siddhi by wso2.
the class AggregationParser method parse.
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition,
                                        SiddhiAppContext siddhiAppContext,
                                        Map<String, AbstractDefinition> streamDefinitionMap,
                                        Map<String, AbstractDefinition> tableDefinitionMap,
                                        Map<String, AbstractDefinition> windowDefinitionMap,
                                        Map<String, AbstractDefinition> aggregationDefinitionMap,
                                        Map<String, Table> tableMap,
                                        Map<String, Window> windowMap,
                                        Map<String, AggregationRuntime> aggregationMap,
                                        SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException("AggregationDefinition instance is null. "
                + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId()
                + "'s timePeriod is null. Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(),
                aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId()
                + "'s selection is not defined. Hence, can't create the siddhi app '"
                + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(),
                aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream "
                + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
    }
    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
        String aggregatorName = aggregationDefinition.getId();
        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(),
                siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap,
                aggregationDefinitionMap, tableMap, windowMap, aggregationMap,
                incomingVariableExpressionExecutors, null, false, aggregatorName);
        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
        // Create new meta stream event.
        // This must hold the timestamp, group by attributes (if given) and the incremental attributes in the
        // onAfterWindowData array.
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
        // finalListOfIncrementalAttributes.
        // To enter data as onAfterWindowData.
        incomingMetaStreamEvent.initializeAfterWindowData();
        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
        // Expressions to get final aggregate outputs. e.g avg = sum/count
        List<Expression> outputExpressions = new ArrayList<>();
        // Expression executors to get final aggregate outputs. e.g avg = sum/count
        List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap,
                incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent,
                incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList,
                outputExpressions);
        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
        List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap,
                incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent,
                incomingExpressionExecutors, incrementalAttributeAggregators);
        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
        boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
        List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext,
                tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators,
                incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors,
                groupBy);
        outputExpressionExecutors.addAll(outputExpressions.stream()
                .map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0,
                        tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName))
                .collect(Collectors.toList()));
        // Create group by key generator
        GroupByKeyGenerator groupByKeyGenerator = null;
        if (groupBy) {
            groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent,
                    SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext,
                    aggregatorName);
        }
        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());
        Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(),
                entryValveExecutor, siddhiAppContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations,
                processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder,
                aggregationDefinition.getAnnotations(), groupByVariableList);
        int bufferSize = 0;
        Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null,
                aggregationDefinition.getAnnotations());
        if (element != null) {
            try {
                bufferSize = Integer.parseInt(element.getValue());
            } catch (NumberFormatException e) {
                throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
            }
        }
        if (bufferSize > 0) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
                throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the "
                        + "first duration value is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffer size is used to process out of order events. However, events would never be out of
                // order if they are processed based on event arrival time.
                throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated "
                        + "based on event arrival time.");
            }
        } else if (bufferSize < 0) {
            throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found "
                    + bufferSize + " as the provided value");
        }
        boolean ignoreEventsOlderThanBuffer = false;
        element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER,
                null, aggregationDefinition.getAnnotations());
        if (element != null) {
            if (element.getValue().equalsIgnoreCase("true")) {
                ignoreEventsOlderThanBuffer = true;
            } else if (!element.getValue().equalsIgnoreCase("false")) {
                throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must be true or false");
            }
        }
        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(
                isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors,
                groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations,
                aggregationTables, siddhiAppContext, aggregatorName);
        // Recreate in-memory data from tables
        RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations,
                aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap,
                windowMap, aggregationMap);
        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_INSERT);
        }
        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor,
                incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert,
                throughputTrackerInsert, siddhiAppContext));
        List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
        ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
        return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables,
                ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext,
                baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors,
                latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
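As a usage sketch (not taken from the Siddhi test suite), the snippet below shows the kind of Siddhi app whose aggregation definition ends up in this parse() method, wired up through SiddhiManager. The stream and aggregation names are invented; the @BufferSize and @IgnoreEventsOlderThanBuffer annotation names are assumed to be the ones read above via SiddhiConstants.ANNOTATION_BUFFER_SIZE and SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.stream.input.InputHandler;

public class AggregationUsageSketch {
    public static void main(String[] args) throws InterruptedException {
        // An aggregation definition of the kind AggregationParser.parse() turns into an AggregationRuntime.
        // A buffer size is only valid with external-time aggregation ('aggregate by timestamp') and a root
        // duration of seconds/minutes/hours/days, matching the checks in the parse() method above.
        String siddhiApp =
                "define stream TradeStream (symbol string, price double, volume long, timestamp long); "
                + "@BufferSize('3') @IgnoreEventsOlderThanBuffer('true') "
                + "define aggregation TradeAggregation "
                + "from TradeStream "
                + "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume "
                + "group by symbol "
                + "aggregate by timestamp every sec ... year;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();

        // Events carry the external 'timestamp' attribute used for aggregation.
        InputHandler inputHandler = runtime.getInputHandler("TradeStream");
        inputHandler.send(new Object[]{"WSO2", 100.0, 10L, 1496289950000L});

        runtime.shutdown();
        siddhiManager.shutdown();
    }
}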
use of org.wso2.siddhi.annotation.Example in project siddhi by wso2.
the class DocumentationUtils method addExtensionMetaDataIntoNamespaceList.
/**
 * Generates extension meta data from the annotated data in the class.
 *
 * @param namespaceList  The list of namespaces to which the new extension will be added
 * @param extensionClass Class from which the meta data should be extracted
 * @param logger         The maven plugin logger
 */
private static void addExtensionMetaDataIntoNamespaceList(List<NamespaceMetaData> namespaceList,
                                                           Class<?> extensionClass, Log logger) {
    Extension extensionAnnotation = extensionClass.getAnnotation(Extension.class);
    if (extensionAnnotation != null) {
        // Discarding extension classes without annotation
        ExtensionMetaData extensionMetaData = new ExtensionMetaData();
        // Finding extension type
        String extensionType = null;
        for (Map.Entry<ExtensionType, Class<?>> entry : ExtensionType.getSuperClassMap().entrySet()) {
            Class<?> superClass = entry.getValue();
            if (superClass.isAssignableFrom(extensionClass) && superClass != extensionClass) {
                extensionType = entry.getKey().getValue();
                break;
            }
        }
        // Discarding the extension if it belongs to an unknown type
        if (extensionType == null) {
            logger.warn("Discarding extension (belonging to an unknown extension type): "
                    + extensionClass.getCanonicalName());
            return;
        }
        extensionMetaData.setName(extensionAnnotation.name());
        extensionMetaData.setDescription(extensionAnnotation.description());
        // Adding query parameters
        ParameterMetaData[] parameters = new ParameterMetaData[extensionAnnotation.parameters().length];
        for (int i = 0; i < extensionAnnotation.parameters().length; i++) {
            Parameter parameterAnnotation = extensionAnnotation.parameters()[i];
            ParameterMetaData parameter = new ParameterMetaData();
            parameter.setName(parameterAnnotation.name());
            parameter.setType(Arrays.asList(parameterAnnotation.type()));
            parameter.setDescription(parameterAnnotation.description());
            parameter.setOptional(parameterAnnotation.optional());
            parameter.setDynamic(parameterAnnotation.dynamic());
            parameter.setDefaultValue(parameterAnnotation.defaultValue());
            parameters[i] = parameter;
        }
        extensionMetaData.setParameters(Arrays.asList(parameters));
        // Adding system parameters
        SystemParameterMetaData[] systemParameters =
                new SystemParameterMetaData[extensionAnnotation.systemParameter().length];
        for (int i = 0; i < extensionAnnotation.systemParameter().length; i++) {
            SystemParameter systemParameterAnnotation = extensionAnnotation.systemParameter()[i];
            SystemParameterMetaData systemParameter = new SystemParameterMetaData();
            systemParameter.setName(systemParameterAnnotation.name());
            systemParameter.setDescription(systemParameterAnnotation.description());
            systemParameter.setDefaultValue(systemParameterAnnotation.defaultValue());
            systemParameter.setPossibleParameters(Arrays.asList(systemParameterAnnotation.possibleParameters()));
            systemParameters[i] = systemParameter;
        }
        extensionMetaData.setSystemParameters(Arrays.asList(systemParameters));
        // Adding return attributes
        ReturnAttributeMetaData[] returnAttributes =
                new ReturnAttributeMetaData[extensionAnnotation.returnAttributes().length];
        for (int i = 0; i < extensionAnnotation.returnAttributes().length; i++) {
            ReturnAttribute parameterAnnotation = extensionAnnotation.returnAttributes()[i];
            ReturnAttributeMetaData returnAttribute = new ReturnAttributeMetaData();
            returnAttribute.setName(parameterAnnotation.name());
            returnAttribute.setType(Arrays.asList(parameterAnnotation.type()));
            returnAttribute.setDescription(parameterAnnotation.description());
            returnAttributes[i] = returnAttribute;
        }
        extensionMetaData.setReturnAttributes(Arrays.asList(returnAttributes));
        // Adding examples
        ExampleMetaData[] examples = new ExampleMetaData[extensionAnnotation.examples().length];
        for (int i = 0; i < extensionAnnotation.examples().length; i++) {
            Example exampleAnnotation = extensionAnnotation.examples()[i];
            ExampleMetaData exampleMetaData = new ExampleMetaData();
            exampleMetaData.setSyntax(exampleAnnotation.syntax());
            exampleMetaData.setDescription(exampleAnnotation.description());
            examples[i] = exampleMetaData;
        }
        extensionMetaData.setExamples(Arrays.asList(examples));
        // Finding the namespace
        String namespaceName = extensionAnnotation.namespace();
        if (Objects.equals(namespaceName, "")) {
            namespaceName = Constants.CORE_NAMESPACE;
        }
        // Finding the relevant namespace in the namespace list
        NamespaceMetaData namespace = null;
        for (NamespaceMetaData existingNamespace : namespaceList) {
            if (Objects.equals(existingNamespace.getName(), namespaceName)) {
                namespace = existingNamespace;
                break;
            }
        }
        // Creating namespace if it doesn't exist
        if (namespace == null) {
            namespace = new NamespaceMetaData();
            namespace.setName(namespaceName);
            namespace.setExtensionMap(new TreeMap<>());
            namespaceList.add(namespace);
        }
        // Adding to the relevant extension metadata list in the namespace
        List<ExtensionMetaData> extensionMetaDataList =
                namespace.getExtensionMap().computeIfAbsent(extensionType, k -> new ArrayList<>());
        extensionMetaDataList.add(extensionMetaData);
    }
}
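Since this method reads only syntax() and description() from each Example, the annotation itself can be pictured roughly as below. This is an inferred sketch, not the actual declaration from the Siddhi code base; the retention policy, targets, and any default values are assumptions.

package org.wso2.siddhi.annotation;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

// Inferred sketch of the Example annotation; only the two accessors used above are certain.
@Retention(RetentionPolicy.RUNTIME)
public @interface Example {

    // Sample Siddhi syntax demonstrating the extension, e.g. "custom:strlen(symbol)".
    String syntax();

    // Explanation of what the sample syntax does.
    String description();
}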
use of org.wso2.siddhi.annotation.Example in project ballerina by ballerina-lang.
the class DocumentationTest method testInlineCodeEnclosedTripleBackTicks.
@Test(description = "Test doc inline code with triple backticks.")
public void testInlineCodeEnclosedTripleBackTicks() {
    CompileResult compileResult = BCompileUtil.compile("test-src/documentation/doc_inline_triple.bal");
    Assert.assertEquals(0, compileResult.getWarnCount());
    PackageNode packageNode = compileResult.getAST();
    BLangVariable constant = (BLangVariable) packageNode.getGlobalVariables().get(0);
    List<BLangDocumentation> docNodes = constant.docAttachments;
    BLangDocumentation dNode = docNodes.get(0);
    Assert.assertNotNull(dNode);
    Assert.assertEquals(dNode.getAttributes().size(), 0);
    Assert.assertEquals(dNode.documentationText, "\n"
            + " Example of a string template:\n"
            + " ```string s = string `hello {{name}}`;```\n"
            + "\n"
            + " Example for an xml literal:\n"
            + " ```xml x = xml `<{{tagName}}>hello</{{tagName}}>`;```\n");
}
use of org.wso2.siddhi.annotation.Example in project ballerina by ballerina-lang.
the class DocumentationTest method testInlineCode.
@Test(description = "Test doc inline code.")
public void testInlineCode() {
    CompileResult compileResult = BCompileUtil.compile("test-src/documentation/doc_inline.bal");
    Assert.assertEquals(0, compileResult.getWarnCount());
    PackageNode packageNode = compileResult.getAST();
    BLangVariable connector = (BLangVariable) packageNode.getGlobalVariables().get(0);
    List<BLangDocumentation> docNodes = connector.docAttachments;
    BLangDocumentation dNode = docNodes.get(0);
    Assert.assertNotNull(dNode);
    Assert.assertEquals(dNode.getAttributes().size(), 0);
    Assert.assertEquals(dNode.documentationText, "\n"
            + " Example of a string template:\n"
            + " ``string s = string `hello {{name}}`;``\n"
            + "\n"
            + " Example for an xml literal:\n"
            + " ``xml x = xml `<{{tagName}}>hello</{{tagName}}>`;``\n");
}