Use of org.opendaylight.restconf.common.context.InstanceIdentifierContext in project netconf by opendaylight.
The class JsonNormalizedNodeBodyWriter, method writeNormalizedNode:
private static void writeNormalizedNode(final JsonWriter jsonWriter, final SchemaPath path,
        final InstanceIdentifierContext<SchemaNode> context, final NormalizedNode data,
        final DepthParam depth, final List<Set<QName>> fields) throws IOException {
    final RestconfNormalizedNodeWriter nnWriter;
    if (context.getSchemaNode() instanceof RpcDefinition) {
        /*
         * RpcDefinition is not supported as an initial codec in JSONStreamWriter,
         * so we need to emit the initial output declaration ourselves.
         */
        final RpcDefinition rpc = (RpcDefinition) context.getSchemaNode();
        final SchemaPath rpcPath = SchemaPath.of(Absolute.of(rpc.getQName(), rpc.getOutput().getQName()));
        nnWriter = createNormalizedNodeWriter(context, rpcPath, jsonWriter, depth, fields);
        final Module module = context.getSchemaContext()
            .findModule(data.getIdentifier().getNodeType().getModule()).get();
        jsonWriter.name(module.getName() + ":output");
        jsonWriter.beginObject();
        writeChildren(nnWriter, (ContainerNode) data);
        jsonWriter.endObject();
    } else if (context.getSchemaNode() instanceof ActionDefinition) {
        /*
         * ActionDefinition is not supported as an initial codec in JSONStreamWriter,
         * so we need to emit the initial output declaration ourselves.
         */
        final ActionDefinition actDef = (ActionDefinition) context.getSchemaNode();
        final List<QName> qNames = context.getInstanceIdentifier().getPathArguments().stream()
            .filter(arg -> !(arg instanceof YangInstanceIdentifier.NodeIdentifierWithPredicates))
            .filter(arg -> !(arg instanceof YangInstanceIdentifier.AugmentationIdentifier))
            .map(PathArgument::getNodeType)
            .collect(Collectors.toList());
        qNames.add(actDef.getQName());
        qNames.add(actDef.getOutput().getQName());
        final SchemaPath actPath = SchemaPath.of(Absolute.of(qNames));
        nnWriter = createNormalizedNodeWriter(context, actPath, jsonWriter, depth, fields);
        final Module module = context.getSchemaContext()
            .findModule(data.getIdentifier().getNodeType().getModule()).get();
        jsonWriter.name(module.getName() + ":output");
        jsonWriter.beginObject();
        writeChildren(nnWriter, (ContainerNode) data);
        jsonWriter.endObject();
    } else {
        if (SchemaPath.ROOT.equals(path)) {
            nnWriter = createNormalizedNodeWriter(context, path, jsonWriter, depth, fields);
        } else {
            nnWriter = createNormalizedNodeWriter(context, path.getParent(), jsonWriter, depth, fields);
        }
        if (data instanceof MapEntryNode) {
            // RESTCONF allows returning a single list item. We need to wrap it in a MapNode
            // in order to serialize it properly (a standalone sketch of this wrapping follows the method).
            nnWriter.write(ImmutableNodes.mapNodeBuilder(data.getIdentifier().getNodeType())
                .withChild((MapEntryNode) data)
                .build());
        } else {
            nnWriter.write(data);
        }
    }
    nnWriter.flush();
}
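For context, the MapEntryNode wrapping in the final branch can be shown in isolation. The following is a minimal sketch using the yangtools ImmutableNodes and QName classes; the list name, key and key value are hypothetical and serve only to illustrate how a single list entry is re-wrapped in its parent MapNode before being handed to the writer.

// Minimal sketch (hypothetical QNames): re-wrapping a single MapEntryNode in a MapNode.
static MapNode wrapSingleListEntry() {
    final QName listQName = QName.create("urn:example:interfaces", "2016-10-28", "interface"); // hypothetical
    final QName keyQName = QName.create(listQName, "name");                                    // hypothetical
    final MapEntryNode entry = ImmutableNodes.mapEntry(listQName, keyQName, "eth0");
    return ImmutableNodes.mapNodeBuilder(listQName)
        .withChild(entry)
        .build();
}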
Use of org.opendaylight.restconf.common.context.InstanceIdentifierContext in project netconf by opendaylight.
The class RestconfImpl, method prepareIIDSubsStreamOutput:
/**
 * Prepare the instance identifier context pointing at the {@code location} leaf used when subscribing
 * to a notification stream.
 *
 * @return {@link InstanceIdentifierContext} of the location leaf for the notification
 */
private InstanceIdentifierContext<?> prepareIIDSubsStreamOutput() {
    final QName qnameBase = QName.create("subscribe:to:notification", "2016-10-28", "notifi");
    final EffectiveModelContext schemaCtx = controllerContext.getGlobalSchema();
    final DataSchemaNode location = ((ContainerSchemaNode) schemaCtx
        .findModule(qnameBase.getModule()).orElse(null)
        .getDataChildByName(qnameBase))
        .getDataChildByName(QName.create(qnameBase, "location"));
    final List<PathArgument> path = new ArrayList<>();
    path.add(NodeIdentifier.create(qnameBase));
    path.add(NodeIdentifier.create(QName.create(qnameBase, "location")));
    return new InstanceIdentifierContext<SchemaNode>(YangInstanceIdentifier.create(path), location, null, schemaCtx);
}
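The returned context addresses the location leaf under the notifi container of the subscribe:to:notification model. As a usage illustration only, the matching leaf node could be built as in the sketch below; the stream URL value is hypothetical.

// Sketch: building the "location" leaf addressed by the context prepared above (hypothetical URL value).
static LeafNode<String> buildLocationLeaf() {
    final QName qnameBase = QName.create("subscribe:to:notification", "2016-10-28", "notifi");
    final QName locationQName = QName.create(qnameBase, "location");
    return ImmutableNodes.leafNode(locationQName, "ws://localhost:8185/notification-stream/example");
}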
Use of org.opendaylight.restconf.common.context.InstanceIdentifierContext in project netconf by opendaylight.
The class RestconfDocumentedExceptionMapper, method toJsonResponseBody:
private static Object toJsonResponseBody(final NormalizedNodeContext errorsNode,
        final DataNodeContainer errorsSchemaNode) {
    final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    NormalizedNode data = errorsNode.getData();
    final InstanceIdentifierContext<?> context = errorsNode.getInstanceIdentifierContext();
    final DataSchemaNode schema = (DataSchemaNode) context.getSchemaNode();
    final OutputStreamWriter outputWriter = new OutputStreamWriter(outStream, StandardCharsets.UTF_8);
    if (data == null) {
        throw new RestconfDocumentedException(Response.Status.NOT_FOUND);
    }
    boolean isDataRoot = false;
    XMLNamespace initialNs = null;
    SchemaPath path;
    if (context.getSchemaNode() instanceof SchemaContext) {
        isDataRoot = true;
        path = SchemaPath.ROOT;
    } else {
        final List<QName> qNames = context.getInstanceIdentifier().getPathArguments().stream()
            .filter(arg -> !(arg instanceof NodeIdentifierWithPredicates))
            .filter(arg -> !(arg instanceof AugmentationIdentifier))
            .map(PathArgument::getNodeType)
            .collect(Collectors.toList());
        path = SchemaPath.of(Absolute.of(qNames)).getParent();
    }
    if (!schema.isAugmenting() && !(schema instanceof SchemaContext)) {
        initialNs = schema.getQName().getNamespace();
    }
    final JsonWriter jsonWriter = JsonWriterFactory.createJsonWriter(outputWriter);
    final NormalizedNodeStreamWriter jsonStreamWriter = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
        JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context.getSchemaContext()),
        path, initialNs, jsonWriter);
    // We create a delegating writer to special-case error-info: error-info is defined as an empty
    // container in the restconf YANG schema, but we create a leaf node so we can output it. The delegate
    // stream writer validates the node type against the schema and would therefore expect a LeafSchemaNode,
    // whereas the schema has a ContainerSchemaNode; to avoid an error, we override the leaf-node behavior
    // for error-info (a sketch of the resulting JSON shape follows this method).
    final NormalizedNodeStreamWriter streamWriter = new ForwardingNormalizedNodeStreamWriter() {
        private boolean inOurLeaf;

        @Override
        protected NormalizedNodeStreamWriter delegate() {
            return jsonStreamWriter;
        }

        @Override
        public void startLeafNode(final NodeIdentifier name) throws IOException {
            if (name.getNodeType().equals(RestConfModule.ERROR_INFO_QNAME)) {
                inOurLeaf = true;
                jsonWriter.name(RestConfModule.ERROR_INFO_QNAME.getLocalName());
            } else {
                super.startLeafNode(name);
            }
        }

        @Override
        public void scalarValue(final Object value) throws IOException {
            if (inOurLeaf) {
                jsonWriter.value(value.toString());
            } else {
                super.scalarValue(value);
            }
        }

        @Override
        public void endNode() throws IOException {
            if (inOurLeaf) {
                inOurLeaf = false;
            } else {
                super.endNode();
            }
        }
    };
    final NormalizedNodeWriter nnWriter = NormalizedNodeWriter.forStreamWriter(streamWriter);
    try {
        if (isDataRoot) {
            writeDataRoot(outputWriter, nnWriter, (ContainerNode) data);
        } else {
            if (data instanceof MapEntryNode) {
                data = ImmutableNodes.mapNodeBuilder(data.getIdentifier().getNodeType())
                    .withChild((MapEntryNode) data)
                    .build();
            }
            nnWriter.write(data);
        }
        nnWriter.flush();
        outputWriter.flush();
    } catch (final IOException e) {
        LOG.warn("Error writing error response body", e);
    }
    try {
        streamWriter.close();
    } catch (IOException e) {
        LOG.warn("Failed to close stream writer", e);
    }
    return outStream.toString(StandardCharsets.UTF_8);
}
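The net effect of the delegating writer is that error-info, although modelled as an empty container, shows up as an ordinary string member of the JSON error body. The sketch below, written directly against Gson's com.google.gson.stream.JsonWriter with hypothetical error values, illustrates the target shape; whether the top-level member carries a module prefix (for example "ietf-restconf:errors") depends on the schema and writer configuration.

// Sketch (hypothetical values): the general shape of the errors body, produced with a plain Gson JsonWriter.
static String sketchErrorBody() throws IOException {
    final StringWriter out = new StringWriter();
    final JsonWriter writer = new JsonWriter(out);
    writer.beginObject();
    writer.name("errors").beginObject();
    writer.name("error").beginArray().beginObject();
    writer.name("error-type").value("protocol");
    writer.name("error-tag").value("data-missing");
    writer.name("error-message").value("Request could not be completed");
    writer.name("error-info").value("additional detail emitted as a plain string"); // the special-cased leaf
    writer.endObject().endArray();
    writer.endObject();
    writer.endObject();
    writer.flush();
    return out.toString();
}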
Use of org.opendaylight.restconf.common.context.InstanceIdentifierContext in project netconf by opendaylight.
The class XmlNormalizedNodeBodyReader, method parse:
private NormalizedNodeContext parse(final InstanceIdentifierContext<?> pathContext, final Document doc)
        throws XMLStreamException, IOException, ParserConfigurationException, SAXException, URISyntaxException {
    final SchemaNode schemaNodeContext = pathContext.getSchemaNode();
    DataSchemaNode schemaNode;
    boolean isRpc = false;
    if (schemaNodeContext instanceof RpcDefinition) {
        schemaNode = ((RpcDefinition) schemaNodeContext).getInput();
        isRpc = true;
    } else if (schemaNodeContext instanceof DataSchemaNode) {
        schemaNode = (DataSchemaNode) schemaNodeContext;
    } else {
        throw new IllegalStateException("Unknown SchemaNode");
    }
    final String docRootElm = doc.getDocumentElement().getLocalName();
    final String docRootNamespace = doc.getDocumentElement().getNamespaceURI();
    final List<YangInstanceIdentifier.PathArgument> iiToDataList = new ArrayList<>();
    if (isPost() && !isRpc) {
        final Deque<Object> foundSchemaNodes = findPathToSchemaNodeByName(schemaNode, docRootElm, docRootNamespace);
        if (foundSchemaNodes.isEmpty()) {
            throw new IllegalStateException(String.format(
                "Child \"%s\" was not found in parent schema node \"%s\"", docRootElm, schemaNode.getQName()));
        }
        while (!foundSchemaNodes.isEmpty()) {
            final Object child = foundSchemaNodes.pop();
            if (child instanceof AugmentationSchemaNode) {
                final AugmentationSchemaNode augmentSchemaNode = (AugmentationSchemaNode) child;
                iiToDataList.add(DataSchemaContextNode.augmentationIdentifierFrom(augmentSchemaNode));
            } else if (child instanceof DataSchemaNode) {
                schemaNode = (DataSchemaNode) child;
                iiToDataList.add(new YangInstanceIdentifier.NodeIdentifier(schemaNode.getQName()));
            }
        }
    // PUT
    } else if (!isRpc) {
        final QName scQName = schemaNode.getQName();
        Preconditions.checkState(
            docRootElm.equals(scQName.getLocalName()) && docRootNamespace.equals(scQName.getNamespace().toString()),
            String.format("Not correct message root element \"%s\", should be \"%s\"", docRootElm, scQName));
    }
    NormalizedNode parsed;
    final NormalizedNodeResult resultHolder = new NormalizedNodeResult();
    final NormalizedNodeStreamWriter writer = ImmutableNormalizedNodeStreamWriter.from(resultHolder);
    if (schemaNode instanceof ContainerLike || schemaNode instanceof ListSchemaNode
            || schemaNode instanceof LeafSchemaNode) {
        final XmlParserStream xmlParser = XmlParserStream.create(writer,
            SchemaInferenceStack.ofSchemaPath(pathContext.getSchemaContext(), schemaNode.getPath()).toInference());
        xmlParser.traverse(new DOMSource(doc.getDocumentElement()));
        parsed = resultHolder.getResult();
        // When the XML source has a list as its root element, the parser returns a MapNode wrapping a
        // single MapEntryNode. Therefore we now have to extract the MapEntryNode from the parsed MapNode.
        if (parsed instanceof MapNode) {
            final MapNode mapNode = (MapNode) parsed;
            // extracting the MapEntryNode
            parsed = mapNode.body().iterator().next();
        }
        if (schemaNode instanceof ListSchemaNode && isPost()) {
            iiToDataList.add(parsed.getIdentifier());
        }
    } else {
        LOG.warn("Unknown schema node extension {} was not parsed", schemaNode.getClass());
        parsed = null;
    }
    final YangInstanceIdentifier fullIIToData = YangInstanceIdentifier.create(
        Iterables.concat(pathContext.getInstanceIdentifier().getPathArguments(), iiToDataList));
    final InstanceIdentifierContext<? extends SchemaNode> outIIContext = new InstanceIdentifierContext<>(
        fullIIToData, pathContext.getSchemaNode(), pathContext.getMountPoint(), pathContext.getSchemaContext());
    return new NormalizedNodeContext(outIIContext, parsed);
}
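The parsing core of the method above follows a common yangtools pattern: collect stream events into a NormalizedNodeResult via ImmutableNormalizedNodeStreamWriter, drive them from the DOM with XmlParserStream, then read the assembled node. A condensed sketch of just that pattern, assuming an EffectiveModelContext and the Absolute schema path of the target node are already at hand:

// Sketch: parsing a DOM element into a NormalizedNode (model context and target path assumed available).
static NormalizedNode parseElement(final EffectiveModelContext modelContext, final Absolute targetPath,
        final Element element) throws XMLStreamException, IOException, SAXException, URISyntaxException {
    final NormalizedNodeResult resultHolder = new NormalizedNodeResult();
    final NormalizedNodeStreamWriter writer = ImmutableNormalizedNodeStreamWriter.from(resultHolder);
    final XmlParserStream xmlParser = XmlParserStream.create(writer,
        SchemaInferenceStack.of(modelContext, targetPath).toInference());
    xmlParser.traverse(new DOMSource(element));
    return resultHolder.getResult();
}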
Use of org.opendaylight.restconf.common.context.InstanceIdentifierContext in project netconf by opendaylight.
The class XmlToPatchBodyReader, method parse:
private static PatchContext parse(final InstanceIdentifierContext<?> pathContext, final Document doc)
        throws XMLStreamException, IOException, ParserConfigurationException, SAXException, URISyntaxException {
    final List<PatchEntity> resultCollection = new ArrayList<>();
    final String patchId = doc.getElementsByTagName("patch-id").item(0).getFirstChild().getNodeValue();
    final NodeList editNodes = doc.getElementsByTagName("edit");
    for (int i = 0; i < editNodes.getLength(); i++) {
        DataSchemaNode schemaNode = (DataSchemaNode) pathContext.getSchemaNode();
        final Element element = (Element) editNodes.item(i);
        final String operation = element.getElementsByTagName("operation").item(0).getFirstChild().getNodeValue();
        final PatchEditOperation oper = PatchEditOperation.valueOf(operation.toUpperCase(Locale.ROOT));
        final String editId = element.getElementsByTagName("edit-id").item(0).getFirstChild().getNodeValue();
        final String target = element.getElementsByTagName("target").item(0).getFirstChild().getNodeValue();
        final List<Element> values = readValueNodes(element, oper);
        final Element firstValueElement = values != null ? values.get(0) : null;
        // get the namespace from the schema node in the path context, or from the value element
        final String namespace = firstValueElement == null
            ? schemaNode.getQName().getNamespace().toString() : firstValueElement.getNamespaceURI();
        // find the module for that namespace
        final Module module = pathContext.getSchemaContext().findModules(XMLNamespace.of(namespace)).iterator().next();
        // initialize the codec and set the default prefix derived from the module name
        final StringModuleInstanceIdentifierCodec codec = new StringModuleInstanceIdentifierCodec(
            pathContext.getSchemaContext(), module.getName());
        // find the complete path to the target and the target schema node;
        // the target can also be empty (a lone slash)
        YangInstanceIdentifier targetII;
        final SchemaNode targetNode;
        final Inference inference;
        if (target.equals("/")) {
            targetII = pathContext.getInstanceIdentifier();
            targetNode = pathContext.getSchemaContext();
            inference = Inference.ofDataTreePath(pathContext.getSchemaContext(), schemaNode.getQName());
        } else {
            targetII = codec.deserialize(codec.serialize(pathContext.getInstanceIdentifier())
                .concat(prepareNonCondXpath(schemaNode, target.replaceFirst("/", ""), firstValueElement,
                    namespace, module.getQNameModule().getRevision().map(Revision::toString).orElse(null))));
            // move the schema node to the target
            schemaNode = verifyNotNull(codec.getDataContextTree().findChild(targetII).orElseThrow()
                .getDataSchemaNode());
            final SchemaInferenceStack stack = SchemaInferenceStack.of(pathContext.getSchemaContext());
            targetII.getPathArguments().stream()
                .filter(arg -> !(arg instanceof YangInstanceIdentifier.NodeIdentifierWithPredicates))
                .filter(arg -> !(arg instanceof YangInstanceIdentifier.AugmentationIdentifier))
                .forEach(p -> stack.enterSchemaTree(p.getNodeType()));
            final EffectiveStatement<?, ?> parentStmt = stack.exit();
            verify(parentStmt instanceof SchemaNode, "Unexpected parent %s", parentStmt);
            targetNode = (SchemaNode) parentStmt;
            inference = stack.toInference();
        }
        if (targetNode == null) {
            LOG.debug("Target node {} not found in path {}", target, pathContext.getSchemaNode());
            throw new RestconfDocumentedException("Error parsing input", ErrorType.PROTOCOL, ErrorTag.MALFORMED_MESSAGE);
        }
        if (oper.isWithValue()) {
            final NormalizedNode parsed;
            if (schemaNode instanceof ContainerSchemaNode || schemaNode instanceof ListSchemaNode) {
                final NormalizedNodeResult resultHolder = new NormalizedNodeResult();
                final NormalizedNodeStreamWriter writer = ImmutableNormalizedNodeStreamWriter.from(resultHolder);
                final XmlParserStream xmlParser = XmlParserStream.create(writer, inference);
                xmlParser.traverse(new DOMSource(firstValueElement));
                parsed = resultHolder.getResult();
            } else {
                parsed = null;
            }
            // for lists, allow manipulating list items through their parent
            if (targetII.getLastPathArgument() instanceof NodeIdentifierWithPredicates) {
                targetII = targetII.getParent();
            }
            resultCollection.add(new PatchEntity(editId, oper, targetII, parsed));
        } else {
            resultCollection.add(new PatchEntity(editId, oper, targetII));
        }
    }
    return new PatchContext(pathContext, ImmutableList.copyOf(resultCollection), patchId);
}
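For reference, the document walked by this method is a YANG Patch body as defined by RFC 8072: the code reads patch-id once and then, for every edit element, its edit-id, operation, target and value. An illustrative body, held in a Java string for convenience; the example-jukebox data and the relative target path are hypothetical.

// Illustrative YANG Patch XML body (RFC 8072); target module and data are hypothetical.
static final String EXAMPLE_PATCH_BODY =
      "<yang-patch xmlns=\"urn:ietf:params:xml:ns:yang:ietf-yang-patch\">\n"
    + "  <patch-id>add-song-patch</patch-id>\n"
    + "  <edit>\n"
    + "    <edit-id>edit1</edit-id>\n"
    + "    <operation>create</operation>\n"
    + "    <target>/song</target>\n"
    + "    <value>\n"
    + "      <song xmlns=\"http://example.com/ns/example-jukebox\">\n"
    + "        <name>Example Song</name>\n"
    + "      </song>\n"
    + "    </value>\n"
    + "  </edit>\n"
    + "</yang-patch>";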