Add InputStreamNormalizer 93/109293/13
author Robert Varga <robert.varga@pantheon.tech>
Mon, 11 Dec 2023 22:49:54 +0000 (23:49 +0100)
committer Robert Varga <robert.varga@pantheon.tech>
Sat, 16 Dec 2023 08:54:39 +0000 (09:54 +0100)
This patch adds the InputStreamNormalizer interface to act as a unified
API displacing direct use of XmlParserStream/JsonParserStream.

It is modelled after the needs of RFC8040, yet it is useful in general,
as it supports essentially all use cases serviced by the *ParserStreams.

The core implementation logic is in
data.util.AbstractInputStreamNormalizer, which is a subclass of
AbstractCodecFactory.

The JSON-specific part is introduced as well, with the corresponding
InputStreamNormalizerTest.
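
For illustration only, a RESTCONF-style caller might use the new API
roughly as follows (a sketch: modelContext, fooQName and the request
body stream are placeholders assumed to be available to the caller):

    // Obtain a parser bound to the caller's EffectiveModelContext
    InputStreamNormalizer parser =
        JSONCodecFactorySupplier.RFC7951.getShared(modelContext);

    // Parse the payload of a request targeting a top-level data resource
    NormalizationResult<?> result = parser.parseData(
        Inference.ofDataTreePath(modelContext, fooQName), requestBody);
    NormalizedNode data = result.data();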

JIRA: YANGTOOLS-1415
Change-Id: Id95b609257e1a8ab57e3d61896315e3211223176
Signed-off-by: Robert Varga <robert.varga@pantheon.tech>
codec/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactory.java
codec/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/InputStreamNormalizerTest.java [new file with mode: 0644]
data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/InputStreamNormalizer.java [new file with mode: 0644]
data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/NormalizationException.java [new file with mode: 0644]
data/yang-data-util/src/main/java/org/opendaylight/yangtools/yang/data/util/codec/AbstractInputStreamNormalizer.java [new file with mode: 0644]

index c7c666952a3061f8041549d9e5af8419c3b21bed..76a1fc50ffa73a00a44e846f3f15d68f6dfd1254 100644 (file)
@@ -9,13 +9,25 @@ package org.opendaylight.yangtools.yang.data.codec.gson;
 
 import static com.google.common.base.Verify.verifyNotNull;
 
+import com.google.gson.JsonParseException;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
 import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.function.BiFunction;
 import org.eclipse.jdt.annotation.NonNull;
 import org.opendaylight.yangtools.yang.common.QName;
 import org.opendaylight.yangtools.yang.common.QNameModule;
+import org.opendaylight.yangtools.yang.common.UnresolvedQName.Unqualified;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizationException;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizationResult;
 import org.opendaylight.yangtools.yang.data.impl.codec.AbstractIntegerStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.BinaryStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.BitsStringCodec;
@@ -23,10 +35,14 @@ import org.opendaylight.yangtools.yang.data.impl.codec.BooleanStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.DecimalStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.EnumStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.StringStringCodec;
-import org.opendaylight.yangtools.yang.data.util.codec.AbstractCodecFactory;
+import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNormalizedNodeStreamWriter;
+import org.opendaylight.yangtools.yang.data.impl.schema.NormalizationResultHolder;
+import org.opendaylight.yangtools.yang.data.util.codec.AbstractInputStreamNormalizer;
 import org.opendaylight.yangtools.yang.data.util.codec.CodecCache;
 import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
 import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
 import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BooleanTypeDefinition;
@@ -46,12 +62,13 @@ import org.opendaylight.yangtools.yang.model.api.type.Uint64TypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.Uint8TypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnknownTypeDefinition;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
 
 /**
  * Factory for creating JSON equivalents of codecs. Each instance of this object is bound to
  * a particular {@link EffectiveModelContext}, but can be reused by multiple {@link JSONNormalizedNodeStreamWriter}s.
  */
-public abstract sealed class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
+public abstract sealed class JSONCodecFactory extends AbstractInputStreamNormalizer<JSONCodec<?>> {
     @Deprecated(since = "12.0.0", forRemoval = true)
     static final class Lhotka02 extends JSONCodecFactory {
         Lhotka02(final @NonNull EffectiveModelContext context, final @NonNull CodecCache<JSONCodec<?>> cache) {
@@ -225,4 +242,99 @@ public abstract sealed class JSONCodecFactory extends AbstractCodecFactory<JSONC
     abstract JSONCodec<?> wrapDecimalCodec(DecimalStringCodec decimalCodec);
 
     abstract JSONCodec<?> wrapIntegerCodec(AbstractIntegerStringCodec<?, ?> integerCodec);
+
+    @Override
+    protected final NormalizationResult<ContainerNode> parseDatastore(final InputStream stream,
+            final NodeIdentifier containerName, final Unqualified moduleName)
+                throws IOException, NormalizationException {
+        // This is a bit more involved: given this example document:
+        //
+        //          {
+        //            "ietf-restconf:data" : {
+        //              "foo:foo" : {
+        //                "str" : "str"
+        //              }
+        //            }
+        //          }
+        //
+        // we need to first peel this part:
+        //
+        //          {
+        //            "ietf-restconf:data" :
+        //
+        // validating that the name matches the expected root and that it is followed by '{', i.e. it is an object.
+        //
+        // We then need to essentially do the equivalent of parseStream() on the EffectiveModelContext, but the receiver
+        // should be the builder for our resulting node -- we cannot and do not want to use a holder, as there can
+        // legally be more than one child.
+        //
+        // Then we need to take care of the last closing brace, raising an error if there is any other content -- i.e.
+        // we need to reach the end of JsonReader.
+        //
+        // And then it's just a matter of returning the built container.
+        try (var reader = new JsonReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
+            reader.beginObject();
+            final var name = reader.nextName();
+            final var expected = moduleName.getLocalName() + ':' + containerName.getNodeType().getLocalName();
+            if (!expected.equals(name)) {
+                throw NormalizationException.ofMessage("Expected name '" + expected + "', got '" + name + "'");
+            }
+
+            final var builder = Builders.containerBuilder().withNodeIdentifier(containerName);
+
+            if (reader.peek() == JsonToken.BEGIN_OBJECT) {
+                try (var writer = ImmutableNormalizedNodeStreamWriter.from(builder)) {
+                    try (var parser = JsonParserStream.create(writer, this)) {
+                        parser.parse(reader);
+                    } catch (JsonParseException e) {
+                        throw NormalizationException.ofCause(e);
+                    }
+                }
+            }
+
+            reader.endObject();
+            final var nextToken = reader.peek();
+            if (nextToken != JsonToken.END_DOCUMENT) {
+                throw NormalizationException.ofMessage("Expected end of JSON document, got " + nextToken);
+            }
+            return new NormalizationResult<>(builder.build());
+        } catch (IllegalStateException e) {
+            throw NormalizationException.ofCause(e);
+        }
+    }
+
+    @Override
+    protected final NormalizationResult<?> parseData(final SchemaInferenceStack stack, final InputStream stream)
+            throws IOException, NormalizationException {
+        // Point to parent node
+        stack.exit();
+        return parseStream(stack.toInference(), stream);
+    }
+
+    @Override
+    protected final NormalizationResult<?> parseChildData(final InputStream stream,
+            final EffectiveStatementInference inference) throws IOException, NormalizationException {
+        return parseStream(inference, stream);
+    }
+
+    @Override
+    protected final NormalizationResult<?> parseInputOutput(final SchemaInferenceStack stack, final QName expected,
+            final InputStream stream) throws IOException, NormalizationException {
+        return checkNodeName(parseStream(stack.toInference(), stream), expected);
+    }
+
+    private @NonNull NormalizationResult<?> parseStream(final @NonNull EffectiveStatementInference inference,
+            final @NonNull InputStream stream) throws IOException, NormalizationException {
+        try (var reader = new JsonReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
+            final var holder = new NormalizationResultHolder();
+            try (var writer = ImmutableNormalizedNodeStreamWriter.from(holder)) {
+                try (var parser = JsonParserStream.create(writer, this, inference)) {
+                    parser.parse(reader);
+                } catch (JsonParseException e) {
+                    throw NormalizationException.ofCause(e);
+                }
+            }
+            return holder.getResult();
+        }
+    }
 }
diff --git a/codec/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/InputStreamNormalizerTest.java b/codec/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/InputStreamNormalizerTest.java
new file mode 100644 (file)
index 0000000..cc0b84a
--- /dev/null
@@ -0,0 +1,439 @@
+/*
+ * Copyright (c) 2023 PANTHEON.tech, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.codec.gson;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Map;
+import org.eclipse.jdt.annotation.NonNull;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.function.Executable;
+import org.opendaylight.yangtools.yang.common.ErrorSeverity;
+import org.opendaylight.yangtools.yang.common.ErrorTag;
+import org.opendaylight.yangtools.yang.common.ErrorType;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.common.Uint32;
+import org.opendaylight.yangtools.yang.common.UnresolvedQName.Unqualified;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
+import org.opendaylight.yangtools.yang.data.api.YangNetconfError;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.InputStreamNormalizer;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizationException;
+import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack.Inference;
+import org.opendaylight.yangtools.yang.test.util.YangParserTestUtils;
+
+class InputStreamNormalizerTest {
+    private static final EffectiveModelContext MODEL_CONTEXT = YangParserTestUtils.parseYang("""
+            module foo {
+              yang-version 1.1;
+              prefix foo;
+              namespace foo;
+
+              container foo {
+                leaf str {
+                  type string {
+                    length 3;
+                  }
+                }
+              }
+
+              container bar {
+                leaf uint {
+                  type uint32;
+                }
+              }
+
+              list baz {
+                key "one two";
+                leaf one {
+                  type boolean;
+                }
+                leaf two {
+                  type string;
+                }
+
+                action qux {
+                  input {
+                    leaf str {
+                      type string;
+                    }
+                  }
+                }
+              }
+
+              rpc thud {
+                input {
+                  leaf uint {
+                    type uint32;
+                  }
+                }
+              }
+
+              choice ch1 {
+                choice ch2 {
+                  leaf str {
+                    type string;
+                  }
+                }
+              }
+            }""");
+    private static final InputStreamNormalizer PARSER = JSONCodecFactorySupplier.RFC7951.getShared(MODEL_CONTEXT);
+    private static final QName FOO = QName.create("foo", "foo");
+    private static final QName BAR = QName.create("foo", "bar");
+    private static final QName BAZ = QName.create("foo", "baz");
+    private static final QName QUX = QName.create("foo", "qux");
+    private static final QName THUD = QName.create("foo", "thud");
+    private static final QName ONE = QName.create("foo", "one");
+    private static final QName TWO = QName.create("foo", "two");
+    private static final QName STR = QName.create("foo", "str");
+    private static final QName UINT = QName.create("foo", "uint");
+
+    private static final @NonNull NodeIdentifier DATA_NID = new NodeIdentifier(
+        QName.create("urn:ietf:params:xml:ns:yang:ietf-restconf", "2017-01-26", "data"));
+    private static final @NonNull Unqualified RESTCONF_MODULE = Unqualified.of("ietf-restconf");
+
+    @Test
+    void parseDatastore() throws Exception {
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(DATA_NID)
+            .withChild(Builders.containerBuilder()
+                .withNodeIdentifier(new NodeIdentifier(FOO))
+                .withChild(ImmutableNodes.leafNode(STR, "str"))
+                .build())
+            .withChild(Builders.containerBuilder()
+                .withNodeIdentifier(new NodeIdentifier(BAR))
+                .withChild(ImmutableNodes.leafNode(UINT, Uint32.TWO))
+                .build())
+            .build(),
+            PARSER.parseDatastore(DATA_NID, RESTCONF_MODULE, stream("""
+                {
+                  "ietf-restconf:data" : {
+                    "foo:foo" : {
+                      "str" : "str"
+                    },
+                    "foo:bar" : {
+                      "uint" : 2
+                    }
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseData() throws Exception {
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(FOO))
+            .withChild(ImmutableNodes.leafNode(STR, "str"))
+            .build(),
+            PARSER.parseData(Inference.ofDataTreePath(MODEL_CONTEXT, FOO), stream("""
+                {
+                  "foo:foo" : {
+                    "str" : "str"
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseDataBadType() throws Exception {
+        final var error = assertError(() -> PARSER.parseData(Inference.ofDataTreePath(MODEL_CONTEXT, FOO), stream("""
+            {
+              "foo:foo" : {
+                "str" : "too long"
+              }
+            }""")));
+        assertEquals(ErrorType.APPLICATION, error.type());
+        assertEquals(ErrorTag.INVALID_VALUE, error.tag());
+    }
+
+    @Test
+    void parseDataBadRootElement() throws Exception {
+        assertMismatchedError("(foo)foo", "(foo)bar",
+            () -> PARSER.parseData(Inference.ofDataTreePath(MODEL_CONTEXT, FOO), stream("""
+                {
+                  "foo:bar" : {
+                    "uint" : 23
+                  }
+                }""")));
+    }
+
+    @Test
+    void parseDataBadInference() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(THUD);
+
+        final var ex = assertThrows(IllegalArgumentException.class,
+            () -> PARSER.parseData(stack.toInference(), stream("")));
+        assertEquals("Invalid inference statement RpcEffectiveStatementImpl{argument=(foo)thud}", ex.getMessage());
+    }
+
+    @Test
+    void parseDataEmptyInference() throws Exception {
+        final var inference = Inference.of(MODEL_CONTEXT);
+
+        final var ex = assertThrows(IllegalArgumentException.class, () -> PARSER.parseData(inference, stream("")));
+        assertEquals("Inference must not be empty", ex.getMessage());
+    }
+
+    @Test
+    void parseChildData() throws Exception {
+        final var prefixAndNode = PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:foo" : {
+                "str" : "str"
+              }
+            }"""));
+
+        assertEquals(List.of(), prefixAndNode.prefix());
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(FOO))
+            .withChild(ImmutableNodes.leafNode(STR, "str"))
+            .build(), prefixAndNode.result().data());
+    }
+
+    @Test
+    void parseChildDataChoices() throws Exception {
+        final var prefixAndNode = PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:str" : "str"
+            }"""));
+        assertEquals(List.of(
+            new NodeIdentifier(QName.create("foo", "ch1")),
+            new NodeIdentifier(QName.create("foo", "ch2"))), prefixAndNode.prefix());
+        assertEquals(ImmutableNodes.leafNode(STR, "str"), prefixAndNode.result().data());
+    }
+
+    @Test
+    void parseChildDataListEntry() throws Exception {
+        final var prefixAndNode = PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:baz" : [
+                {
+                  "one" : true,
+                  "two" : "two"
+                }
+              ]
+            }"""));
+        assertEquals(List.of(new NodeIdentifier(BAZ)), prefixAndNode.prefix());
+        assertEquals(Builders.mapEntryBuilder()
+            .withNodeIdentifier(NodeIdentifierWithPredicates.of(BAZ, Map.of(ONE, Boolean.TRUE, TWO, "two")))
+            .withChild(ImmutableNodes.leafNode(ONE, Boolean.TRUE))
+            .withChild(ImmutableNodes.leafNode(TWO, "two"))
+            .build(), prefixAndNode.result().data());
+    }
+
+    @Test
+    void parseChildDataListEntryOnly() throws Exception {
+        // FIXME: this needs to be rejected, as it is an illegal format for a list resource, as per:
+        //
+        //        https://www.rfc-editor.org/rfc/rfc8040#section-4.4.1:
+        //
+        //        The message-body is expected to contain the
+        //        content of a child resource to create within the parent (target
+        //        resource).  The message-body MUST contain exactly one instance of the
+        //        expected data resource.  The data model for the child tree is the
+        //        subtree, as defined by YANG for the child resource.
+        //
+        //        https://www.rfc-editor.org/rfc/rfc7951#section-5.4:
+        //
+        //        the following is a valid JSON-encoded instance:
+        //
+        //            "bar": [
+        //              {
+        //                "foo": 123,
+        //                "baz": "zig"
+        //              },
+        //              {
+        //                "baz": "zag",
+        //                "foo": 0
+        //              }
+        //            ]
+        final var prefixAndNode = PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:baz" : {
+                "one" : true,
+                "two" : "two"
+              }
+            }"""));
+        assertEquals(List.of(new NodeIdentifier(BAZ)), prefixAndNode.prefix());
+        assertEquals(Builders.mapEntryBuilder()
+            .withNodeIdentifier(NodeIdentifierWithPredicates.of(BAZ, Map.of(ONE, Boolean.TRUE, TWO, "two")))
+            .withChild(ImmutableNodes.leafNode(ONE, Boolean.TRUE))
+            .withChild(ImmutableNodes.leafNode(TWO, "two"))
+            .build(), prefixAndNode.result().data());
+    }
+
+    @Test
+    void parseChildDataListEntryNone() throws Exception {
+        final var error = assertError(() -> PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:baz" : [
+              ]
+            }""")));
+        assertEquals(ErrorType.PROTOCOL, error.type());
+        assertEquals(ErrorTag.MALFORMED_MESSAGE, error.tag());
+        assertEquals("Exactly one instance of (foo)baz is required, 0 supplied", error.message());
+    }
+
+    @Test
+    void parseChildDataListEntryTwo() throws Exception {
+        final var error = assertError(() -> PARSER.parseChildData(Inference.of(MODEL_CONTEXT), stream("""
+            {
+              "foo:baz" : [
+                {
+                  "one" : false,
+                  "two" : "two"
+                },
+                {
+                  "one" : true,
+                  "two" : "two"
+                }
+              ]
+            }""")));
+        assertEquals(ErrorType.PROTOCOL, error.type());
+        assertEquals(ErrorTag.MALFORMED_MESSAGE, error.tag());
+        assertEquals("Exactly one instance of (foo)baz is required, 2 supplied", error.message());
+    }
+
+    @Test
+    void parseInputRpc() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(THUD);
+
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(QName.create("foo", "input")))
+            .withChild(ImmutableNodes.leafNode(UINT, Uint32.TWO))
+            .build(),
+            PARSER.parseInput(stack.toInference(), stream("""
+                {
+                  "foo:input" : {
+                    "uint" : 2
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseInputRpcBadRootElement() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(THUD);
+
+        assertMismatchedError("(foo)input", "(foo)output", () -> PARSER.parseInput(stack.toInference(), stream("""
+            {
+              "foo:output" : {
+              }
+            }""")));
+    }
+
+    @Test
+    void parseInputAction() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(BAZ);
+        stack.enterSchemaTree(QUX);
+
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(QName.create("foo", "input")))
+            .withChild(ImmutableNodes.leafNode(STR, "str"))
+            .build(),
+            PARSER.parseInput(stack.toInference(), stream("""
+                {
+                  "foo:input" : {
+                    "str" : "str"
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseInputBadInference() {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(BAZ);
+
+        final var ex = assertThrows(IllegalArgumentException.class,
+            () -> PARSER.parseInput(stack.toInference(), stream("")));
+        assertEquals("Invalid inference statement EmptyListEffectiveStatement{argument=(foo)baz}", ex.getMessage());
+    }
+
+    @Test
+    void parseOutputRpc() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(THUD);
+
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(QName.create("foo", "output")))
+            .build(),
+            PARSER.parseOutput(stack.toInference(), stream("""
+                {
+                  "foo:output" : {
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseOutputRpcBadRootElement() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(THUD);
+
+        assertMismatchedError("(foo)output", "(foo)input", () -> PARSER.parseOutput(stack.toInference(), stream("""
+            {
+              "foo:input" : {
+              }
+            }""")));
+    }
+
+    @Test
+    void parseOutputAction() throws Exception {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(BAZ);
+        stack.enterSchemaTree(QUX);
+
+        assertEquals(Builders.containerBuilder()
+            .withNodeIdentifier(new NodeIdentifier(QName.create("foo", "output")))
+            .build(),
+            PARSER.parseOutput(stack.toInference(), stream("""
+                {
+                  "foo:output" : {
+                  }
+                }""")).data());
+    }
+
+    @Test
+    void parseOutputBadInference() {
+        final var stack = SchemaInferenceStack.of(MODEL_CONTEXT);
+        stack.enterSchemaTree(BAZ);
+
+        final var ex = assertThrows(IllegalArgumentException.class,
+            () -> PARSER.parseOutput(stack.toInference(), stream("")));
+        assertEquals("Invalid inference statement EmptyListEffectiveStatement{argument=(foo)baz}", ex.getMessage());
+    }
+
+    private static @NonNull InputStream stream(final String str) {
+        return new ByteArrayInputStream(str.getBytes(StandardCharsets.UTF_8));
+    }
+
+    private static void assertMismatchedError(final String expected, final String actual, final Executable executable) {
+        final var error = assertError(executable);
+        assertEquals(ErrorType.PROTOCOL, error.type());
+        assertEquals(ErrorTag.MALFORMED_MESSAGE, error.tag());
+        assertEquals("Payload name " + actual + " is different from identifier name " + expected, error.message());
+    }
+
+    private static YangNetconfError assertError(final Executable executable) {
+        final var ex = assertThrows(NormalizationException.class, executable);
+        final var errors = ex.getNetconfErrors();
+        assertEquals(1, errors.size());
+        final var error = errors.get(0);
+        assertEquals(ErrorSeverity.ERROR, error.severity());
+        return error;
+    }
+}
diff --git a/data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/InputStreamNormalizer.java b/data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/InputStreamNormalizer.java
new file mode 100644 (file)
index 0000000..bce3ecd
--- /dev/null
@@ -0,0 +1,206 @@
+/*
+ * Copyright (c) 2023 PANTHEON.tech, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.api.schema.stream;
+
+import static java.util.Objects.requireNonNull;
+
+import java.io.InputStream;
+import java.util.List;
+import org.eclipse.jdt.annotation.NonNullByDefault;
+import org.opendaylight.yangtools.yang.common.UnresolvedQName.Unqualified;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
+
+/**
+ * Interface for parsing {@link InputStream}s containing YANG-modeled data. While the design of this interface is
+ * guided by what a typical implementation of a <a href="https://www.rfc-editor.org/rfc/rfc8040">RESTCONF</a> server or
+ * client might require, it is not limited solely to that use case and should be used wherever its methods provide
+ * the required semantics.
+ *
+ * <p>
+ * The core assumption is that the user knows the general context in which a particular document, provided as an
+ * {@link InputStream}, needs to be interpreted.
+ *
+ * <p>
+ * In RESTCONF that context is provided by the HTTP request method and the HTTP request URI. On the server side these
+ * are expected to differentiate between requests to
+ * <ul>
+ *   <li>invoke an {@code rpc} or an {@code action}, catered to by
+ *       {@link #parseInput(EffectiveStatementInference, InputStream)}</li>
+ *   <li>replace the contents of a particular data store, catered to by
+ *       {@link #parseDatastore(NodeIdentifier, Unqualified, InputStream)}</li>
+ *   <li>create, replace or otherwise modify a directly identified data store resource, catered to by
+ *       {@link #parseData(EffectiveStatementInference, InputStream)}</li>
+ *   <li>create an indirectly identified data store resource, catered to by
+ *       {@link #parseChildData(EffectiveStatementInference, InputStream)}</li>
+ * </ul>
+ * On the client side, these similarly differentiate between responses to
+ * <ul>
+ *   <li>invoke an {@code rpc} or an {@code action}, catered to by
+ *       {@link #parseOutput(EffectiveStatementInference, InputStream)}</li>
+ *   <li>replace the contents of a particular data store, catered to by
+ *       {@link #parseDatastore(NodeIdentifier, Unqualified, InputStream)}</li>
+ *   <li>create, replace or otherwise modify a directly identified data store resource, catered to by
+ *       {@link #parseData(EffectiveStatementInference, InputStream)}</li>
+ * </ul>
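+ *
+ * <p>
+ * As a purely illustrative sketch ({@code parser}, the parent inference and the request body stream are assumed to
+ * be supplied by the caller), a server creating an indirectly identified child resource might do:
+ * <pre>{@code
+ *   PrefixAndResult parsed = parser.parseChildData(parentInference, requestBody);
+ *   // steps to append to the request path in order to address the parent of the parsed node
+ *   List<PathArgument> prefix = parsed.prefix();
+ *   NormalizedNode data = parsed.result().data();
+ * }</pre>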
+ */
+@NonNullByDefault
+public interface InputStreamNormalizer {
+    /*
+     * API design notes
+     *
+     * This interface uses EffectiveStatementInference in places where YangInstanceIdentifier might be convenient. This
+     * is on purpose: we want this interface to build on the standards-based yang-model-api while providing enough
+     * rope for integration with YangInstanceIdentifier, without requiring users to use it.
+     *
+     * The reason for that is that an empty YangInstanceIdentifier is not really a YANG construct, but rather something
+     * yang-data-tree-api (mis)uses.
+     *
+     * Furthermore, we do not want to force users to provide a YangInstanceIdentifier for efficiency reasons. In the
+     * case of RESTCONF, which is guiding the design here, the caller would acquire a YangInstanceIdentifier through
+     * parsing the request URL. That means the caller was already dealing with yang-model-api, has therefore likely
+     * seen a SchemaInferenceStack corresponding to that identifier and can take a snapshot in the form of an
+     * EffectiveStatementInference. This has the added benefit of keeping semantics clear: we expect inferences to be
+     * the result of YANG-defined processing, without introducing the additional friction of having to deal with the
+     * differences in data tree addressing. Again, we provide enough rope to bridge that gap easily if the user needs
+     * to do so.
+     *
+     * Another case for not exposing YangInstanceIdentifier-based methods is that implementations of this interface are
+     * expected to be bound to an EffectiveModelContext, but we do not want to expose that by having this interface
+     * extend EffectiveModelContextProvider -- at the end of the day implementations may provide the required
+     * functionality through hard-coding against some concrete set of YANG models.
+     *
+     * PrefixAndResult uses an explicit List<PathArgument> instead of a relative YangInstanceIdentifier in order to
+     * make a clear distinction of use: the prefix is meant to be interpreted and must not be confused with something
+     * that can, for example, be stored as a 'type instance-identifier' value or a DataTreeSnapshot.readNode() argument.
+     *
+     * Similar reasoning goes for the use of EffectiveStatementInference: it is a generalised concept which could be
+     * used to reduce the number of methods in this interface, yet each method places explicit requirements on what
+     * an acceptable EffectiveStatementInference argument looks like. This is done on purpose, so that we bind to the
+     * explicit semantics of that particular method, i.e. we are explicit about each method's semantics rather than
+     * overloading methods with multiple semantic modes.
+     */
+
+    /**
+     * A DTO capturing the result of
+     * {@link InputStreamNormalizer#parseChildData(EffectiveStatementInference, InputStream)}.
+     *
+     * @param prefix {@link YangInstanceIdentifier} steps that need to be concatenated to the request path to form
+     *               a {@link YangInstanceIdentifier} pointing to the immediate parent of {@link #result}.
+     * @param result a {@link NormalizationResult}
+     */
+    record PrefixAndResult(List<PathArgument> prefix, NormalizationResult<?> result) {
+        /**
+         * Default constructor.
+         *
+         * @param prefix {@link YangInstanceIdentifier} steps that need to be concatenated to the request path to form
+         *               a {@link YangInstanceIdentifier} pointing to the immediate parent of {@link #result}.
+         * @param result parsed data
+         */
+        public PrefixAndResult {
+            prefix = List.copyOf(prefix);
+            requireNonNull(result);
+        }
+    }
+
+    /**
+     * Parse the contents of an {@link InputStream} as the contents of a data store.
+     *
+     * <p>
+     * This method's signature is a bit counter-intuitive. {@code containerName} and {@code moduleName} collectively
+     * encode the expected root element, which may not be expressed in the underlying YANG data model.
+     *
+     * <p>
+     * The reason for this is that YANG does not define an explicit {@link NodeIdentifier} of the datastore root
+     * resource, but protocol encodings require this conceptual root to be encapsulated in protocol documents and the
+     * approaches taken differ from protocol to protocol. NETCONF operates in terms of YANG-modeled RPC operations,
+     * where this conceptual root is given an anchor -- {@code get-config} output's {@code anyxml data}. RESTCONF
+ * operates in terms of HTTP payloads and while it models such an anchor, it is a rather unnatural
+ * {@code container data} whose description defines its magic properties, and it is not feasible for the YANG parser
+ * to help us with that.
+     *
+     * <p>
+     * Therefore this method takes the name of the root element in two arguments, which together define its value in
+     * both JSON-based (module + localName) and XML-based (namespace + localName) encodings. Implementations of this
+     * method are expected to use this information and treat the root element outside of their usual YANG-informed
+     * processing.
+     *
+     * <p>
+     * For example, XML parsers will pick {@code containerName.getNodeType().getNamespace()} to match the root element's
+     * namespace and {@code containerName.getNodeType().getLocalName()} to match the element's local name. JSON parsers,
+     * on the other hand, will use {@code moduleName} and {@code containerName.getNodeType().getLocalName()} to match
+     * the top-level JSON object's sole named member.
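+     *
+     * <p>
+     * As an illustration only ({@code parser} and {@code stream} are assumed to be available), a RESTCONF server
+     * parsing a JSON payload representing an entire datastore might invoke:
+     * <pre>{@code
+     *   NormalizationResult<ContainerNode> result = parser.parseDatastore(
+     *       new NodeIdentifier(QName.create("urn:ietf:params:xml:ns:yang:ietf-restconf", "2017-01-26", "data")),
+     *       Unqualified.of("ietf-restconf"), stream);
+     *   ContainerNode root = result.data();
+     * }</pre>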
+     *
+     * @param containerName expected root container name
+     * @param moduleName module name corresponding to {@code containerName}
+     * @param stream the {@link InputStream} to parse
+     * @return parsed {@link ContainerNode} corresponding to the data store root, with its {@link ContainerNode#name()}
+     *         equal to {@code containerName}.
+     * @throws NullPointerException if any argument is {@code null}
+     * @throws NormalizationException if an error occurs
+     */
+    NormalizationResult<ContainerNode> parseDatastore(NodeIdentifier containerName, Unqualified moduleName,
+        InputStream stream) throws NormalizationException;
+
+    /**
+     * Parse the contents of an {@link InputStream} as a data resource.
+     *
+     * @param inference pointer to the data resource
+     * @param stream the {@link InputStream} to parse
+     * @return Parsed {@link NormalizedNode} corresponding to the requested resource
+     * @throws NullPointerException if any argument is {@code null}
+     * @throws IllegalArgumentException if {@code inference} does not point to a resource recognized by this parser
+     * @throws NormalizationException if an error occurs
+     */
+    NormalizationResult<?> parseData(EffectiveStatementInference inference, InputStream stream)
+        throws NormalizationException;
+
+    /**
+     * Parse the contents of an {@link InputStream} as a child data resource.
+     *
+     * @param parentInference pointer to the parent of the data resource
+     * @param stream the {@link InputStream} to parse
+     * @return A {@link PrefixAndResult} containing parsed resource data and any {@link YangInstanceIdentifier} steps
+     *         that need to be appended between {@code parentInference} and the parsed {@link NormalizedNode}
+     * @throws NullPointerException if any argument is {@code null}
+     * @throws IllegalArgumentException if {@code inference} does not point to a resource recognized by this parser
+     * @throws NormalizationException if an error occurs
+     */
+    PrefixAndResult parseChildData(EffectiveStatementInference parentInference, InputStream stream)
+        throws NormalizationException;
+
+    /**
+     * Parse the contents of an {@link InputStream} as an operation {@code input}.
+     *
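+     * <p>
+     * For example (a sketch only; {@code parser}, the model context and the stream are assumed to be available),
+     * parsing the input of an {@code rpc} invocation:
+     * <pre>{@code
+     *   SchemaInferenceStack stack = SchemaInferenceStack.of(modelContext);
+     *   stack.enterSchemaTree(rpcName);
+     *   ContainerNode input = parser.parseInput(stack.toInference(), stream).data();
+     * }</pre>
+     *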
+     * @param operationInference pointer to the operation
+     * @param stream the {@link InputStream} to parse
+     * @return Parsed {@link ContainerNode} corresponding to the operation input
+     * @throws NullPointerException if any argument is {@code null}
+     * @throws IllegalArgumentException if {@code inference} does not point to an operation recognized by this parser
+     * @throws NormalizationException if an error occurs
+     */
+    NormalizationResult<ContainerNode> parseInput(EffectiveStatementInference operationInference, InputStream stream)
+        throws NormalizationException;
+
+    /**
+     * Parse the contents of an {@link InputStream} as an operation {@code output}.
+     *
+     * @param operationInference pointer to the operation
+     * @param stream the {@link InputStream} to parse
+     * @return Parsed {@link ContainerNode} corresponding to the operation output
+     * @throws NullPointerException if any argument is {@code null}
+     * @throws IllegalArgumentException if {@code inference} does not point to an operation recognized by this parser
+     * @throws NormalizationException if an error occurs
+     */
+    NormalizationResult<ContainerNode> parseOutput(EffectiveStatementInference operationInference, InputStream stream)
+        throws NormalizationException;
+}
diff --git a/data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/NormalizationException.java b/data/yang-data-api/src/main/java/org/opendaylight/yangtools/yang/data/api/schema/stream/NormalizationException.java
new file mode 100644 (file)
index 0000000..d3a6cfb
--- /dev/null
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2023 PANTHEON.tech, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.api.schema.stream;
+
+import static java.util.Objects.requireNonNull;
+
+import java.io.IOException;
+import java.io.NotSerializableException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ObjectStreamException;
+import java.util.List;
+import org.eclipse.jdt.annotation.NonNull;
+import org.opendaylight.yangtools.yang.common.ErrorSeverity;
+import org.opendaylight.yangtools.yang.common.ErrorTag;
+import org.opendaylight.yangtools.yang.common.ErrorType;
+import org.opendaylight.yangtools.yang.data.api.ImmutableYangNetconfError;
+import org.opendaylight.yangtools.yang.data.api.YangNetconfError;
+import org.opendaylight.yangtools.yang.data.api.YangNetconfErrorAware;
+
+/**
+ * This exception is typically reported by methods which normalize some external format into a
+ * {@link NormalizationResult}. It can be mapped to one or more {@link YangNetconfError}s.
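+ *
+ * <p>
+ * A minimal sketch of how a caller might react ({@code parser}, {@code inference} and {@code stream} are assumptions
+ * of this example, not part of this API):
+ * <pre>{@code
+ *   try {
+ *       parser.parseData(inference, stream);
+ *   } catch (NormalizationException e) {
+ *       for (YangNetconfError error : e.getNetconfErrors()) {
+ *           // translate error.type(), error.tag() and error.message() into protocol-specific error reporting
+ *       }
+ *   }
+ * }</pre>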
+ */
+public final class NormalizationException extends Exception implements YangNetconfErrorAware {
+    @java.io.Serial
+    private static final long serialVersionUID = 1L;
+
+    private final @NonNull List<@NonNull YangNetconfError> netconfErrors;
+
+    private NormalizationException(final Throwable cause) {
+        super(cause);
+        netconfErrors = constructErrors(getMessage());
+    }
+
+    private NormalizationException(final String message, final Throwable cause,
+            final List<@NonNull YangNetconfError> netconfErrors) {
+        super(message, cause);
+        this.netconfErrors = requireNonNull(netconfErrors);
+    }
+
+    public static @NonNull NormalizationException ofMessage(final String message) {
+        return new NormalizationException(requireNonNull(message), null, constructErrors(message));
+    }
+
+    public static @NonNull NormalizationException ofCause(final Throwable cause) {
+        if (cause instanceof YangNetconfErrorAware aware) {
+            final var errors = aware.getNetconfErrors();
+            if (!errors.isEmpty()) {
+                return new NormalizationException(cause.getMessage(), cause, List.copyOf(errors));
+            }
+        }
+        return new NormalizationException(cause);
+    }
+
+    @Override
+    public List<@NonNull YangNetconfError> getNetconfErrors() {
+        return netconfErrors;
+    }
+
+    @java.io.Serial
+    private void readObject(final ObjectInputStream stream) throws IOException, ClassNotFoundException {
+        throwNSE();
+    }
+
+    @java.io.Serial
+    private void readObjectNoData() throws ObjectStreamException {
+        throwNSE();
+    }
+
+    @java.io.Serial
+    private void writeObject(final ObjectOutputStream stream) throws IOException {
+        throwNSE();
+    }
+
+    private static @NonNull List<@NonNull YangNetconfError> constructErrors(final @NonNull String message) {
+        return List.of(ImmutableYangNetconfError.builder()
+            .severity(ErrorSeverity.ERROR)
+            .type(ErrorType.PROTOCOL)
+            .tag(ErrorTag.MALFORMED_MESSAGE)
+            .message(message)
+            .build());
+    }
+
+    private static void throwNSE() throws NotSerializableException {
+        throw new NotSerializableException(NormalizationException.class.getName());
+    }
+}
diff --git a/data/yang-data-util/src/main/java/org/opendaylight/yangtools/yang/data/util/codec/AbstractInputStreamNormalizer.java b/data/yang-data-util/src/main/java/org/opendaylight/yangtools/yang/data/util/codec/AbstractInputStreamNormalizer.java
new file mode 100644 (file)
index 0000000..6d76f23
--- /dev/null
@@ -0,0 +1,236 @@
+/*
+ * Copyright (c) 2023 PANTHEON.tech, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.util.codec;
+
+import static java.util.Objects.requireNonNull;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import org.eclipse.jdt.annotation.NonNull;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.common.UnresolvedQName.Unqualified;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.ChoiceNode;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
+import org.opendaylight.yangtools.yang.data.api.schema.LeafSetNode;
+import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNodeContainer;
+import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListNode;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.InputStreamNormalizer;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizationException;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizationResult;
+import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
+import org.opendaylight.yangtools.yang.model.api.stmt.ActionEffectiveStatement;
+import org.opendaylight.yangtools.yang.model.api.stmt.DataTreeEffectiveStatement;
+import org.opendaylight.yangtools.yang.model.api.stmt.RpcEffectiveStatement;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
+
+/**
+ * An {@link AbstractCodecFactory} which additionally provides services defined in {@link InputStreamNormalizer}.
+ *
+ * <p>
+ * This class exists only because both JSON and XML implementations of {@link InputStreamNormalizer} are naturally
+ * hosted in their respective {@link AbstractCodecFactory} implementations and this is therefore a convenient place
+ * to share common implementation bits.
+ */
+public abstract class AbstractInputStreamNormalizer<T extends TypeAwareCodec<?, ?, ?>>
+        extends AbstractCodecFactory<T> implements InputStreamNormalizer {
+    protected AbstractInputStreamNormalizer(final @NonNull EffectiveModelContext schemaContext,
+            final @NonNull CodecCache<T> cache) {
+        super(schemaContext, cache);
+    }
+
+    @Override
+    public final NormalizationResult<ContainerNode> parseDatastore(final NodeIdentifier containerName,
+            final Unqualified moduleName, final InputStream stream) throws NormalizationException {
+        try {
+            return parseDatastore(requireNonNull(stream), requireNonNull(containerName), requireNonNull(moduleName));
+        } catch (IOException | IllegalArgumentException e) {
+            throw NormalizationException.ofCause(e);
+        }
+    }
+
+    protected abstract @NonNull NormalizationResult<ContainerNode> parseDatastore(@NonNull InputStream stream,
+        @NonNull NodeIdentifier containerName, @NonNull Unqualified moduleName)
+            throws IOException, NormalizationException;
+
+    @Override
+    public final NormalizationResult<?> parseData(final EffectiveStatementInference inference, final InputStream stream)
+            throws NormalizationException {
+        final var stack = checkInferenceNotEmpty(inference);
+        final var stmt = stack.currentStatement();
+        if (!(stmt instanceof DataTreeEffectiveStatement<?> dataStmt)) {
+            throw new IllegalArgumentException("Invalid inference statement " + stmt);
+        }
+
+        final NormalizationResult<?> data;
+        try {
+            data = parseData(stack, requireNonNull(stream));
+        } catch (IOException | IllegalArgumentException e) {
+            throw NormalizationException.ofCause(e);
+        }
+        return checkNodeName(data, dataStmt.argument());
+    }
+
+    protected abstract @NonNull NormalizationResult<?> parseData(@NonNull SchemaInferenceStack stack,
+        @NonNull InputStream stream) throws IOException, NormalizationException;
+
+    @Override
+    public final PrefixAndResult parseChildData(final EffectiveStatementInference inference, final InputStream stream)
+            throws NormalizationException {
+        checkInference(inference);
+
+        final NormalizationResult<?> normalized;
+        try {
+            normalized = parseChildData(requireNonNull(stream), inference);
+        } catch (IOException | IllegalArgumentException e) {
+            throw NormalizationException.ofCause(e);
+        }
+
+        final var prefix = new ArrayList<@NonNull PathArgument>();
+        var data = normalized.data();
+        var metadata = normalized.metadata();
+        var mountPoints = normalized.mountPoints();
+
+        // Deal with the semantic differences of what "child" means in NormalizedNode versus in YANG data tree
+        // structure.
+
+        // NormalizedNode structure has 'choice' statements visible and addressable, whereas YANG data tree makes
+        // them completely transparent.
+        //
+        // Therefore we need to peel any ChoiceNodes from the result and shift them to the prefix, since each choice
+        // was created implicitly to contain the element mentioned in the stream.
+        while (data instanceof ChoiceNode choice) {
+            prefix.add(choice.name());
+            data = choice.body().iterator().next();
+        }
+
+        // NormalizedNode structure has 'list' and 'leaf-list' statements visible and addressable, whereas YANG data
+        // tree addressing can only point to individual instances. RFC8040 section 4.4.1 states:
+        //
+        //        The message-body is expected to contain the
+        //        content of a child resource to create within the parent (target
+        //        resource).  The message-body MUST contain exactly one instance of the
+        //        expected data resource.  The data model for the child tree is the
+        //        subtree, as defined by YANG for the child resource.
+        //
+        // Therefore we need to peel any UnkeyedListNode, MapNode and LeafSetNodes from the top-level and shift them
+        // to the prefix. Note that from the parser perspective, each such node can legally contain zero, one or more
+        // entries, but this method is restricted to allowing only a single entry.
+        if (data instanceof MapNode || data instanceof LeafSetNode || data instanceof UnkeyedListNode) {
+            final var dataName = data.name();
+            final var body = ((NormalizedNodeContainer<?>) data).body();
+            final var size = body.size();
+            if (size != 1) {
+                throw NormalizationException.ofMessage(
+                    "Exactly one instance of " + dataName.getNodeType() + " is required, " + size + " supplied");
+            }
+
+            prefix.add(dataName);
+            data = body.iterator().next();
+            if (metadata != null) {
+                metadata = metadata.getChildren().get(dataName);
+            }
+            if (mountPoints != null) {
+                mountPoints = mountPoints.getChildren().get(dataName);
+            }
+        }
+
+        return new PrefixAndResult(prefix, new NormalizationResult<>(data, metadata, mountPoints));
+    }
+
+    protected abstract @NonNull NormalizationResult<?> parseChildData(@NonNull InputStream stream,
+        @NonNull EffectiveStatementInference inference) throws IOException, NormalizationException;
+
+    @Override
+    public final NormalizationResult<ContainerNode> parseInput(final EffectiveStatementInference inference,
+            final InputStream stream) throws NormalizationException {
+        final var stack = checkInferenceNotEmpty(inference);
+        final var stmt = stack.currentStatement();
+        final QName expected;
+        if (stmt instanceof RpcEffectiveStatement rpc) {
+            expected = rpc.input().argument();
+        } else if (stmt instanceof ActionEffectiveStatement action) {
+            expected = action.input().argument();
+        } else {
+            throw new IllegalArgumentException("Invalid inference statement " + stmt);
+        }
+        return parseInputOutput(stream, stack, expected);
+    }
+
+    @Override
+    public final NormalizationResult<ContainerNode> parseOutput(final EffectiveStatementInference inference,
+            final InputStream stream) throws NormalizationException {
+        final var stack = checkInferenceNotEmpty(inference);
+        final var stmt = stack.currentStatement();
+        final QName expected;
+        if (stmt instanceof RpcEffectiveStatement rpc) {
+            expected = rpc.output().argument();
+        } else if (stmt instanceof ActionEffectiveStatement action) {
+            expected = action.output().argument();
+        } else {
+            throw new IllegalArgumentException("Invalid inference statement " + stmt);
+        }
+        return parseInputOutput(stream, stack, expected);
+    }
+
+    private @NonNull NormalizationResult<ContainerNode> parseInputOutput(final @NonNull InputStream stream,
+            final @NonNull SchemaInferenceStack stack, final @NonNull QName expected) throws NormalizationException {
+        final NormalizationResult<?> data;
+        try {
+            data = parseInputOutput(stack, expected, requireNonNull(stream));
+        } catch (IOException | IllegalArgumentException e) {
+            throw NormalizationException.ofCause(e);
+        }
+        return checkNodeContainer(data);
+    }
+
+    protected abstract @NonNull NormalizationResult<?> parseInputOutput(@NonNull SchemaInferenceStack stack,
+        @NonNull QName expected, @NonNull InputStream stream) throws IOException, NormalizationException;
+
+    private void checkInference(final EffectiveStatementInference inference) {
+        final var modelContext = inference.getEffectiveModelContext();
+        final var local = getEffectiveModelContext();
+        if (!local.equals(modelContext)) {
+            throw new IllegalArgumentException("Mismatched inference, expecting model context " + local);
+        }
+    }
+
+    private @NonNull SchemaInferenceStack checkInferenceNotEmpty(final EffectiveStatementInference inference) {
+        checkInference(inference);
+        final var stack = SchemaInferenceStack.ofInference(inference);
+        if (stack.isEmpty()) {
+            throw new IllegalArgumentException("Inference must not be empty");
+        }
+        return stack;
+    }
+
+    @SuppressWarnings("unchecked")
+    protected static final @NonNull NormalizationResult<ContainerNode> checkNodeContainer(
+            final NormalizationResult<?> result) throws NormalizationException {
+        final var data = result.data();
+        if (data instanceof ContainerNode) {
+            return (NormalizationResult<ContainerNode>) result;
+        }
+        throw NormalizationException.ofMessage("Unexpected payload type " + data.contract());
+    }
+
+    protected static final @NonNull NormalizationResult<?> checkNodeName(final NormalizationResult<?> result,
+            final QName expected) throws NormalizationException {
+        final var qname = result.data().name().getNodeType();
+        if (qname.equals(expected)) {
+            return result;
+        }
+        throw NormalizationException.ofMessage(
+            "Payload name " + qname + " is different from identifier name " + expected);
+    }
+}