import javax.xml.transform.dom.DOMSource;
import org.eclipse.jdt.annotation.NonNull;
import org.opendaylight.yangtools.util.xml.UntrustedXML;
+import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.XMLNamespace;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.util.AbstractNodeDataWithSchema;
import org.opendaylight.yangtools.yang.model.api.ChoiceSchemaNode;
import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.OperationDefinition;
-import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.meta.EffectiveStatement;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
// Codec factory used to translate leaf values according to their YANG type.
private final JSONCodecFactory codecs;
// Schema node acting as the parent for parsed data; derived from the stack's
// current statement in the constructor (or the model context when the stack is empty).
private final DataSchemaNode parentNode;
// Inference stack tracking the current schema position; mutated via
// enterDataTree()/exit() as the parser descends — NOTE(review): not thread-safe, confirm
// single-threaded use by callers.
private final SchemaInferenceStack stack;

// TODO: consider class specialization to remove this field
private final boolean lenient;
/**
 * Construct a parser bound to the supplied writer, codec factory and schema position.
 * The parent data schema node is derived from the stack: an empty stack means the
 * parent is the effective model context itself; otherwise the stack's current
 * statement must be a {@code DataSchemaNode} or an {@code OperationDefinition}
 * (wrapped as a container).
 *
 * @throws NullPointerException if writer, codecs or stack is null
 * @throws IllegalArgumentException if the stack's current statement is neither a
 *         data schema node nor an operation definition
 */
private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
        final SchemaInferenceStack stack, final boolean lenient) {
    this.writer = requireNonNull(writer);
    this.codecs = requireNonNull(codecs);
    this.stack = requireNonNull(stack);
    this.lenient = lenient;

    if (stack.isEmpty()) {
        // Top-level parsing: the model context doubles as the parent container.
        parentNode = stack.getEffectiveModelContext();
    } else {
        final EffectiveStatement<?, ?> current = stack.currentStatement();
        if (current instanceof DataSchemaNode) {
            parentNode = (DataSchemaNode) current;
        } else if (current instanceof OperationDefinition) {
            // RPCs/actions are presented to the parser as a synthetic container.
            parentNode = OperationAsContainer.of((OperationDefinition) current);
        } else {
            throw new IllegalArgumentException("Illegal parent node " + current);
        }
    }
}
/**
*/
public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
final @NonNull JSONCodecFactory codecFactory) {
- return new JsonParserStream(writer, codecFactory, codecFactory.getEffectiveModelContext(), false);
+ return new JsonParserStream(writer, codecFactory,
+ SchemaInferenceStack.of(codecFactory.getEffectiveModelContext()), false);
}
/**
* @throws NullPointerException if any of the arguments are null
*/
public static @NonNull JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
- final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
- return new JsonParserStream(writer, codecFactory, validateParent(parentNode), false);
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull EffectiveStatementInference parentNode) {
+ return new JsonParserStream(writer, codecFactory, SchemaInferenceStack.ofInference(parentNode), false);
}
/**
*/
public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
final @NonNull JSONCodecFactory codecFactory) {
- return new JsonParserStream(writer, codecFactory, codecFactory.getEffectiveModelContext(), true);
+ return new JsonParserStream(writer, codecFactory,
+ SchemaInferenceStack.of(codecFactory.getEffectiveModelContext()), true);
}
/**
* @throws NullPointerException if any of the arguments are null
*/
public static @NonNull JsonParserStream createLenient(final @NonNull NormalizedNodeStreamWriter writer,
- final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
- return new JsonParserStream(writer, codecFactory, validateParent(parentNode), true);
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull EffectiveStatementInference parentNode) {
+ return new JsonParserStream(writer, codecFactory, SchemaInferenceStack.ofInference(parentNode), true);
}
public JsonParserStream parse(final JsonReader reader) {
"Schema for node with name %s and namespace %s does not exist at %s",
localName, getCurrentNamespace(), parentSchema);
-
+ final QName qname = childDataSchemaNodes.peekLast().getQName();
final AbstractNodeDataWithSchema<?> newChild = ((CompositeNodeDataWithSchema<?>) parent)
.addChild(childDataSchemaNodes, ChildReusePolicy.NOOP);
if (newChild instanceof AnyXmlNodeDataWithSchema) {
readAnyXmlValue(in, (AnyXmlNodeDataWithSchema) newChild, jsonElementName);
} else {
+ stack.enterDataTree(qname);
read(in, newChild);
+ stack.exit();
}
removeNamespace();
}
private Object translateValueByType(final String value, final DataSchemaNode node) {
checkArgument(node instanceof TypedDataSchemaNode);
- return codecs.codecFor((TypedDataSchemaNode) node).parseValue(null, value);
+ return codecs.codecFor((TypedDataSchemaNode) node, stack).parseValue(null, value);
}
private void removeNamespace() {
return namespaces.peek();
}
- private static DataSchemaNode validateParent(final SchemaNode parent) {
- if (parent instanceof DataSchemaNode) {
- return (DataSchemaNode) parent;
- } else if (parent instanceof OperationDefinition) {
- return OperationAsContainer.of((OperationDefinition) parent);
- } else {
- throw new IllegalArgumentException("Illegal parent node " + requireNonNull(parent));
- }
- }
-
@Override
public void flush() throws IOException {
writer.flush();