YANGTOOLS-766: introduce JSONCodecFactorySupplier 65/66765/3
authorRobert Varga <robert.varga@pantheon.tech>
Tue, 26 Dec 2017 10:57:08 +0000 (11:57 +0100)
committerRobert Varga <robert.varga@pantheon.tech>
Tue, 26 Dec 2017 13:07:53 +0000 (14:07 +0100)
We need two separate instances of JSONCodecFactory factory methods,
each with its own conformance, so that we can properly spin out
codecs compliant to RFC7951 and draft-lhotka-netmod-yang-json-02.

This patch provides new API entry points for specifying these, without
actually providing distinct codecs. It also provides proper JsonParserStream
factory methods which take JSONCodecFactory instead of plain SchemaContext
(and thus assuming JSONCodecFactory implementation).

Change-Id: Ib2b402ed865903432645632e89c9b0e04851a05c
Signed-off-by: Robert Varga <robert.varga@pantheon.tech>
16 files changed:
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactory.java
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java [new file with mode: 0644]
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/AnyXmlSupportTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug4501Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug4969Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug5446Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug6112Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug7246Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug8083Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonStreamToNormalizedNodeTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/NormalizedNodeToJsonStreamTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/StreamToNormalizedNodeTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/TestUtils.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/YangModeledAnyXmlSupportTest.java
yang/yang-data-util/src/main/java/org/opendaylight/yangtools/yang/data/util/codec/AbstractCodecFactory.java

index 0a5e082276c4f8d6f6af9460c0965fa86bbf47f3..5cfe1784f1df3c0f0cbbc9a99b660e3ca6bcd235 100644 (file)
@@ -7,13 +7,12 @@
  */
 package org.opendaylight.yangtools.yang.data.codec.gson;
 
+import static com.google.common.base.Verify.verifyNotNull;
+
 import com.google.common.annotations.Beta;
-import com.google.common.base.Stopwatch;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
 import java.util.List;
 import java.util.Optional;
+import java.util.function.BiFunction;
 import org.opendaylight.yangtools.yang.common.QNameModule;
 import org.opendaylight.yangtools.yang.data.impl.codec.AbstractIntegerStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.BinaryStringCodec;
@@ -24,14 +23,7 @@ import org.opendaylight.yangtools.yang.data.impl.codec.EnumStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.StringStringCodec;
 import org.opendaylight.yangtools.yang.data.util.codec.AbstractCodecFactory;
 import org.opendaylight.yangtools.yang.data.util.codec.CodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
-import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
-import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BooleanTypeDefinition;
@@ -51,8 +43,6 @@ import org.opendaylight.yangtools.yang.model.api.type.Uint64TypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.Uint8TypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnknownTypeDefinition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Factory for creating JSON equivalents of codecs. Each instance of this object is bound to
@@ -65,56 +55,12 @@ import org.slf4j.LoggerFactory;
  */
 @Beta
 public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
-    private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
-        @Override
-        public JSONCodecFactory load(final SchemaContext key) {
-            final Stopwatch sw = Stopwatch.createStarted();
-            final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
-            final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache);
-            final int visitedLeaves = requestCodecsForChildren(lazy, key);
-            sw.stop();
-
-            final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
-            LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
-                cache.simpleSize(), cache.complexSize(), sw);
-            return new JSONCodecFactory(key, cache);
-        }
-
-        private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
-            int ret = 0;
-            for (DataSchemaNode child : parent.getChildNodes()) {
-                if (child instanceof TypedDataSchemaNode) {
-                    lazy.codecFor((TypedDataSchemaNode) child);
-                    ++ret;
-                } else if (child instanceof DataNodeContainer) {
-                    ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
-                }
-            }
-
-            return ret;
-        }
-    }
-
-    private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactory.class);
-
-    // Weak keys to retire the entry when SchemaContext goes away
-    private static final LoadingCache<SchemaContext, JSONCodecFactory> PRECOMPUTED = CacheBuilder.newBuilder()
-            .weakKeys().build(new EagerCacheLoader());
-
-    // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
-    private static final LoadingCache<SchemaContext, JSONCodecFactory> SHARED = CacheBuilder.newBuilder()
-            .weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
-                @Override
-                public JSONCodecFactory load(final SchemaContext key) {
-                    return new JSONCodecFactory(key, new SharedCodecCache<>());
-                }
-            });
-
     private final JSONCodec<?> iidCodec;
 
-    JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache) {
+    JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache,
+            final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
         super(context, cache);
-        iidCodec = new JSONStringInstanceIdentifierCodec(context, this);
+        iidCodec = verifyNotNull(iidCodecSupplier.apply(context, this));
     }
 
     /**
@@ -137,9 +83,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputed(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory getPrecomputed(final SchemaContext context) {
-        return PRECOMPUTED.getUnchecked(context);
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputed(context);
     }
 
     /**
@@ -152,9 +101,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputedIfAvailable(SchemaContext)} instead.
      */
+    @Deprecated
     public static Optional<JSONCodecFactory> getPrecomputedIfAvailable(final SchemaContext context) {
-        return Optional.ofNullable(PRECOMPUTED.getIfPresent(context));
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputedIfAvailable(context);
     }
 
     /**
@@ -169,9 +121,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getShared(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory getShared(final SchemaContext context) {
-        return SHARED.getUnchecked(context);
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context);
     }
 
     /**
@@ -186,9 +141,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A non-sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#createLazy(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory createLazy(final SchemaContext context) {
-        return new JSONCodecFactory(context, new LazyCodecCache<>());
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createLazy(context);
     }
 
     /**
@@ -203,9 +161,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A non-sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null.
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#createSimple(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory createSimple(final SchemaContext context) {
-        return new JSONCodecFactory(context, NoopCodecCache.getInstance());
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createSimple(context);
     }
 
     @Override
@@ -245,8 +206,6 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
 
     @Override
     protected JSONCodec<?> instanceIdentifierCodec(final InstanceIdentifierTypeDefinition type) {
-        // FIXME: there really are two favors, as 'require-instance true' needs to be validated. In order to deal
-        //        with that, though, we need access to the current data store.
         return iidCodec;
     }
 
@@ -304,5 +263,4 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
     protected JSONCodec<?> unknownCodec(final UnknownTypeDefinition type) {
         return NullJSONCodec.INSTANCE;
     }
-
 }
diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java
new file mode 100644 (file)
index 0000000..d3a04ed
--- /dev/null
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2017 Pantheon Technologies, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.codec.gson;
+
+import static com.google.common.base.Verify.verifyNotNull;
+import static java.util.Objects.requireNonNull;
+
+import com.google.common.annotations.Beta;
+import com.google.common.base.Stopwatch;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import java.util.Optional;
+import java.util.function.BiFunction;
+import org.eclipse.jdt.annotation.NonNull;
+import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * API entry point for acquiring {@link JSONCodecFactory} instances.
+ *
+ * @author Robert Varga
+ */
+@Beta
+public enum JSONCodecFactorySupplier {
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with RFC7951.
+     */
+    // FIXME: YANGTOOLS-766: use a different codec
+    RFC7951(JSONStringInstanceIdentifierCodec::new),
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with draft-lhotka-netmod-yang-json-02.
+     */
+    DRAFT_LHOTKA_NETMOD_YANG_JSON_02(JSONStringInstanceIdentifierCodec::new);
+
+    private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactorySupplier.class);
+
+    private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
+        private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+            iidCodecSupplier;
+
+        EagerCacheLoader(final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+                iidCodecSupplier) {
+            this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        }
+
+        @Override
+        public JSONCodecFactory load(final SchemaContext key) {
+            final Stopwatch sw = Stopwatch.createStarted();
+            final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
+            final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache, iidCodecSupplier);
+            final int visitedLeaves = requestCodecsForChildren(lazy, key);
+            sw.stop();
+
+            final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
+            LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
+                cache.simpleSize(), cache.complexSize(), sw);
+            return new JSONCodecFactory(key, cache, iidCodecSupplier);
+        }
+
+        private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
+            int ret = 0;
+            for (DataSchemaNode child : parent.getChildNodes()) {
+                if (child instanceof TypedDataSchemaNode) {
+                    lazy.codecFor((TypedDataSchemaNode) child);
+                    ++ret;
+                } else if (child instanceof DataNodeContainer) {
+                    ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
+                }
+            }
+
+            return ret;
+        }
+    }
+
+    private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier;
+
+    // Weak keys to retire the entry when SchemaContext goes away
+    private final LoadingCache<SchemaContext, JSONCodecFactory> precomputed;
+
+    // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
+    private final LoadingCache<SchemaContext, JSONCodecFactory> shared;
+
+    JSONCodecFactorySupplier(
+            final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
+        this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        precomputed = CacheBuilder.newBuilder().weakKeys().build(new EagerCacheLoader(iidCodecSupplier));
+        shared = CacheBuilder.newBuilder().weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
+            @Override
+            public JSONCodecFactory load(final SchemaContext key) {
+                return new JSONCodecFactory(key, new SharedCodecCache<>(), iidCodecSupplier);
+            }
+        });
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present. Returned object can be safely
+     * used by multiple threads concurrently. If the SchemaContext instance does not have a cached instance
+     * of {@link JSONCodecFactory}, it will be completely precomputed before this method will return.
+     *
+     * <p>
+     * Choosing this implementation is appropriate when the memory overhead of keeping a full codec tree is not as
+     * great a concern as predictable performance. When compared to the implementation returned by
+     * {@link #getShared(SchemaContext)}, this implementation is expected to offer higher performance and have lower
+     * peak memory footprint when most of the SchemaContext is actually in use.
+     *
+     * <p>
+     * For call sites which do not want to pay the CPU cost of pre-computing this implementation, but still would like
+     * to use it if it is available (by being populated by some other caller), you can use
+     * {@link #getPrecomputedIfAvailable(SchemaContext)}.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getPrecomputed(final @NonNull SchemaContext context) {
+        return verifyNotNull(precomputed.getUnchecked(context));
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext, if it is available. This
+     * method is a non-blocking equivalent of {@link #getPrecomputed(SchemaContext)} for use in code paths where
+     * the potential of having to pre-compute the implementation is not acceptable. One such scenario is when the
+     * code base wants to opportunistically take advantage of pre-computed version, but is okay with a fallback to
+     * a different implementation.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull Optional<JSONCodecFactory> getPrecomputedIfAvailable(final @NonNull SchemaContext context) {
+        return Optional.ofNullable(precomputed.getIfPresent(context));
+    }
+
+    /**
+     * Get a thread-safe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present or the factory is not invalidated
+     * by memory pressure. Returned object can be safely used by multiple threads concurrently.
+     *
+     * <p>
+     * Choosing this implementation is a safe default, as it will not incur prohibitive blocking, nor will it tie up
+     * memory in face of pressure.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getShared(final @NonNull SchemaContext context) {
+        return verifyNotNull(shared.getUnchecked(context));
+    }
+
+    /**
+     * Create a new thread-unsafe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method will
+     * return distinct objects every time it is invoked. Returned object may not be used from multiple threads
+     * concurrently.
+     *
+     * <p>
+     * This implementation is appropriate for one-off serialization from a single thread. It will aggressively cache
+     * codecs for reuse and will tie them up in memory until the factory is freed.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory createLazy(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, new LazyCodecCache<>(), iidCodecSupplier);
+    }
+
+    /**
+     * Create a simplistic, thread-safe {@link JSONCodecFactory} for a {@link SchemaContext}. This method will return
+     * distinct objects every time it is invoked. Returned object may be used from multiple threads concurrently.
+     *
+     * <p>
+     * This implementation exists mostly for completeness only, as it does not perform any caching at all and each codec
+     * is computed every time it is requested. This may be useful in extremely constrained environments, where memory
+     * footprint is more critical than performance.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null.
+     */
+    public @NonNull JSONCodecFactory createSimple(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, NoopCodecCache.getInstance(), iidCodecSupplier);
+    }
+}
index bb35ce819843ac29620bfe7b6ecad514732fd1cb..93c0165443e7e3be987780105973291a228c9cce 100644 (file)
@@ -31,6 +31,7 @@ import java.util.Iterator;
 import java.util.Map.Entry;
 import java.util.Set;
 import javax.xml.transform.dom.DOMSource;
+import org.eclipse.jdt.annotation.NonNull;
 import org.opendaylight.yangtools.odlext.model.api.YangModeledAnyXmlSchemaNode;
 import org.opendaylight.yangtools.util.xml.UntrustedXML;
 import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
@@ -69,33 +70,84 @@ public final class JsonParserStream implements Closeable, Flushable {
     private final Deque<URI> namespaces = new ArrayDeque<>();
     private final NormalizedNodeStreamWriter writer;
     private final JSONCodecFactory codecs;
-    private final SchemaContext schema;
     private final DataSchemaNode parentNode;
 
-    private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final JSONCodecFactory codecs, final DataSchemaNode parentNode) {
-        this.schema = requireNonNull(schemaContext);
+    private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
+            final DataSchemaNode parentNode) {
         this.writer = requireNonNull(writer);
         this.codecs = requireNonNull(codecs);
         this.parentNode = parentNode;
     }
 
-    private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final DataSchemaNode parentNode) {
-        this(writer, schemaContext, JSONCodecFactory.getShared(schemaContext), parentNode);
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+     * with the specified codec factory.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory) {
+        return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext());
     }
 
-    public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final SchemaNode parentNode) {
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @param parentNode Logical root node
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
         if (parentNode instanceof RpcDefinition) {
-            return new JsonParserStream(writer, schemaContext, new RpcAsContainer((RpcDefinition) parentNode));
+            return new JsonParserStream(writer, codecFactory, new RpcAsContainer((RpcDefinition) parentNode));
         }
-        checkArgument(parentNode instanceof DataSchemaNode, "Instance of DataSchemaNode class awaited.");
-        return new JsonParserStream(writer, schemaContext, (DataSchemaNode) parentNode);
+        checkArgument(parentNode instanceof DataSchemaNode, "An instance of DataSchemaNode is expected, %s supplied",
+            parentNode);
+        return new JsonParserStream(writer, codecFactory, (DataSchemaNode) parentNode);
+    }
+
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link SchemaContext}. The stream will be logically rooted at the top of the supplied SchemaContext.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param schemaContext {@link SchemaContext} to use
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     *
+     * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory)} instead.
+     */
+    @Deprecated
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull SchemaContext schemaContext) {
+        return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
     }
 
-    public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext) {
-        return new JsonParserStream(writer, schemaContext, schemaContext);
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link SchemaContext}. The stream will be logically rooted at the specified parent node.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param schemaContext {@link SchemaContext} to use
+     * @param parentNode Logical root node
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     *
+     * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory, SchemaNode)} instead.
+     */
+    @Deprecated
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull SchemaContext schemaContext, final @NonNull SchemaNode parentNode) {
+        return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext),
+            parentNode);
     }
 
     public JsonParserStream parse(final JsonReader reader) {
@@ -305,7 +357,7 @@ public final class JsonParserStream implements Closeable, Flushable {
             moduleNamePart = childName.substring(0, lastIndexOfColon);
             nodeNamePart = childName.substring(lastIndexOfColon + 1);
 
-            final Iterator<Module> m = schema.findModules(moduleNamePart).iterator();
+            final Iterator<Module> m = codecs.getSchemaContext().findModules(moduleNamePart).iterator();
             namespace = m.hasNext() ? m.next().getNamespace() : null;
         } else {
             nodeNamePart = childName;
@@ -335,7 +387,7 @@ public final class JsonParserStream implements Closeable, Flushable {
         for (final URI potentialUri : potentialUris) {
             builder.append('\n');
             //FIXME how to get information about revision from JSON input? currently first available is used.
-            builder.append(schema.findModules(potentialUri).iterator().next().getName());
+            builder.append(codecs.getSchemaContext().findModules(potentialUri).iterator().next().getName());
         }
         return builder.toString();
     }
index bac5a4144c1a1ebbe55a489f0a82c36211ce1a44..19a47fe79417ad525760f6f980a3e79bd45afccc 100644 (file)
@@ -67,7 +67,8 @@ public class AnyXmlSupportTest {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -102,7 +103,8 @@ public class AnyXmlSupportTest {
 
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index cb05b63a1d618138750c90d436e7abeafaee9f1f..eee52da71238e305bda71119368dd6962fa4c5dc 100644 (file)
@@ -46,7 +46,8 @@ public class Bug4501Test {
         final String inputJson = loadTextFile("/bug-4501/json/foo-correct.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertTrue(transformedInput instanceof UnkeyedListNode);
@@ -64,7 +65,8 @@ public class Bug4501Test {
         final String inputJson = loadTextFile("/bug-4501/json/foo-incorrect.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
 
         try {
             jsonParser.parse(new JsonReader(new StringReader(inputJson)));
index 5330bd0d3341fd0fc7ced8f6b203851941ece226..c1f30b46c6d895b99f8ef302dc07d11151c26a70 100644 (file)
@@ -44,7 +44,8 @@ public class Bug4969Test {
         final String inputJson = TestUtils.loadTextFile("/bug-4969/json/foo.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
 
@@ -102,7 +103,8 @@ public class Bug4969Test {
         final String inputJson = TestUtils.loadTextFile("/leafref/json/data.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index 7eb6054e682b990f92a59d9fd02f848e674091de..4d0600a258543888614274be88aa87bb389e8427 100644 (file)
@@ -75,8 +75,8 @@ public class Bug5446Test {
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                JsonWriterFactory.createJsonWriter(writer, 2));
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index c257a3af0a183bffbc0d9fbefd5ae872994b6297..eaffe4c15a30d596d0a6bccbc7fa2cae90d0a802 100644 (file)
@@ -43,7 +43,8 @@ public class Bug6112Test {
 
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         return result.getResult();
     }
index d2d1bbbdcb1d3913dd85e6b6be87c430ebd57fcd..86a0057551a892f66654933d4097888afeb3c6ed 100644 (file)
@@ -62,8 +62,8 @@ public class Bug7246Test {
             throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), path, URI.create(NS),
-                JsonWriterFactory.createJsonWriter(writer, 2));
+                JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), path,
+                URI.create(NS), JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index fb3e195c9aa162b617df3fe7dcf8034d015efebf..138841739f8926c20f864e9cdd6c72d2ececc463 100644 (file)
@@ -35,7 +35,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.RFC7951.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -49,7 +50,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -63,7 +65,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -77,7 +80,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index c0599b7b6b1fbbb6b59a889dc91e271f6484be1e..822bda7f6f505f3c1408193b8d619b1e5cb09f04 100644 (file)
@@ -195,7 +195,8 @@ public class JsonStreamToNormalizedNodeTest {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
         final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -208,7 +209,8 @@ public class JsonStreamToNormalizedNodeTest {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
         final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -247,7 +249,8 @@ public class JsonStreamToNormalizedNodeTest {
                                         .build())
                                 .build()).build();
 
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -258,7 +261,8 @@ public class JsonStreamToNormalizedNodeTest {
             final NormalizedNode<?, ?> awaitedStructure) {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertEquals("Transformation of json input to normalized node wasn't successful.", awaitedStructure,
index 2f9fa49d1a54b4d5f7b61d793ae877dd09f6c2f5..bf30b77bc328ff5d315cdcea9280752ce49c1121 100644 (file)
@@ -312,7 +312,7 @@ public class NormalizedNodeToJsonStreamTest {
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-            JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
             JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
index 0591c5e2c774c6eb6369d800f794981efc809543..9a968cb43d880ea7ded7bd5f93c6411fec2ffb4c 100644 (file)
@@ -16,7 +16,6 @@ import java.io.StringReader;
 import java.io.StringWriter;
 import java.net.URISyntaxException;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
 import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
@@ -56,7 +55,8 @@ public class StreamToNormalizedNodeTest {
         final LoggingNormalizedNodeStreamWriter logWriter = new LoggingNormalizedNodeStreamWriter();
 
         // JSON -> StreamWriter parser
-        try (JsonParserStream jsonHandler = JsonParserStream.create(logWriter, schemaContext)) {
+        try (JsonParserStream jsonHandler = JsonParserStream.create(logWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
             // Process multiple readers, flush()/close() as needed
             jsonHandler.parse(reader);
         }
@@ -66,7 +66,6 @@ public class StreamToNormalizedNodeTest {
      * Demonstrates how to create an immutable NormalizedNode tree from a {@link JsonReader} and
      * then writes the data back into string representation.
      */
-    @Ignore
     @Test
     public void immutableNormalizedNodeStreamWriterDemonstration() throws IOException {
         /*
@@ -79,7 +78,8 @@ public class StreamToNormalizedNodeTest {
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
 
         // JSON -> StreamWriter parser
-        try (JsonParserStream handler = JsonParserStream.create(streamWriter, schemaContext)) {
+        try (JsonParserStream handler = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
             handler.parse(new JsonReader(new StringReader(streamAsString)));
         }
 
@@ -101,7 +101,7 @@ public class StreamToNormalizedNodeTest {
         // StreamWriter which outputs JSON strings
         // StreamWriter which outputs JSON strings
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-            JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
             JsonWriterFactory.createJsonWriter(writer, 2));
 
         // NormalizedNode -> StreamWriter
index a8c512301ff2ee66f72bf1a0f1107f7c8841a97c..280fa592e7cc50408ef646e52bdd932e672f73bf 100644 (file)
@@ -69,7 +69,7 @@ public final class TestUtils {
             final SchemaPath rootPath) throws IOException {
         final Writer writer = new StringWriter();
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), rootPath, null,
+                JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), rootPath, null,
                 JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(data);
index de041d6b37e19368ee8151f74c385a73fa2c472f..4da8a82bac7eef21cc49413f85cd994e811f3292 100644 (file)
@@ -83,7 +83,8 @@ public class YangModeledAnyXmlSupportTest {
         final String inputJson = loadTextFile("/yang-modeled-anyxml/json/baz.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
 
@@ -109,8 +110,8 @@ public class YangModeledAnyXmlSupportTest {
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                JsonWriterFactory.createJsonWriter(writer, 2));
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index 6699581dcceec740205f4c327f56012c83c906d1..ab52a2ec9baf448ebea40ed2c7ca40b25c1f3767 100644 (file)
@@ -116,6 +116,8 @@ public abstract class AbstractCodecFactory<T extends TypeAwareCodec<?, ?, ?>> {
 
     protected abstract T identityRefCodec(IdentityrefTypeDefinition type, QNameModule module);
 
+    // FIXME: there really are two flavors, as 'require-instance true' needs to be validated. In order to deal
+    //        with that, though, we need access to the current data store.
     protected abstract T instanceIdentifierCodec(InstanceIdentifierTypeDefinition type);
 
     protected abstract T int8Codec(Int8TypeDefinition type);