YANGTOOLS-766: introduce JSONCodecFactorySupplier 73/66773/3
author: Robert Varga <robert.varga@pantheon.tech>
Tue, 26 Dec 2017 10:57:08 +0000 (11:57 +0100)
committer: Robert Varga <robert.varga@pantheon.tech>
Wed, 27 Dec 2017 12:21:28 +0000 (13:21 +0100)
We need two separate instances of JSONCodecFactory factory methods,
each with its own conformance, so that we can properly spin out
codecs compliant to RFC7951 and draft-lhotka-netmod-yang-json-02.

This patch provides new API entry points for specifying these, without
actually providing distinct codecs. It also provides proper JsonParserStream
factory methods which take a JSONCodecFactory instead of a plain SchemaContext
(and thus no longer assume a particular JSONCodecFactory implementation).

Change-Id: Ib2b402ed865903432645632e89c9b0e04851a05c
Signed-off-by: Robert Varga <robert.varga@pantheon.tech>
(cherry picked from commit cc3f63f7d403f66a9761e27ae6afcfd30ec48df0)

16 files changed:
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactory.java
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java [new file with mode: 0644]
yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonParserStream.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/AnyXmlSupportTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug4501Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug4969Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug5446Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug6112Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug7246Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/Bug8083Test.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/JsonStreamToNormalizedNodeTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/NormalizedNodeToJsonStreamTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/StreamToNormalizedNodeTest.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/TestUtils.java
yang/yang-data-codec-gson/src/test/java/org/opendaylight/yangtools/yang/data/codec/gson/YangModeledAnyXmlSupportTest.java
yang/yang-data-util/src/main/java/org/opendaylight/yangtools/yang/data/util/codec/AbstractCodecFactory.java

index c24db7699414d8d762b6e4a78e70d77547e7df5e..641a5f8af82d01bd899781d7eb70a651e2131269 100644 (file)
@@ -7,6 +7,8 @@
  */
 package org.opendaylight.yangtools.yang.data.codec.gson;
 
+import static com.google.common.base.Verify.verifyNotNull;
+
 import com.google.common.annotations.Beta;
 import com.google.common.base.Optional;
 import com.google.common.base.Stopwatch;
@@ -14,6 +16,7 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import java.util.List;
+import java.util.function.BiFunction;
 import org.opendaylight.yangtools.yang.common.QNameModule;
 import org.opendaylight.yangtools.yang.data.impl.codec.AbstractIntegerStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.BinaryStringCodec;
@@ -24,14 +27,7 @@ import org.opendaylight.yangtools.yang.data.impl.codec.EnumStringCodec;
 import org.opendaylight.yangtools.yang.data.impl.codec.StringStringCodec;
 import org.opendaylight.yangtools.yang.data.util.codec.AbstractCodecFactory;
 import org.opendaylight.yangtools.yang.data.util.codec.CodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
-import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
-import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import org.opendaylight.yangtools.yang.model.api.TypedSchemaNode;
 import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.BooleanTypeDefinition;
@@ -45,8 +41,6 @@ import org.opendaylight.yangtools.yang.model.api.type.StringTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnknownTypeDefinition;
 import org.opendaylight.yangtools.yang.model.api.type.UnsignedIntegerTypeDefinition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Factory for creating JSON equivalents of codecs. Each instance of this object is bound to
@@ -58,56 +52,12 @@ import org.slf4j.LoggerFactory;
  */
 @Beta
 public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
-    private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
-        @Override
-        public JSONCodecFactory load(final SchemaContext key) {
-            final Stopwatch sw = Stopwatch.createStarted();
-            final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
-            final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache);
-            final int visitedLeaves = requestCodecsForChildren(lazy, key);
-            sw.stop();
-
-            final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
-            LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
-                cache.simpleSize(), cache.complexSize(), sw);
-            return new JSONCodecFactory(key, cache);
-        }
-
-        private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
-            int ret = 0;
-            for (DataSchemaNode child : parent.getChildNodes()) {
-                if (child instanceof TypedSchemaNode) {
-                    lazy.codecFor((TypedSchemaNode) child);
-                    ++ret;
-                } else if (child instanceof DataNodeContainer) {
-                    ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
-                }
-            }
-
-            return ret;
-        }
-    }
-
-    private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactory.class);
-
-    // Weak keys to retire the entry when SchemaContext goes away
-    private static final LoadingCache<SchemaContext, JSONCodecFactory> PRECOMPUTED = CacheBuilder.newBuilder()
-            .weakKeys().build(new EagerCacheLoader());
-
-    // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
-    private static final LoadingCache<SchemaContext, JSONCodecFactory> SHARED = CacheBuilder.newBuilder()
-            .weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
-                @Override
-                public JSONCodecFactory load(final SchemaContext key) {
-                    return new JSONCodecFactory(key, new SharedCodecCache<>());
-                }
-            });
-
     private final JSONCodec<?> iidCodec;
 
-    JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache) {
+    JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache,
+            final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
         super(context, cache);
-        iidCodec = new JSONStringInstanceIdentifierCodec(context, this);
+        iidCodec = verifyNotNull(iidCodecSupplier.apply(context, this));
     }
 
     /**
@@ -141,9 +91,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputed(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory getPrecomputed(final SchemaContext context) {
-        return PRECOMPUTED.getUnchecked(context);
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputed(context);
     }
 
     /**
@@ -156,9 +109,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputedIfAvailable(SchemaContext)} instead.
      */
+    @Deprecated
     public static Optional<JSONCodecFactory> getPrecomputedIfAvailable(final SchemaContext context) {
-        return Optional.fromNullable(PRECOMPUTED.getIfPresent(context));
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputedIfAvailable(context);
     }
 
     /**
@@ -172,9 +128,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#getShared(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory getShared(final SchemaContext context) {
-        return SHARED.getUnchecked(context);
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context);
     }
 
     /**
@@ -188,9 +147,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A non-sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#createLazy(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory createLazy(final SchemaContext context) {
-        return new JSONCodecFactory(context, new LazyCodecCache<>());
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createLazy(context);
     }
 
     /**
@@ -204,9 +166,12 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
      * @param context SchemaContext instance
      * @return A non-sharable {@link JSONCodecFactory}
      * @throws NullPointerException if context is null.
+     *
+     * @deprecated Use {@link JSONCodecFactorySupplier#createSimple(SchemaContext)} instead.
      */
+    @Deprecated
     public static JSONCodecFactory createSimple(final SchemaContext context) {
-        return new JSONCodecFactory(context, NoopCodecCache.getInstance());
+        return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createSimple(context);
     }
 
     @Override
@@ -246,8 +211,6 @@ public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
 
     @Override
     protected JSONCodec<?> instanceIdentifierCodec(final InstanceIdentifierTypeDefinition type) {
-        // FIXME: there really are two favors, as 'require-instance true' needs to be validated. In order to deal
-        //        with that, though, we need access to the current data store.
         return iidCodec;
     }
 
diff --git a/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java b/yang/yang-data-codec-gson/src/main/java/org/opendaylight/yangtools/yang/data/codec/gson/JSONCodecFactorySupplier.java
new file mode 100644 (file)
index 0000000..1b4d766
--- /dev/null
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2017 Pantheon Technologies, s.r.o. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.codec.gson;
+
+import static com.google.common.base.Verify.verifyNotNull;
+import static java.util.Objects.requireNonNull;
+
+import com.google.common.annotations.Beta;
+import com.google.common.base.Optional;
+import com.google.common.base.Stopwatch;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import java.util.function.BiFunction;
+import org.eclipse.jdt.annotation.NonNull;
+import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.TypedSchemaNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * API entry point for acquiring {@link JSONCodecFactory} instances.
+ *
+ * @author Robert Varga
+ */
+@Beta
+public enum JSONCodecFactorySupplier {
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with RFC7951.
+     */
+    // FIXME: YANGTOOLS-766: use a different codec
+    RFC7951(JSONStringInstanceIdentifierCodec::new),
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with draft-lhotka-netmod-yang-json-02.
+     */
+    DRAFT_LHOTKA_NETMOD_YANG_JSON_02(JSONStringInstanceIdentifierCodec::new);
+
+    private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactorySupplier.class);
+
+    private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
+        private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+            iidCodecSupplier;
+
+        EagerCacheLoader(final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+                iidCodecSupplier) {
+            this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        }
+
+        @Override
+        public JSONCodecFactory load(final SchemaContext key) {
+            final Stopwatch sw = Stopwatch.createStarted();
+            final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
+            final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache, iidCodecSupplier);
+            final int visitedLeaves = requestCodecsForChildren(lazy, key);
+            sw.stop();
+
+            final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
+            LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
+                cache.simpleSize(), cache.complexSize(), sw);
+            return new JSONCodecFactory(key, cache, iidCodecSupplier);
+        }
+
+        private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
+            int ret = 0;
+            for (DataSchemaNode child : parent.getChildNodes()) {
+                if (child instanceof TypedSchemaNode) {
+                    lazy.codecFor((TypedSchemaNode) child);
+                    ++ret;
+                } else if (child instanceof DataNodeContainer) {
+                    ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
+                }
+            }
+
+            return ret;
+        }
+    }
+
+    private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier;
+
+    // Weak keys to retire the entry when SchemaContext goes away
+    private final LoadingCache<SchemaContext, JSONCodecFactory> precomputed;
+
+    // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
+    private final LoadingCache<SchemaContext, JSONCodecFactory> shared;
+
+    JSONCodecFactorySupplier(
+            final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
+        this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        precomputed = CacheBuilder.newBuilder().weakKeys().build(new EagerCacheLoader(iidCodecSupplier));
+        shared = CacheBuilder.newBuilder().weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
+            @Override
+            public JSONCodecFactory load(final SchemaContext key) {
+                return new JSONCodecFactory(key, new SharedCodecCache<>(), iidCodecSupplier);
+            }
+        });
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present. Returned object can be safely
+     * used by multiple threads concurrently. If the SchemaContext instance does not have a cached instance
+     * of {@link JSONCodecFactory}, it will be completely precomputed before this method will return.
+     *
+     * <p>
+     * Choosing this implementation is appropriate when the memory overhead of keeping a full codec tree is not as
+     * great a concern as predictable performance. When compared to the implementation returned by
+     * {@link #getShared(SchemaContext)}, this implementation is expected to offer higher performance and have lower
+     * peak memory footprint when most of the SchemaContext is actually in use.
+     *
+     * <p>
+     * For call sites which do not want to pay the CPU cost of pre-computing this implementation, but still would like
+     * to use it if it is available (by being populated by some other caller), you can use
+     * {@link #getPrecomputedIfAvailable(SchemaContext)}.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getPrecomputed(final @NonNull SchemaContext context) {
+        return verifyNotNull(precomputed.getUnchecked(context));
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext, if it is available. This
+     * method is a non-blocking equivalent of {@link #getPrecomputed(SchemaContext)} for use in code paths where
+     * the potential of having to pre-compute the implementation is not acceptable. One such scenario is when the
+     * code base wants to opportunistically take advantage of pre-computed version, but is okay with a fallback to
+     * a different implementation.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull Optional<JSONCodecFactory> getPrecomputedIfAvailable(final @NonNull SchemaContext context) {
+        return Optional.fromNullable(precomputed.getIfPresent(context));
+    }
+
+    /**
+     * Get a thread-safe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present or the factory is not invalidated
+     * by memory pressure. Returned object can be safely used by multiple threads concurrently.
+     *
+     * <p>
+     * Choosing this implementation is a safe default, as it will not incur prohibitive blocking, nor will it tie up
+     * memory in face of pressure.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getShared(final @NonNull SchemaContext context) {
+        return verifyNotNull(shared.getUnchecked(context));
+    }
+
+    /**
+     * Create a new thread-unsafe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method will
+     * return distinct objects every time it is invoked. Returned object may not be used from multiple threads
+     * concurrently.
+     *
+     * <p>
+     * This implementation is appropriate for one-off serialization from a single thread. It will aggressively cache
+     * codecs for reuse and will tie them up in memory until the factory is freed.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory createLazy(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, new LazyCodecCache<>(), iidCodecSupplier);
+    }
+
+    /**
+     * Create a simplistic, thread-safe {@link JSONCodecFactory} for a {@link SchemaContext}. This method will return
+     * distinct objects every time it is invoked. Returned object may be used from multiple threads concurrently.
+     *
+     * <p>
+     * This implementation exists mostly for completeness only, as it does not perform any caching at all and each codec
+     * is computed every time it is requested. This may be useful in extremely constrained environments, where memory
+     * footprint is more critical than performance.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null.
+     */
+    public @NonNull JSONCodecFactory createSimple(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, NoopCodecCache.getInstance(), iidCodecSupplier);
+    }
+}
index bc29d28a65bc94a9ded963e72d1590a77ab23c0e..a8396f986b5ac23e488aecde194a35a6dc88ae2a 100644 (file)
@@ -25,6 +25,7 @@ import java.util.Deque;
 import java.util.HashSet;
 import java.util.Set;
 import javax.xml.transform.dom.DOMSource;
+import org.eclipse.jdt.annotation.NonNull;
 import org.opendaylight.yangtools.util.xml.UntrustedXML;
 import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
 import org.opendaylight.yangtools.yang.data.util.AbstractNodeDataWithSchema;
@@ -63,33 +64,84 @@ public final class JsonParserStream implements Closeable, Flushable {
     private final Deque<URI> namespaces = new ArrayDeque<>();
     private final NormalizedNodeStreamWriter writer;
     private final JSONCodecFactory codecs;
-    private final SchemaContext schema;
     private final DataSchemaNode parentNode;
 
-    private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final JSONCodecFactory codecs, final DataSchemaNode parentNode) {
-        this.schema = Preconditions.checkNotNull(schemaContext);
+    private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
+            final DataSchemaNode parentNode) {
         this.writer = Preconditions.checkNotNull(writer);
         this.codecs = Preconditions.checkNotNull(codecs);
         this.parentNode = parentNode;
     }
 
-    private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final DataSchemaNode parentNode) {
-        this(writer, schemaContext, JSONCodecFactory.getShared(schemaContext), parentNode);
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+     * with the specified codec factory.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory) {
+        return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext());
     }
 
-    public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
-            final SchemaNode parentNode) {
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+     * @param parentNode Logical root node
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
         if (parentNode instanceof RpcDefinition) {
-            return new JsonParserStream(writer, schemaContext, new RpcAsContainer((RpcDefinition) parentNode));
+            return new JsonParserStream(writer, codecFactory, new RpcAsContainer((RpcDefinition) parentNode));
         }
-        Preconditions.checkArgument(parentNode instanceof DataSchemaNode, "Instance of DataSchemaNode class awaited.");
-        return new JsonParserStream(writer, schemaContext, (DataSchemaNode) parentNode);
+        Preconditions.checkArgument(parentNode instanceof DataSchemaNode,
+                "An instance of DataSchemaNode is expected, %s supplied", parentNode);
+        return new JsonParserStream(writer, codecFactory, (DataSchemaNode) parentNode);
+    }
+
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link SchemaContext}. The stream will be logically rooted at the top of the supplied SchemaContext.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param schemaContext {@link SchemaContext} to use
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     *
+     * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory)} instead.
+     */
+    @Deprecated
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull SchemaContext schemaContext) {
+        return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
     }
 
-    public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext) {
-        return new JsonParserStream(writer, schemaContext, schemaContext);
+    /**
+     * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+     * and {@link SchemaContext}. The stream will be logically rooted at the specified parent node.
+     *
+     * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+     * @param schemaContext {@link SchemaContext} to use
+     * @param parentNode Logical root node
+     * @return A new {@link JsonParserStream}
+     * @throws NullPointerException if any of the arguments are null
+     *
+     * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory, SchemaNode)} instead.
+     */
+    @Deprecated
+    public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+            final @NonNull SchemaContext schemaContext, final @NonNull SchemaNode parentNode) {
+        return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext),
+            parentNode);
     }
 
     public JsonParserStream parse(final JsonReader reader) {
@@ -302,7 +354,7 @@ public final class JsonParserStream implements Closeable, Flushable {
             moduleNamePart = childName.substring(0, lastIndexOfColon);
             nodeNamePart = childName.substring(lastIndexOfColon + 1);
 
-            final Module m = schema.findModuleByName(moduleNamePart, null);
+            final Module m = codecs.getSchemaContext().findModuleByName(moduleNamePart, null);
             namespace = m == null ? null : m.getNamespace();
         } else {
             nodeNamePart = childName;
@@ -330,7 +382,7 @@ public final class JsonParserStream implements Closeable, Flushable {
         for (final URI potentialUri : potentialUris) {
             builder.append("\n");
             //FIXME how to get information about revision from JSON input? currently first available is used.
-            builder.append(schema.findModuleByNamespace(potentialUri).iterator().next().getName());
+            builder.append(codecs.getSchemaContext().findModuleByNamespace(potentialUri).iterator().next().getName());
         }
         return builder.toString();
     }
index 7c5af6c04be85491a0b136da90ddef9e7796cea2..c02f4715fb862dbcd59a720222e8aabd856b5547 100644 (file)
@@ -68,7 +68,8 @@ public class AnyXmlSupportTest {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -103,7 +104,8 @@ public class AnyXmlSupportTest {
 
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index f4157ea535ea6081f61161561d8e2e19c0180223..3c90c967d723a6f9671bab6b53ceaeabf5a9d1cd 100644 (file)
@@ -47,7 +47,8 @@ public class Bug4501Test {
         final String inputJson = loadTextFile("/bug-4501/json/foo-correct.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertTrue(transformedInput instanceof UnkeyedListNode);
@@ -65,7 +66,8 @@ public class Bug4501Test {
         final String inputJson = loadTextFile("/bug-4501/json/foo-incorrect.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
 
         try {
             jsonParser.parse(new JsonReader(new StringReader(inputJson)));
index 32ac33c24891097052824e4bc0bcc34611af06a9..3a5c83431e6a1ad5ec05a14bce045415a16b34bf 100644 (file)
@@ -47,7 +47,8 @@ public class Bug4969Test {
         final String inputJson = TestUtils.loadTextFile("/bug-4969/json/foo.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
 
@@ -106,7 +107,8 @@ public class Bug4969Test {
         final String inputJson = TestUtils.loadTextFile("/leafref/json/data.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index 4c937f2fcb695277a0674313715857e2cba5e521..4c91f5f257cd130430d74d28a01479dc66be616d 100644 (file)
@@ -77,8 +77,8 @@ public class Bug5446Test {
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                JsonWriterFactory.createJsonWriter(writer, 2));
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index 8c4f14cc865504f88bb45aa4c12276fc7ce34f92..7713a818efdc412966f2cfc0c8df4e4a763609b8 100644 (file)
@@ -44,7 +44,8 @@ public class Bug6112Test {
 
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         return result.getResult();
     }
index 682fcc676579556c14cd9c3f5a081bd9ec5eaaf4..0d78dbad7d46b5ed084123031b801e781dcd5f5f 100644 (file)
@@ -64,8 +64,8 @@ public class Bug7246Test {
             throws IOException, URISyntaxException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), path, new URI(NS),
-                JsonWriterFactory.createJsonWriter(writer, 2));
+                JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), path,
+                URI.create(NS), JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index 618dca1e5519d82802a32856418ef7ec80825737..f94d0da75f9e1486a1de0eb792cf9136110dc841 100644 (file)
@@ -33,7 +33,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.RFC7951.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -48,7 +49,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -62,7 +64,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -76,7 +79,8 @@ public class Bug8083Test {
         // deserialization
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
index 5ab681dc290ed29a6282ebf4edf2bb9ce7f17d00..09d58630d81e12a6de71d284a86a498211859692 100644 (file)
@@ -197,7 +197,8 @@ public class JsonStreamToNormalizedNodeTest {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
         final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -210,7 +211,8 @@ public class JsonStreamToNormalizedNodeTest {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
         final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -254,7 +256,8 @@ public class JsonStreamToNormalizedNodeTest {
                                         .build())
                                 .build()).build();
 
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertNotNull(transformedInput);
@@ -265,7 +268,8 @@ public class JsonStreamToNormalizedNodeTest {
             final NormalizedNode<?, ?> awaitedStructure) {
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
         assertEquals("Transformation of json input to normalized node wasn't successful.", awaitedStructure,
index fcba4f921b6952cc8e1bb29b8744c09f4b92d11a..fb4a6986a5bf0f63ff1c67706a039e68fc579183 100644 (file)
@@ -349,9 +349,9 @@ public class NormalizedNodeToJsonStreamTest {
     private static String normalizedNodeToJsonStreamTransformation(final Writer writer,
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
-        final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.
-                createExclusiveWriter(JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                    JsonWriterFactory.createJsonWriter(writer, 2));
+        final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index 3585cf56430d94b7abd980d022442e61635e1628..51bfde1789a196f720e5b601d3120e9523f5303d 100644 (file)
@@ -16,7 +16,6 @@ import java.io.StringReader;
 import java.io.StringWriter;
 import java.net.URISyntaxException;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
 import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
@@ -59,7 +58,8 @@ public class StreamToNormalizedNodeTest {
         final LoggingNormalizedNodeStreamWriter logWriter = new LoggingNormalizedNodeStreamWriter();
 
         // JSON -> StreamWriter parser
-        try (final JsonParserStream jsonHandler = JsonParserStream.create(logWriter, schemaContext)) {
+        try (JsonParserStream jsonHandler = JsonParserStream.create(logWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
             // Process multiple readers, flush()/close() as needed
             jsonHandler.parse(reader);
         }
@@ -71,7 +71,6 @@ public class StreamToNormalizedNodeTest {
      *
      * @throws IOException
      */
-    @Ignore
     @Test
     public void immutableNormalizedNodeStreamWriterDemonstration() throws IOException {
         /*
@@ -84,7 +83,8 @@ public class StreamToNormalizedNodeTest {
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
 
         // JSON -> StreamWriter parser
-        try (JsonParserStream handler = JsonParserStream.create(streamWriter, schemaContext)) {
+        try (JsonParserStream handler = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
             handler.parse(new JsonReader(new StringReader(streamAsString)));
         }
 
@@ -104,9 +104,9 @@ public class StreamToNormalizedNodeTest {
 
         // StreamWriter which outputs JSON strings
         // StreamWriter which outputs JSON strings
-        final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.
-                createExclusiveWriter(JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                    JsonWriterFactory.createJsonWriter(writer, 2));
+        final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
 
         // NormalizedNode -> StreamWriter
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
index c6b8bf693f98eeae104dc608ca06881b65074f34..1615e3177cc851e79a065b9b897555e70ddce69a 100644 (file)
@@ -68,7 +68,7 @@ public class TestUtils {
             final SchemaPath rootPath) throws IOException {
         final Writer writer = new StringWriter();
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), rootPath, null,
+                JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), rootPath, null,
                 JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(data);
index d4914959570d2efea1795701258259a121ad125e..f4b57ad2793a3c5fb3c4de79c3d362fab6d21aea 100644 (file)
@@ -84,7 +84,8 @@ public class YangModeledAnyXmlSupportTest {
         final String inputJson = loadTextFile("/yang-modeled-anyxml/json/baz.json");
         final NormalizedNodeResult result = new NormalizedNodeResult();
         final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
-        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+        final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
         jsonParser.parse(new JsonReader(new StringReader(inputJson)));
         final NormalizedNode<?, ?> transformedInput = result.getResult();
 
@@ -110,8 +111,8 @@ public class YangModeledAnyXmlSupportTest {
             final NormalizedNode<?, ?> inputStructure) throws IOException {
 
         final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
-                JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
-                JsonWriterFactory.createJsonWriter(writer, 2));
+            JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+            JsonWriterFactory.createJsonWriter(writer, 2));
         final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
         nodeWriter.write(inputStructure);
 
index 1d89e855d96ec7f4295948c9045e8f5c63af72b2..57ac633d73cae8c7b4c69e3234fa0ff7365a84e6 100644 (file)
@@ -109,6 +109,8 @@ public abstract class AbstractCodecFactory<T extends TypeAwareCodec<?, ?, ?>> {
 
     protected abstract T identityRefCodec(IdentityrefTypeDefinition type, QNameModule module);
 
+    // FIXME: there really are two flavors, as 'require-instance true' needs to be validated. In order to deal
+    //        with that, though, we need access to the current data store.
     protected abstract T instanceIdentifierCodec(InstanceIdentifierTypeDefinition type);
 
     protected abstract T intCodec(IntegerTypeDefinition type);