*/
package org.opendaylight.yangtools.yang.data.codec.gson;
+import static com.google.common.base.Verify.verifyNotNull;
+
import com.google.common.annotations.Beta;
-import com.google.common.base.Stopwatch;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
import java.util.List;
import java.util.Optional;
+import java.util.function.BiFunction;
import org.opendaylight.yangtools.yang.common.QNameModule;
import org.opendaylight.yangtools.yang.data.impl.codec.AbstractIntegerStringCodec;
import org.opendaylight.yangtools.yang.data.impl.codec.BinaryStringCodec;
import org.opendaylight.yangtools.yang.data.impl.codec.StringStringCodec;
import org.opendaylight.yangtools.yang.data.util.codec.AbstractCodecFactory;
import org.opendaylight.yangtools.yang.data.util.codec.CodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
-import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
-import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
-import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BooleanTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Uint8TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.UnknownTypeDefinition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
 * Factory for creating JSON equivalents of codecs. Each instance of this object is bound to
 * a particular {@link SchemaContext}, but can be reused by multiple threads.
 */
@Beta
public final class JSONCodecFactory extends AbstractCodecFactory<JSONCodec<?>> {
- private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
- @Override
- public JSONCodecFactory load(final SchemaContext key) {
- final Stopwatch sw = Stopwatch.createStarted();
- final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
- final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache);
- final int visitedLeaves = requestCodecsForChildren(lazy, key);
- sw.stop();
-
- final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
- LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
- cache.simpleSize(), cache.complexSize(), sw);
- return new JSONCodecFactory(key, cache);
- }
-
- private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
- int ret = 0;
- for (DataSchemaNode child : parent.getChildNodes()) {
- if (child instanceof TypedDataSchemaNode) {
- lazy.codecFor((TypedDataSchemaNode) child);
- ++ret;
- } else if (child instanceof DataNodeContainer) {
- ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
- }
- }
-
- return ret;
- }
- }
-
- private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactory.class);
-
- // Weak keys to retire the entry when SchemaContext goes away
- private static final LoadingCache<SchemaContext, JSONCodecFactory> PRECOMPUTED = CacheBuilder.newBuilder()
- .weakKeys().build(new EagerCacheLoader());
-
- // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
- private static final LoadingCache<SchemaContext, JSONCodecFactory> SHARED = CacheBuilder.newBuilder()
- .weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
- @Override
- public JSONCodecFactory load(final SchemaContext key) {
- return new JSONCodecFactory(key, new SharedCodecCache<>());
- }
- });
-
private final JSONCodec<?> iidCodec;
- JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache) {
+ // Constructs a factory bound to the given SchemaContext, caching codecs in the supplied cache.
+ // The supplier produces the instance-identifier codec for this factory; its result must be
+ // non-null, which is enforced by verifyNotNull() below.
+ JSONCodecFactory(final SchemaContext context, final CodecCache<JSONCodec<?>> cache,
+ final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
super(context, cache);
- iidCodec = new JSONStringInstanceIdentifierCodec(context, this);
+ iidCodec = verifyNotNull(iidCodecSupplier.apply(context, this));
}
/**
* @param context SchemaContext instance
* @return A sharable {@link JSONCodecFactory}
* @throws NullPointerException if context is null
+ *
+ * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputed(SchemaContext)} instead.
*/
+ @Deprecated
public static JSONCodecFactory getPrecomputed(final SchemaContext context) {
- return PRECOMPUTED.getUnchecked(context);
+ return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputed(context);
}
/**
* @param context SchemaContext instance
* @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
* @throws NullPointerException if context is null
+ *
+ * @deprecated Use {@link JSONCodecFactorySupplier#getPrecomputedIfAvailable(SchemaContext)} instead.
*/
+ @Deprecated
public static Optional<JSONCodecFactory> getPrecomputedIfAvailable(final SchemaContext context) {
- return Optional.ofNullable(PRECOMPUTED.getIfPresent(context));
+ return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getPrecomputedIfAvailable(context);
}
/**
* @param context SchemaContext instance
* @return A sharable {@link JSONCodecFactory}
* @throws NullPointerException if context is null
+ *
+ * @deprecated Use {@link JSONCodecFactorySupplier#getShared(SchemaContext)} instead.
*/
+ @Deprecated
public static JSONCodecFactory getShared(final SchemaContext context) {
- return SHARED.getUnchecked(context);
+ return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context);
}
/**
* @param context SchemaContext instance
* @return A non-sharable {@link JSONCodecFactory}
* @throws NullPointerException if context is null
+ *
+ * @deprecated Use {@link JSONCodecFactorySupplier#createLazy(SchemaContext)} instead.
*/
+ @Deprecated
public static JSONCodecFactory createLazy(final SchemaContext context) {
- return new JSONCodecFactory(context, new LazyCodecCache<>());
+ return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createLazy(context);
}
/**
* @param context SchemaContext instance
* @return A non-sharable {@link JSONCodecFactory}
* @throws NullPointerException if context is null.
+ *
+ * @deprecated Use {@link JSONCodecFactorySupplier#createSimple(SchemaContext)} instead.
*/
+ @Deprecated
public static JSONCodecFactory createSimple(final SchemaContext context) {
- return new JSONCodecFactory(context, NoopCodecCache.getInstance());
+ return JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createSimple(context);
}
@Override
protected JSONCodec<?> instanceIdentifierCodec(final InstanceIdentifierTypeDefinition type) {
- // FIXME: there really are two favors, as 'require-instance true' needs to be validated. In order to deal
- // with that, though, we need access to the current data store.
// Hand out the single instance-identifier codec produced at construction time.
return iidCodec;
}
protected JSONCodec<?> unknownCodec(final UnknownTypeDefinition type) {
// Unknown types have no usable value mapping; NullJSONCodec presumably renders them
// as JSON null — NOTE(review): confirm against NullJSONCodec's implementation.
return NullJSONCodec.INSTANCE;
}
-
}
--- /dev/null
+/*
+ * Copyright (c) 2017 Pantheon Technologies, s.r.o. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.yangtools.yang.data.codec.gson;
+
+import static com.google.common.base.Verify.verifyNotNull;
+import static java.util.Objects.requireNonNull;
+
+import com.google.common.annotations.Beta;
+import com.google.common.base.Stopwatch;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import java.util.Optional;
+import java.util.function.BiFunction;
+import org.eclipse.jdt.annotation.NonNull;
+import org.opendaylight.yangtools.yang.data.util.codec.LazyCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.NoopCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.PrecomputedCodecCache;
+import org.opendaylight.yangtools.yang.data.util.codec.SharedCodecCache;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.TypedDataSchemaNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * API entry point for acquiring {@link JSONCodecFactory} instances.
+ *
+ * @author Robert Varga
+ */
+@Beta
+public enum JSONCodecFactorySupplier {
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with RFC7951.
+     */
+    // FIXME: YANGTOOLS-766: use a different codec
+    RFC7951(JSONStringInstanceIdentifierCodec::new),
+    /**
+     * Source of {@link JSONCodecFactory} instances compliant with draft-lhotka-netmod-yang-json-02.
+     */
+    DRAFT_LHOTKA_NETMOD_YANG_JSON_02(JSONStringInstanceIdentifierCodec::new);
+
+    private static final Logger LOG = LoggerFactory.getLogger(JSONCodecFactorySupplier.class);
+
+    private static final class EagerCacheLoader extends CacheLoader<SchemaContext, JSONCodecFactory> {
+        private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+            iidCodecSupplier;
+
+        EagerCacheLoader(final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec>
+                iidCodecSupplier) {
+            this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        }
+
+        @Override
+        public JSONCodecFactory load(final SchemaContext key) {
+            // Measure how long full pre-computation takes, for the debug log below.
+            final Stopwatch sw = Stopwatch.createStarted();
+            final LazyCodecCache<JSONCodec<?>> lazyCache = new LazyCodecCache<>();
+            final JSONCodecFactory lazy = new JSONCodecFactory(key, lazyCache, iidCodecSupplier);
+            final int visitedLeaves = requestCodecsForChildren(lazy, key);
+            sw.stop();
+
+            // Freeze the lazily-populated cache into an immutable, precomputed one.
+            final PrecomputedCodecCache<JSONCodec<?>> cache = lazyCache.toPrecomputed();
+            LOG.debug("{} leaf nodes resulted in {} simple and {} complex codecs in {}", visitedLeaves,
+                cache.simpleSize(), cache.complexSize(), sw);
+            return new JSONCodecFactory(key, cache, iidCodecSupplier);
+        }
+
+        // Recursively request a codec for every typed child below parent, returning the number
+        // of typed leaves visited. This forces the lazy cache to be fully populated.
+        private static int requestCodecsForChildren(final JSONCodecFactory lazy, final DataNodeContainer parent) {
+            int ret = 0;
+            for (DataSchemaNode child : parent.getChildNodes()) {
+                if (child instanceof TypedDataSchemaNode) {
+                    lazy.codecFor((TypedDataSchemaNode) child);
+                    ++ret;
+                } else if (child instanceof DataNodeContainer) {
+                    ret += requestCodecsForChildren(lazy, (DataNodeContainer) child);
+                }
+            }
+
+            return ret;
+        }
+    }
+
+    private final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier;
+
+    // Weak keys to retire the entry when SchemaContext goes away
+    private final LoadingCache<SchemaContext, JSONCodecFactory> precomputed;
+
+    // Weak keys to retire the entry when SchemaContext goes away and to force identity-based lookup
+    private final LoadingCache<SchemaContext, JSONCodecFactory> shared;
+
+    JSONCodecFactorySupplier(
+            final BiFunction<SchemaContext, JSONCodecFactory, JSONStringInstanceIdentifierCodec> iidCodecSupplier) {
+        this.iidCodecSupplier = requireNonNull(iidCodecSupplier);
+        precomputed = CacheBuilder.newBuilder().weakKeys().build(new EagerCacheLoader(iidCodecSupplier));
+        shared = CacheBuilder.newBuilder().weakKeys().build(new CacheLoader<SchemaContext, JSONCodecFactory>() {
+            @Override
+            public JSONCodecFactory load(final SchemaContext key) {
+                return new JSONCodecFactory(key, new SharedCodecCache<>(), iidCodecSupplier);
+            }
+        });
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present. Returned object can be safely
+     * used by multiple threads concurrently. If the SchemaContext instance does not have a cached instance
+     * of {@link JSONCodecFactory}, it will be completely precomputed before this method will return.
+     *
+     * <p>
+     * Choosing this implementation is appropriate when the memory overhead of keeping a full codec tree is not as
+     * great a concern as predictable performance. When compared to the implementation returned by
+     * {@link #getShared(SchemaContext)}, this implementation is expected to offer higher performance and have lower
+     * peak memory footprint when most of the SchemaContext is actually in use.
+     *
+     * <p>
+     * For call sites which do not want to pay the CPU cost of pre-computing this implementation, but still would like
+     * to use it if is available (by being populated by some other caller), you can use
+     * {@link #getPrecomputedIfAvailable(SchemaContext)}.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getPrecomputed(final @NonNull SchemaContext context) {
+        return verifyNotNull(precomputed.getUnchecked(context));
+    }
+
+    /**
+     * Get a thread-safe, eagerly-caching {@link JSONCodecFactory} for a SchemaContext, if it is available. This
+     * method is a non-blocking equivalent of {@link #getPrecomputed(SchemaContext)} for use in code paths where
+     * the potential of having to pre-compute the implementation is not acceptable. One such scenario is when the
+     * code base wants to opportunistically take advantage of pre-computed version, but is okay with a fallback to
+     * a different implementation.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}, or absent if such an implementation is not available.
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull Optional<JSONCodecFactory> getPrecomputedIfAvailable(final @NonNull SchemaContext context) {
+        return Optional.ofNullable(precomputed.getIfPresent(context));
+    }
+
+    /**
+     * Get a thread-safe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method can, and will,
+     * return the same instance as long as the associated SchemaContext is present or the factory is not invalidated
+     * by memory pressure. Returned object can be safely used by multiple threads concurrently.
+     *
+     * <p>
+     * Choosing this implementation is a safe default, as it will not incur prohibitive blocking, nor will it tie up
+     * memory in face of pressure.
+     *
+     * @param context SchemaContext instance
+     * @return A sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory getShared(final @NonNull SchemaContext context) {
+        return verifyNotNull(shared.getUnchecked(context));
+    }
+
+    /**
+     * Create a new thread-unsafe, lazily-caching {@link JSONCodecFactory} for a SchemaContext. This method will
+     * return distinct objects every time it is invoked. Returned object may not be used from multiple threads
+     * concurrently.
+     *
+     * <p>
+     * This implementation is appropriate for one-off serialization from a single thread. It will aggressively cache
+     * codecs for reuse and will tie them up in memory until the factory is freed.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null
+     */
+    public @NonNull JSONCodecFactory createLazy(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, new LazyCodecCache<>(), iidCodecSupplier);
+    }
+
+    /**
+     * Create a simplistic, thread-safe {@link JSONCodecFactory} for a {@link SchemaContext}. This method will return
+     * distinct objects every time it is invoked. Returned object may be used from multiple threads concurrently.
+     *
+     * <p>
+     * This implementation exists mostly for completeness only, as it does not perform any caching at all and each codec
+     * is computed every time it is requested. This may be useful in extremely constrained environments, where memory
+     * footprint is more critical than performance.
+     *
+     * @param context SchemaContext instance
+     * @return A non-sharable {@link JSONCodecFactory}
+     * @throws NullPointerException if context is null.
+     */
+    public @NonNull JSONCodecFactory createSimple(final @NonNull SchemaContext context) {
+        return new JSONCodecFactory(context, NoopCodecCache.getInstance(), iidCodecSupplier);
+    }
+}
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.transform.dom.DOMSource;
+import org.eclipse.jdt.annotation.NonNull;
import org.opendaylight.yangtools.odlext.model.api.YangModeledAnyXmlSchemaNode;
import org.opendaylight.yangtools.util.xml.UntrustedXML;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
private final Deque<URI> namespaces = new ArrayDeque<>();
private final NormalizedNodeStreamWriter writer;
private final JSONCodecFactory codecs;
- private final SchemaContext schema;
private final DataSchemaNode parentNode;
- private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final JSONCodecFactory codecs, final DataSchemaNode parentNode) {
- this.schema = requireNonNull(schemaContext);
+ // Sole constructor: binds the parser to a stream writer, a codec factory and a logical root node.
+ private JsonParserStream(final NormalizedNodeStreamWriter writer, final JSONCodecFactory codecs,
+ final DataSchemaNode parentNode) {
this.writer = requireNonNull(writer);
this.codecs = requireNonNull(codecs);
// NOTE(review): parentNode is not null-checked although the public factory methods document
// "throws NullPointerException if any of the arguments are null" — confirm whether a
// requireNonNull(parentNode) belongs here.
this.parentNode = parentNode;
}
- private JsonParserStream(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final DataSchemaNode parentNode) {
- this(writer, schemaContext, JSONCodecFactory.getShared(schemaContext), parentNode);
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the top of the SchemaContext associated
+ * with the specified codec factory.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory) {
+ return new JsonParserStream(writer, codecFactory, codecFactory.getSchemaContext());
}
- public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext,
- final SchemaNode parentNode) {
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link JSONCodecFactory}. The stream will be logically rooted at the specified parent node.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param codecFactory {@link JSONCodecFactory} to use for parsing leaves
+ * @param parentNode Logical root node
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ */
+ public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull JSONCodecFactory codecFactory, final @NonNull SchemaNode parentNode) {
if (parentNode instanceof RpcDefinition) {
- return new JsonParserStream(writer, schemaContext, new RpcAsContainer((RpcDefinition) parentNode));
+ // RPCs are adapted via RpcAsContainer so the parser can treat them like an ordinary container.
+ return new JsonParserStream(writer, codecFactory, new RpcAsContainer((RpcDefinition) parentNode));
}
- checkArgument(parentNode instanceof DataSchemaNode, "Instance of DataSchemaNode class awaited.");
- return new JsonParserStream(writer, schemaContext, (DataSchemaNode) parentNode);
+ // Any other root must be a DataSchemaNode; reject everything else with a descriptive message.
+ checkArgument(parentNode instanceof DataSchemaNode, "An instance of DataSchemaNode is expected, %s supplied",
+ parentNode);
+ return new JsonParserStream(writer, codecFactory, (DataSchemaNode) parentNode);
+ }
+
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link SchemaContext}. The stream will be logically rooted at the top of the supplied SchemaContext.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param schemaContext {@link SchemaContext} to use
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ *
+ * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory)} instead.
+ */
+ @Deprecated
+ public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull SchemaContext schemaContext) {
+ return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
}
- public static JsonParserStream create(final NormalizedNodeStreamWriter writer, final SchemaContext schemaContext) {
- return new JsonParserStream(writer, schemaContext, schemaContext);
+ /**
+ * Create a new {@link JsonParserStream} backed by specified {@link NormalizedNodeStreamWriter}
+ * and {@link SchemaContext}. The stream will be logically rooted at the specified parent node.
+ *
+ * @param writer NormalizedNodeStreamWriter to use for instantiation of normalized nodes
+ * @param schemaContext {@link SchemaContext} to use
+ * @param parentNode Logical root node
+ * @return A new {@link JsonParserStream}
+ * @throws NullPointerException if any of the arguments are null
+ *
+ * @deprecated Use {@link #create(NormalizedNodeStreamWriter, JSONCodecFactory, SchemaNode)} instead.
+ */
+ @Deprecated
+ public static JsonParserStream create(final @NonNull NormalizedNodeStreamWriter writer,
+ final @NonNull SchemaContext schemaContext, final @NonNull SchemaNode parentNode) {
+ return create(writer, JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext),
+ parentNode);
}
public JsonParserStream parse(final JsonReader reader) {
moduleNamePart = childName.substring(0, lastIndexOfColon);
nodeNamePart = childName.substring(lastIndexOfColon + 1);
- final Iterator<Module> m = schema.findModules(moduleNamePart).iterator();
+ final Iterator<Module> m = codecs.getSchemaContext().findModules(moduleNamePart).iterator();
namespace = m.hasNext() ? m.next().getNamespace() : null;
} else {
nodeNamePart = childName;
for (final URI potentialUri : potentialUris) {
builder.append('\n');
//FIXME how to get information about revision from JSON input? currently first available is used.
- builder.append(schema.findModules(potentialUri).iterator().next().getName());
+ builder.append(codecs.getSchemaContext().findModules(potentialUri).iterator().next().getName());
}
return builder.toString();
}
// deserialization
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final String inputJson = loadTextFile("/bug-4501/json/foo-correct.json");
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertTrue(transformedInput instanceof UnkeyedListNode);
final String inputJson = loadTextFile("/bug-4501/json/foo-incorrect.json");
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
try {
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final String inputJson = TestUtils.loadTextFile("/bug-4969/json/foo.json");
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
final String inputJson = TestUtils.loadTextFile("/leafref/json/data.json");
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, context);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(context));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final NormalizedNode<?, ?> inputStructure) throws IOException {
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
- JsonWriterFactory.createJsonWriter(writer, 2));
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+ JsonWriterFactory.createJsonWriter(writer, 2));
final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
nodeWriter.write(inputStructure);
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
return result.getResult();
}
throws IOException {
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), path, URI.create(NS),
- JsonWriterFactory.createJsonWriter(writer, 2));
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), path,
+ URI.create(NS), JsonWriterFactory.createJsonWriter(writer, 2));
final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
nodeWriter.write(inputStructure);
// deserialization
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.RFC7951.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
// deserialization
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
// deserialization
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
// deserialization
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
final SchemaNode parentNode = schemaContext.getDataChildByName(CONT_1);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext, parentNode);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), parentNode);
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
.build())
.build()).build();
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertNotNull(transformedInput);
final NormalizedNode<?, ?> awaitedStructure) {
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
assertEquals("Transformation of json input to normalized node wasn't successful.", awaitedStructure,
final NormalizedNode<?, ?> inputStructure) throws IOException {
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
JsonWriterFactory.createJsonWriter(writer, 2));
final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
nodeWriter.write(inputStructure);
import java.io.StringWriter;
import java.net.URISyntaxException;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Test;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
final LoggingNormalizedNodeStreamWriter logWriter = new LoggingNormalizedNodeStreamWriter();
// JSON -> StreamWriter parser
- try (JsonParserStream jsonHandler = JsonParserStream.create(logWriter, schemaContext)) {
+ try (JsonParserStream jsonHandler = JsonParserStream.create(logWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
// Process multiple readers, flush()/close() as needed
jsonHandler.parse(reader);
}
* Demonstrates how to create an immutable NormalizedNode tree from a {@link JsonReader} and
* then writes the data back into string representation.
*/
- @Ignore
@Test
public void immutableNormalizedNodeStreamWriterDemonstration() throws IOException {
/*
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
// JSON -> StreamWriter parser
- try (JsonParserStream handler = JsonParserStream.create(streamWriter, schemaContext)) {
+ try (JsonParserStream handler = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext))) {
handler.parse(new JsonReader(new StringReader(streamAsString)));
}
// StreamWriter which outputs JSON strings
// StreamWriter which outputs JSON strings
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
JsonWriterFactory.createJsonWriter(writer, 2));
// NormalizedNode -> StreamWriter
final SchemaPath rootPath) throws IOException {
final Writer writer = new StringWriter();
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), rootPath, null,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), rootPath, null,
JsonWriterFactory.createJsonWriter(writer, 2));
final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
nodeWriter.write(data);
final String inputJson = loadTextFile("/yang-modeled-anyxml/json/baz.json");
final NormalizedNodeResult result = new NormalizedNodeResult();
final NormalizedNodeStreamWriter streamWriter = ImmutableNormalizedNodeStreamWriter.from(result);
- final JsonParserStream jsonParser = JsonParserStream.create(streamWriter, schemaContext);
+ final JsonParserStream jsonParser = JsonParserStream.create(streamWriter,
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext));
jsonParser.parse(new JsonReader(new StringReader(inputJson)));
final NormalizedNode<?, ?> transformedInput = result.getResult();
final NormalizedNode<?, ?> inputStructure) throws IOException {
final NormalizedNodeStreamWriter jsonStream = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
- JSONCodecFactory.getShared(schemaContext), SchemaPath.ROOT, null,
- JsonWriterFactory.createJsonWriter(writer, 2));
+ JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.getShared(schemaContext), SchemaPath.ROOT, null,
+ JsonWriterFactory.createJsonWriter(writer, 2));
final NormalizedNodeWriter nodeWriter = NormalizedNodeWriter.forStreamWriter(jsonStream);
nodeWriter.write(inputStructure);
protected abstract T identityRefCodec(IdentityrefTypeDefinition type, QNameModule module);
+ // FIXME: there really are two flavors, as 'require-instance true' needs to be validated. In order to deal
+ // with that, though, we need access to the current data store.
protected abstract T instanceIdentifierCodec(InstanceIdentifierTypeDefinition type);
protected abstract T int8Codec(Int8TypeDefinition type);