Migrate nullness annotations
[transportpce.git] / test-common / src / main / java / org / opendaylight / transportpce / test / converter / JSONDataObjectConverter.java
index 1e33514fab98b612a7d452d42f4002a6d3f9c438..ba951d7563a8f880a4785e256cdec28fc7ae3c5a 100644 (file)
@@ -17,7 +17,7 @@ import java.io.StringWriter;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
 import java.util.Optional;
-import javax.annotation.Nonnull;
+import org.eclipse.jdt.annotation.NonNull;
 import org.opendaylight.mdsal.binding.dom.codec.api.BindingNormalizedNodeSerializer;
 import org.opendaylight.transportpce.test.DataStoreContext;
 import org.opendaylight.yangtools.yang.binding.DataObject;
@@ -32,8 +32,9 @@ import org.opendaylight.yangtools.yang.data.codec.gson.JsonParserStream;
 import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNormalizedNodeStreamWriter;
 import org.opendaylight.yangtools.yang.data.impl.schema.NormalizedNodeResult;
 import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.api.EffectiveStatementInference;
 import org.opendaylight.yangtools.yang.model.api.SchemaNode;
-import org.opendaylight.yangtools.yang.model.api.SchemaPath;
+import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,7 +53,7 @@ public final class JSONDataObjectConverter extends AbstractDataObjectConverter {
      * @param dataStoreContextUtil datastore context util used to extract codec and schema context
      * @return {@link AbstractDataObjectConverter}
      */
-    public static DataObjectConverter createWithDataStoreUtil(@Nonnull DataStoreContext dataStoreContextUtil) {
+    public static DataObjectConverter createWithDataStoreUtil(@NonNull DataStoreContext dataStoreContextUtil) {
         return new JSONDataObjectConverter(dataStoreContextUtil.getSchemaContext(),
                 dataStoreContextUtil.getBindingDOMCodecServices());
     }
@@ -64,8 +65,8 @@ public final class JSONDataObjectConverter extends AbstractDataObjectConverter {
      * @param codecRegistry codec registry used for converting
      * @return converter
      */
-    public static DataObjectConverter createWithSchemaContext(@Nonnull EffectiveModelContext schemaContext,
-            @Nonnull BindingNormalizedNodeSerializer codecRegistry) {
+    public static DataObjectConverter createWithSchemaContext(@NonNull EffectiveModelContext schemaContext,
+            @NonNull BindingNormalizedNodeSerializer codecRegistry) {
         return new JSONDataObjectConverter(schemaContext, codecRegistry);
     }
 
@@ -77,38 +78,37 @@ public final class JSONDataObjectConverter extends AbstractDataObjectConverter {
      */
     @Override
     public Optional<NormalizedNode> transformIntoNormalizedNode(
-            @Nonnull InputStream inputStream) {
+            @NonNull InputStream inputStream) {
         JsonReader reader = new JsonReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
         return parseInputJSON(reader);
     }
 
     @Override
     public Optional<NormalizedNode> transformIntoNormalizedNode(
-            @Nonnull Reader inputReader, SchemaNode parentSchema) {
+            @NonNull Reader inputReader, SchemaNode parentSchema) {
         throw new UnsupportedOperationException("Not Implemented yet");
     }
 
     @Override
     public Optional<NormalizedNode> transformIntoNormalizedNode(
-            @Nonnull Reader inputReader) {
+            @NonNull Reader inputReader) {
         JsonReader reader = new JsonReader(inputReader);
         return parseInputJSON(reader);
     }
 
     @Override
-    public <T extends DataObject> Writer writerFromDataObject(@Nonnull DataObject object, Class<T> dataObjectClass,
+    public <T extends DataObject> Writer writerFromDataObject(@NonNull DataObject object, Class<T> dataObjectClass,
             ConvertType<T> convertType) {
         Writer writer = new StringWriter();
         JsonWriter jsonWriter = new JsonWriter(writer);
         JSONCodecFactory jsonCodecFactory =
             JSONCodecFactorySupplier.DRAFT_LHOTKA_NETMOD_YANG_JSON_02.createLazy(getSchemaContext());
-        NormalizedNodeStreamWriter create =
-            JSONNormalizedNodeStreamWriter.createExclusiveWriter(jsonCodecFactory,
-            (SchemaPath)null, null, jsonWriter);
-
+        EffectiveStatementInference rootNode = SchemaInferenceStack.of(getSchemaContext()).toInference();
+        NormalizedNodeStreamWriter create = JSONNormalizedNodeStreamWriter.createExclusiveWriter(
+                jsonCodecFactory, rootNode, EffectiveModelContext.NAME.getNamespace(), jsonWriter);
         try (NormalizedNodeWriter normalizedNodeWriter = NormalizedNodeWriter.forStreamWriter(create);) {
             normalizedNodeWriter
-                    .write(convertType.toNormalizedNodes(dataObjectClass.cast(object), dataObjectClass).get());
+                    .write(convertType.toNormalizedNodes(dataObjectClass.cast(object), dataObjectClass).orElseThrow());
         } catch (IOException ioe) {
             throw new IllegalStateException(ioe);
         }
@@ -116,7 +116,7 @@ public final class JSONDataObjectConverter extends AbstractDataObjectConverter {
     }
 
     @Override
-    public <T extends DataObject> Writer writerFromRpcDataObject(@Nonnull DataObject object, Class<T> dataObjectClass,
+    public <T extends DataObject> Writer writerFromRpcDataObject(@NonNull DataObject object, Class<T> dataObjectClass,
             ConvertType<T> convertType, QName rpcOutputQName, String rpcName) {
         return null;
     }