Turn NormalizedNodePayload into a record, with complete non-null semantics.
JIRA: NETCONF-773
Change-Id: I73e35fe4bd55f8382a86de0052839efc7b98be81
Signed-off-by: Robert Varga <robert.varga@pantheon.tech>
public final void writeTo(final NormalizedNodePayload context, final Class<?> type, final Type genericType,
final Annotation[] annotations, final MediaType mediaType, final MultivaluedMap<String, Object> httpHeaders,
final OutputStream entityStream) throws IOException {
- final var data = context.getData();
- if (data == null) {
- return;
- }
-
final var output = requireNonNull(entityStream);
final var stack = context.inference().toSchemaInferenceStack();
// FIXME: this dispatch is here to handle codec transition to 'output', but that should be completely okay with
final var stmt = stack.currentStatement();
if (stmt instanceof RpcEffectiveStatement rpc) {
stack.enterSchemaTree(rpc.output().argument());
- writeOperationOutput(stack, context.getWriterParameters(), (ContainerNode) data, output);
+ writeOperationOutput(stack, context.writerParameters(), (ContainerNode) context.data(), output);
} else if (stmt instanceof ActionEffectiveStatement action) {
stack.enterSchemaTree(action.output().argument());
- writeOperationOutput(stack, context.getWriterParameters(), (ContainerNode) data, output);
+ writeOperationOutput(stack, context.writerParameters(), (ContainerNode) context.data(), output);
}
}
- writeData(stack, context.getWriterParameters(), data, output);
+ writeData(stack, context.writerParameters(), context.data(), output);
}
abstract void writeOperationOutput(@NonNull SchemaInferenceStack stack, @NonNull QueryParameters writerParameters,
import static java.util.Objects.requireNonNull;
-import org.eclipse.jdt.annotation.NonNull;
-import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.jdt.annotation.NonNullByDefault;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.util.SchemaInferenceStack.Inference;
* A RFC8040 overlay from our marriage to NormalizedNodeContext. This represents a NormalizedNode along with further
* messy details needed to deal with the payload.
*/
-public final class NormalizedNodePayload {
- private final @NonNull QueryParameters writerParameters;
- private final @NonNull Inference inference;
- private final NormalizedNode data;
-
- private NormalizedNodePayload(final Inference inference, final NormalizedNode data,
- final QueryParameters writerParameters) {
- this.inference = requireNonNull(inference);
- this.data = data;
- this.writerParameters = requireNonNull(writerParameters);
- }
-
- public static @NonNull NormalizedNodePayload empty(final Inference inference) {
- return new NormalizedNodePayload(inference, null, QueryParameters.empty());
- }
-
- public static @NonNull NormalizedNodePayload of(final Inference inference, final NormalizedNode data) {
- return new NormalizedNodePayload(inference, requireNonNull(data), QueryParameters.empty());
- }
-
- public static @NonNull NormalizedNodePayload ofNullable(final Inference inference, final NormalizedNode data) {
- return data == null ? empty(inference) : of(inference, data);
- }
-
- public static Object ofReadData(final Inference inference, final NormalizedNode data,
- final QueryParameters parameters) {
- return new NormalizedNodePayload(inference, requireNonNull(data), parameters);
- }
-
- public @NonNull Inference inference() {
- return inference;
- }
-
- public @Nullable NormalizedNode getData() {
- return data;
+@NonNullByDefault
+public record NormalizedNodePayload(Inference inference, NormalizedNode data, QueryParameters writerParameters) {
+ public NormalizedNodePayload {
+ requireNonNull(inference);
+ requireNonNull(data);
+ requireNonNull(writerParameters);
}
- public @NonNull QueryParameters getWriterParameters() {
- return writerParameters;
+ public NormalizedNodePayload(final Inference inference, final NormalizedNode data) {
+ this(inference, data, QueryParameters.empty());
}
}
case ALL, CONFIG -> {
final QName type = node.name().getNodeType();
yield Response.status(Status.OK)
- .entity(NormalizedNodePayload.ofReadData(instanceIdentifier.inference(), node, queryParams))
+ .entity(new NormalizedNodePayload(instanceIdentifier.inference(), node, queryParams))
.header("ETag", '"' + type.getModule().getRevision().map(Revision::toString).orElse(null) + "-"
+ type.getLocalName() + '"')
.header("Last-Modified", FORMATTER.format(LocalDateTime.now(Clock.systemUTC())))
.build();
}
case NONCONFIG -> Response.status(Status.OK)
- .entity(NormalizedNodePayload.ofReadData(instanceIdentifier.inference(), node, queryParams))
+ .entity(new NormalizedNodePayload(instanceIdentifier.inference(), node, queryParams))
.build();
};
}
if (resultData == null || resultData.isEmpty()) {
return Response.status(Status.NO_CONTENT).build();
}
- return Response.status(Status.OK).entity(NormalizedNodePayload.of(inference, resultData)).build();
+ return Response.status(Status.OK).entity(new NormalizedNodePayload(inference, resultData)).build();
}
/**
stack.enterDataTree(Restconf.QNAME);
stack.enterDataTree(YANG_LIBRARY_VERSION);
- return NormalizedNodePayload.of(stack.toInference(),
+ return new NormalizedNodePayload(stack.toInference(),
ImmutableNodes.leafNode(YANG_LIBRARY_VERSION, YANG_LIBRARY_REVISION));
}
}
if (resultData == null || resultData.isEmpty()) {
ar.resume(new WebApplicationException(Status.NO_CONTENT));
} else {
- ar.resume(NormalizedNodePayload.of(context.inference(), resultData));
+ ar.resume(new NormalizedNodePayload(context.inference(), resultData));
}
}
return Response.ok()
.location(location)
- .entity(NormalizedNodePayload.of(
+ .entity(new NormalizedNodePayload(
Inference.ofDataTreePath(handlersHolder.getDatabindProvider().currentContext().modelContext(),
Notifi.QNAME, LOCATION_QNAME),
ImmutableNodes.leafNode(LOCATION_NODEID, location.toString())))
public void testWriteEmptyRootContainer() throws IOException {
final EffectiveModelContext schemaContext = mock(EffectiveModelContext.class);
- final NormalizedNodePayload nodePayload = NormalizedNodePayload.of(Inference.ofDataTreePath(schemaContext),
+ final NormalizedNodePayload nodePayload = new NormalizedNodePayload(Inference.ofDataTreePath(schemaContext),
Builders.containerBuilder().withNodeIdentifier(new NodeIdentifier(SchemaContext.NAME)).build());
final ByteArrayOutputStream output = new ByteArrayOutputStream();
@Test
public void testRootContainerWrite() throws IOException {
- final NormalizedNodePayload nodePayload = NormalizedNodePayload.of(
+ final NormalizedNodePayload nodePayload = new NormalizedNodePayload(
Inference.ofDataTreePath(IID_SCHEMA),
Builders.containerBuilder()
.withNodeIdentifier(new NodeIdentifier(SchemaContext.NAME))
final Response response = dataService.readData("example-jukebox:jukebox", uriInfo);
assertNotNull(response);
assertEquals(200, response.getStatus());
- assertEquals(EMPTY_JUKEBOX, ((NormalizedNodePayload) response.getEntity()).getData());
+ assertEquals(EMPTY_JUKEBOX, ((NormalizedNodePayload) response.getEntity()).data());
}
@Test
assertNotNull(response);
assertEquals(200, response.getStatus());
- final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).getData();
+ final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).data();
assertTrue(data instanceof ContainerNode);
final Collection<DataContainerChild> rootNodes = ((ContainerNode) data).body();
assertEquals(1, rootNodes.size());
assertEquals(200, response.getStatus());
// response must contain all child nodes from config and operational containers merged in one container
- final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).getData();
+ final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).data();
assertTrue(data instanceof ContainerNode);
assertEquals(3, ((ContainerNode) data).size());
assertNotNull(((ContainerNode) data).childByArg(CONT_PLAYER.name()));
assertEquals(200, response.getStatus());
// response must contain only config data
- final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).getData();
+ final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).data();
// config data present
assertNotNull(((ContainerNode) data).childByArg(CONT_PLAYER.name()));
assertEquals(200, response.getStatus());
// response must contain only operational data
- final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).getData();
+ final NormalizedNode data = ((NormalizedNodePayload) response.getEntity()).data();
// state data present
assertNotNull(((ContainerNode) data).childByArg(CONT_PLAYER.name()));
final var context = YangParserTestUtils.parseYangResourceDirectory("/restconf/impl");
final var restconfImpl = new RestconfImpl(() -> DatabindContext.ofModel(context));
final var libraryVersion = restconfImpl.getLibraryVersion();
- assertEquals("2019-01-04", libraryVersion.getData().body());
+ assertEquals("2019-01-04", libraryVersion.data().body());
}
}
""".getBytes(StandardCharsets.UTF_8)), mock(UriInfo.class), ar);
verify(ar).resume(response.capture());
- assertSame(result, response.getValue().getData());
+ assertSame(result, response.getValue().data());
}
@Test