*/
package org.opendaylight.yangtools.yang.data.impl.schema.tree;
-
import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNodeContainer;
import org.opendaylight.yangtools.yang.data.api.schema.tree.spi.TreeNode;
import org.opendaylight.yangtools.yang.data.api.schema.tree.spi.Version;
this.applyOperation = op;
}
- public OperationWithModification write(final NormalizedNode<?, ?> value) {
+ void write(final NormalizedNode<?, ?> value) {
modification.write(value);
applyOperation.verifyStructure(modification);
- return this;
}
- public OperationWithModification delete() {
+ private void recursiveMerge(final NormalizedNode<?,?> data) {
+ if (data instanceof NormalizedNodeContainer<?,?,?>) {
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ NormalizedNodeContainer<?,?,NormalizedNode<PathArgument, ?>> dataContainer = (NormalizedNodeContainer) data;
+
+ /*
+ * if there was write before on this node and it is of NormalizedNodeContainer type
+ * merge would overwrite our changes. So we create write modifications from data children to
+ * retain children created by past write operation.
+ * These writes will then be pushed down in the tree while there are merge modifications on these children
+ */
+ if (modification.getOperation().equals(LogicalOperation.WRITE)) {
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ NormalizedNodeContainer<?,?,NormalizedNode<PathArgument, ?>> odlDataContainer =
+ (NormalizedNodeContainer) modification.getWrittenValue();
+ for (NormalizedNode<PathArgument, ?> child : odlDataContainer.getValue()) {
+ PathArgument childId = child.getIdentifier();
+ forChild(childId).write(child);
+ }
+ }
+ for (NormalizedNode<PathArgument, ?> child : dataContainer.getValue()) {
+ PathArgument childId = child.getIdentifier();
+ forChild(childId).recursiveMerge(child);
+ }
+ }
+
+ modification.merge(data);
+ }
+
/**
 * Merges {@code data} into the modification held at this node.
 *
 * <p>
 * A merge operation will end up overwriting parts of the tree, retaining others.
 * We want to make sure we do not validate the complete resulting structure, but
 * rather just what was written. In order to do that, we first pretend the data
 * was written, run verification and then perform the merge -- with the explicit
 * assumption that adding the newly-validated data with the previously-validated
 * data will not result in invalid data.
 *
 * @param data data to be merged at this node
 */
void merge(final NormalizedNode<?, ?> data) {
    applyOperation.verifyStructure(modification.asNewlyWritten(data));
    recursiveMerge(data);
}
+
+ void delete() {
modification.delete();
- return this;
}
public ModifiedNode getModification() {
return new OperationWithModification(operation, modification);
}
- public void merge(final NormalizedNode<?, ?> data) {
- modification.merge(data);
- applyOperation.verifyStructure(modification);
- }
-
- public OperationWithModification forChild(final PathArgument childId) {
- ModificationApplyOperation childOp = applyOperation.getChild(childId).get();
-
- final boolean isOrdered;
- if (childOp instanceof SchemaAwareApplyOperation) {
- isOrdered = ((SchemaAwareApplyOperation) childOp).isOrdered();
- } else {
- isOrdered = true;
- }
+ private OperationWithModification forChild(final PathArgument childId) {
+ final Optional<ModificationApplyOperation> maybeChildOp = applyOperation.getChild(childId);
+ Preconditions.checkArgument(maybeChildOp.isPresent(), "Attempted to apply operation to non-existent child %s", childId);
- ModifiedNode childMod = modification.modifyChild(childId, isOrdered);
+ ModificationApplyOperation childOp = maybeChildOp.get();
+ ModifiedNode childMod = modification.modifyChild(childId, childOp.getChildPolicy());
- return from(childOp,childMod);
+ return from(childOp, childMod);
}
}