2 * Copyright © 2019 Orange, Inc. and others. All rights reserved.
4 * This program and the accompanying materials are made available under the
5 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
6 * and is available at http://www.eclipse.org/legal/epl-v10.html
8 package org.opendaylight.transportpce.test.utils;
import com.google.common.util.concurrent.FluentFuture;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import org.opendaylight.mdsal.binding.api.DataBroker;
import org.opendaylight.mdsal.binding.api.WriteTransaction;
import org.opendaylight.mdsal.common.api.CommitInfo;
import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.transportpce.test.DataStoreContext;
import org.opendaylight.transportpce.test.converter.XMLDataObjectConverter;
import org.opendaylight.yang.gen.v1.http.org.opendaylight.transportpce.portmapping.rev220114.Network;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.network.rev180226.Networks;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
31 public final class TopologyDataUtils {
33 private static final Logger LOG = LoggerFactory.getLogger(TopologyDataUtils.class);
35 @SuppressWarnings({"unchecked","rawtypes"})
36 // FIXME check if the InstanceIdentifier raw type can be avoided
37 // Raw types use are discouraged since they lack type safety.
38 // Resulting Problems are observed at run time and not at compile time
39 public static <T> void writeTopologyFromFileToDatastore(DataStoreContext dataStoreContextUtil, String file,
40 InstanceIdentifier ii) throws InterruptedException, ExecutionException {
41 Networks networks = null;
42 File topoFile = new File(file);
43 if (topoFile.exists()) {
44 String fileName = topoFile.getName();
45 try (InputStream targetStream = new FileInputStream(topoFile)) {
46 Optional<NormalizedNode> transformIntoNormalizedNode = XMLDataObjectConverter
47 .createWithDataStoreUtil(dataStoreContextUtil).transformIntoNormalizedNode(targetStream);
48 if (!transformIntoNormalizedNode.isPresent()) {
49 throw new IllegalStateException(String.format(
50 "Could not transform the input %s into normalized nodes", fileName));
52 Optional<DataObject> dataObject = XMLDataObjectConverter.createWithDataStoreUtil(dataStoreContextUtil)
53 .getDataObject(transformIntoNormalizedNode.get(), Networks.QNAME);
54 if (!dataObject.isPresent()) {
55 throw new IllegalStateException("Could not transform normalized nodes into data object");
57 networks = (Networks) dataObject.get();
59 } catch (IOException e) {
60 LOG.error("An error occured while reading file {}", file, e);
63 LOG.error("xml file {} not found at {}", topoFile.getName(), topoFile.getAbsolutePath());
65 if (networks == null) {
66 throw new IllegalStateException("Network is null cannot write it to datastore");
68 FluentFuture<? extends CommitInfo> commitFuture = writeTransaction(dataStoreContextUtil.getDataBroker(), ii,
69 networks.nonnullNetwork().values().stream().findFirst().get());
71 LOG.info("extraction from {} stored with success in datastore", topoFile.getName());
74 @SuppressWarnings({"unchecked","rawtypes"})
75 // FIXME check if the InstanceIdentifier raw type can be avoided
76 // Raw types use are discouraged since they lack type safety.
77 // Resulting Problems are observed at run time and not at compile time
78 private static FluentFuture<? extends CommitInfo> writeTransaction(DataBroker dataBroker,
79 InstanceIdentifier instanceIdentifier, DataObject object) {
80 WriteTransaction transaction = dataBroker.newWriteOnlyTransaction();
81 transaction.put(LogicalDatastoreType.CONFIGURATION, instanceIdentifier, object);
82 return transaction.commit();
85 public static void writePortmappingFromFileToDatastore(DataStoreContext dataStoreContextUtil, String file)
86 throws InterruptedException, ExecutionException {
87 Network result = null;
88 File portmappingFile = new File(file);
89 if (portmappingFile.exists()) {
90 String fileName = portmappingFile.getName();
91 try (InputStream targetStream = new FileInputStream(portmappingFile)) {
92 Optional<NormalizedNode> transformIntoNormalizedNode = null;
93 transformIntoNormalizedNode = XMLDataObjectConverter.createWithDataStoreUtil(dataStoreContextUtil)
94 .transformIntoNormalizedNode(targetStream);
95 if (!transformIntoNormalizedNode.isPresent()) {
96 throw new IllegalStateException(String.format(
97 "Could not transform the input %s into normalized nodes", fileName));
99 Optional<DataObject> dataObject = XMLDataObjectConverter.createWithDataStoreUtil(dataStoreContextUtil)
100 .getDataObject(transformIntoNormalizedNode.get(), Network.QNAME);
101 if (!dataObject.isPresent()) {
102 throw new IllegalStateException("Could not transform normalized nodes into data object");
104 result = (Network) dataObject.get();
106 } catch (IOException e) {
107 LOG.error("An error occured while reading file {}", file, e);
110 LOG.error("xml file {} not found at {}", portmappingFile.getName(), portmappingFile.getAbsolutePath());
112 InstanceIdentifier<Network> portmappingIID = InstanceIdentifier.builder(Network.class).build();
113 FluentFuture<? extends CommitInfo> writeTransaction = writeTransaction(dataStoreContextUtil.getDataBroker(),
114 portmappingIID, result);
115 writeTransaction.get();
116 LOG.info("portmapping-example stored with success in datastore");
119 private TopologyDataUtils() {