package org.opendaylight.controller.sal.connector.remoterpc.impl;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableSet;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+import javax.transaction.HeuristicMixedException;
+import javax.transaction.HeuristicRollbackException;
+import javax.transaction.NotSupportedException;
+import javax.transaction.RollbackException;
+
import org.apache.felix.dm.Component;
-import org.opendaylight.controller.clustering.services.*;
+import org.opendaylight.controller.clustering.services.CacheConfigException;
+import org.opendaylight.controller.clustering.services.CacheExistException;
+import org.opendaylight.controller.clustering.services.CacheListenerAddException;
+import org.opendaylight.controller.clustering.services.ICacheUpdateAware;
+import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
+import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.transaction.HeuristicMixedException;
-import javax.transaction.HeuristicRollbackException;
-import javax.transaction.NotSupportedException;
-import javax.transaction.RollbackException;
-import java.util.*;
-import java.util.concurrent.ConcurrentMap;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableSet;
public class RoutingTableImpl<I, R> implements RoutingTable<I, R>, ICacheUpdateAware<I, R> {
- private Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
+ private final Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
- private IClusterGlobalServices clusterGlobalServices = null;
+ private IClusterGlobalServices clusterGlobalServices = null;
- private ConcurrentMap<I,R> globalRpcCache = null;
- private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; //need routes to ordered by insert-order
+ private ConcurrentMap<I,R> globalRpcCache = null;
+ private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; //need routes to be ordered by insertion order
- public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
- public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
+ public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
+ public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
- public RoutingTableImpl() {
- }
-
- @Override
- public R getGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
- return globalRpcCache.get(routeId);
- }
-
- @Override
- public void addGlobalRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
- try {
-
- log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
- clusterGlobalServices.tbegin();
- if (globalRpcCache.putIfAbsent(routeId, route) != null) {
- throw new DuplicateRouteException(" There is already existing route " + routeId);
- }
- clusterGlobalServices.tcommit();
-
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to create route id="
- + routeId + "with route" + route, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to create with value", e);
+ public RoutingTableImpl() {
}
- }
+ @Override
+ public R getGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
+ return globalRpcCache.get(routeId);
+ }
- @Override
- public void removeGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
- try {
- log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+ @Override
+ public void addGlobalRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
+ try {
+
+ log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
+ clusterGlobalServices.tbegin();
+ if (globalRpcCache.putIfAbsent(routeId, route) != null) {
+ throw new DuplicateRouteException(" There is already existing route " + routeId);
+ }
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to create route id="
+ + routeId + "with route" + route, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to create with value", e);
+ }
- clusterGlobalServices.tbegin();
- globalRpcCache.remove(routeId);
- clusterGlobalServices.tcommit();
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
+ try {
+ log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+
+ clusterGlobalServices.tbegin();
+ globalRpcCache.remove(routeId);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public Set<R> getRoutes(I routeId) {
- Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
- Set<R> routes = rpcCache.get(routeId);
+ @Override
+ public Set<R> getRoutes(final I routeId) {
+ Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
+ Set<R> routes = rpcCache.get(routeId);
- if (routes == null) return Collections.emptySet();
+ if (routes == null) {
+ return Collections.emptySet();
+ }
- return ImmutableSet.copyOf(routes);
- }
+ return ImmutableSet.copyOf(routes);
+ }
- public R getLastAddedRoute(I routeId) {
+ @Override
+ public R getLastAddedRoute(final I routeId) {
- Set<R> routes = getRoutes(routeId);
+ Set<R> routes = getRoutes(routeId);
- if (routes.isEmpty()) return null;
+ if (routes.isEmpty()) {
+ return null;
+ }
- R route = null;
- Iterator<R> iter = routes.iterator();
- while (iter.hasNext())
- route = iter.next();
+ R route = null;
+ Iterator<R> iter = routes.iterator();
+ while (iter.hasNext()) {
+ route = iter.next();
+ }
- return route;
- }
+ return route;
+ }
- @Override
- public void addRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
- Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+ @Override
+ public void addRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
+ Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+
+ try{
+ clusterGlobalServices.tbegin();
+ log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
+ threadSafeAdd(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
+ }
- try{
- clusterGlobalServices.tbegin();
- log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
- threadSafeAdd(routeId, route);
- clusterGlobalServices.tcommit();
+ @Override
+ public void addRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ addRoute(routeId, route);
+ }
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+
+ LinkedHashSet<R> routes = rpcCache.get(routeId);
+ if (routes == null) {
+ return;
+ }
+
+ try {
+ log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
+
+ clusterGlobalServices.tbegin();
+ threadSafeRemove(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public void addRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- addRoute(routeId, route);
+ @Override
+ public void removeRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ removeRoute(routeId, route);
+ }
}
- }
- @Override
- public void removeRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+ /**
+ * This method guarantees that no two threads overwrite each other's changes.
+ * To avoid an infinite loop, it tries 100 times and then throws.
+ */
+ private void threadSafeAdd(final I routeId, final R route) {
+
+ for (int i=0;i<100;i++){
+
+ LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
+ updatedRoutes.add(route);
+ LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
+ if (oldRoutes == null) {
+ return;
+ }
+
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.add(route);
+
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+ }
+ // if the method has not returned by now, it failed to add the route in 100 attempts
+ throw new IllegalStateException("Failed to add route [" + routeId + "]");
+ }
- LinkedHashSet<R> routes = rpcCache.get(routeId);
- if (routes == null) return;
+ /**
+ * This method guarantees that no two threads overwrite each other's changes.
+ * To avoid an infinite loop, it tries 10 times and then throws.
+ */
+ private void threadSafeRemove(final I routeId, final R route) {
+ LinkedHashSet<R> updatedRoutes = null;
+ for (int i=0;i<10;i++){
+ LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
+
+ // if route to be deleted is the only entry in the set then remove routeId from the cache
+ if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
+ rpcCache.remove(routeId);
+ return;
+ }
+
+ // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.remove(route);
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+
+ }
+ // if the method has not returned by now, it failed to remove the route in 10 attempts
+ throw new IllegalStateException("Failed to remove route [" + routeId + "]");
+ }
- try {
- log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
- clusterGlobalServices.tbegin();
- threadSafeRemove(routeId, route);
- clusterGlobalServices.tcommit();
+ // /**
+ // * @deprecated doesn't do anything will be removed once listeners used
+ // * whiteboard pattern Registers listener for sending any change
+ // * notification
+ // * @param listener
+ // */
+ // @Override
+ // public void registerRouteChangeListener(RouteChangeListener listener) {
+ //
+ // }
+
+ // public void setRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.add(rcl);
+ // }else{
+ // log.warn("setRouteChangeListener called with null listener");
+ // }
+ // }
+ //
+ // public void unSetRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.remove(rcl);
+ // }else{
+ // log.warn("unSetRouteChangeListener called with null listener");
+ // }
+ // }
+
+ /**
+ * Returning the set of route change listeners for Unit testing Note: the
+ * package scope is default
+ *
+ * @return List of registered RouteChangeListener<I,R> listeners
+ */
+ // Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
+ // return routeChangeListeners;
+ // }
+ public void setClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ this.clusterGlobalServices = clusterGlobalServices;
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ public void unsetClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
+ this.clusterGlobalServices = null;
+ }
}
- }
- @Override
- public void removeRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- removeRoute(routeId, route);
+ /**
+ * Finds OR Creates clustered cache for Global RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ // let us check here if the cache already exists -- if so don't create
+ if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
+
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
+
+ } else {
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
+ log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
+ }
}
- }
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 100 times then throw
- */
- private void threadSafeAdd(I routeId, R route) {
+ /**
+ * Finds OR Creates clustered cache for Routed RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ if (clusterGlobalServices.existCache(RPC_CACHE)){
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
+ log.debug("Cache exists [{}] ", RPC_CACHE);
+ return;
+ }
+
+ // cache doesn't exist, create one
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", RPC_CACHE);
+ }
- for (int i=0;i<100;i++){
- LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
- updatedRoutes.add(route);
- LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
- if (oldRoutes == null) return;
+ /**
+ * Function called by the dependency manager when all the required
+ * dependencies are satisfied
+ */
+ void init(final Component c) {
+ try {
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.add(route);
+ findOrCreateGlobalRpcCache();
+ findOrCreateRpcCache();
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
+ throw new IllegalStateException("could not construct routing table cache");
+ }
}
- //the method did not already return means it failed to add route in 10 attempts
- throw new IllegalStateException("Failed to add route [" + routeId + "]");
- }
-
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 10 times then throw
- */
- private void threadSafeRemove(I routeId, R route) {
- LinkedHashSet<R> updatedRoutes = null;
- for (int i=0;i<10;i++){
- LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
-
- // if route to be deleted is the only entry in the set then remove routeId from the cache
- if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
- rpcCache.remove(routeId);
- return;
- }
-
- // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.remove(route);
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, R> getGlobalRpcCache() {
+ return this.globalRpcCache;
}
- //the method did not already return means it failed to remove route in 10 attempts
- throw new IllegalStateException("Failed to remove route [" + routeId + "]");
- }
-
-
-// /**
-// * @deprecated doesn't do anything will be removed once listeners used
-// * whiteboard pattern Registers listener for sending any change
-// * notification
-// * @param listener
-// */
-// @Override
-// public void registerRouteChangeListener(RouteChangeListener listener) {
-//
-// }
-
-// public void setRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.add(rcl);
-// }else{
-// log.warn("setRouteChangeListener called with null listener");
-// }
-// }
-//
-// public void unSetRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.remove(rcl);
-// }else{
-// log.warn("unSetRouteChangeListener called with null listener");
-// }
-// }
-
- /**
- * Returning the set of route change listeners for Unit testing Note: the
- * package scope is default
- *
- * @return List of registered RouteChangeListener<I,R> listeners
- */
-// Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
-// return routeChangeListeners;
-// }
- public void setClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- this.clusterGlobalServices = clusterGlobalServices;
- }
-
- public void unsetClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
- this.clusterGlobalServices = null;
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Global RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- // let us check here if the cache already exists -- if so don't create
- if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
-
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
-
- } else {
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
- log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Routed RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- if (clusterGlobalServices.existCache(RPC_CACHE)){
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
- log.debug("Cache exists [{}] ", RPC_CACHE);
- return;
- }
-
- //cache doesnt exist, create one
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", RPC_CACHE);
- }
-
-
- /**
- * Function called by the dependency manager when all the required
- * dependencies are satisfied
- */
- void init(Component c) {
- try {
- findOrCreateGlobalRpcCache();
- findOrCreateRpcCache();
-
- } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
- throw new IllegalStateException("could not construct routing table cache");
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, LinkedHashSet<R>> getRpcCache() {
+ return this.rpcCache;
}
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getGlobalRpcCache() {
- return this.globalRpcCache;
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getRpcCache() {
- return this.rpcCache;
- }
-
- /**
- * This is used from integration test NP rest API to check out the result of the
- * cache population
- * <Note> For testing purpose only-- use it wisely</Note>
- *
- * @return
- */
- public String dumpGlobalRpcCache() {
- Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, R> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ /**
+ * This is used from integration test NP rest API to check out the result of the
+ * cache population
+ * <Note> For testing purpose only-- use it wisely</Note>
+ *
+ * @return
+ */
+ public String dumpGlobalRpcCache() {
+ Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, R> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
-
- public String dumpRpcCache() {
- Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ public String dumpRpcCache() {
+ Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
- /**
- * Invoked when a new entry is available in the cache, the key is only
- * provided, the value will come as an entryUpdate invocation
- *
- * @param key Key for the entry just created
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryCreated(I key, String cacheName, boolean originLocal) {
- // TBD: do we require this.
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ /**
+ * Invoked when a new entry is available in the cache, the key is only
+ * provided, the value will come as an entryUpdate invocation
+ *
+ * @param key Key for the entry just created
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryCreated(final I key, final String cacheName, final boolean originLocal) {
+ // TBD: do we require this.
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ }
}
- }
-
- /**
- * Called anytime a given entry is updated
- *
- * @param key Key for the entry modified
- * @param new_value the new value the key will have
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryUpdated(I key, R new_value, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
- + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given entry is updated
+ *
+ * @param key Key for the entry modified
+ * @param new_value the new value the key will have
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryUpdated(final I key, final R new_value, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
+ + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteUpdated(key, new_value);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteUpdated(key, new_value);
-// }
-// }
- }
-
- /**
- * Called anytime a given key is removed from the ConcurrentHashMap we are
- * listening to.
- *
- * @param key Key of the entry removed
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryDeleted(I key, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + " local = " + originLocal
- + " cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given key is removed from the ConcurrentHashMap we are
+ * listening to.
+ *
+ * @param key Key of the entry removed
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryDeleted(final I key, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + " local = " + originLocal
+ + " cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteDeleted(key);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteDeleted(key);
-// }
-// }
- }
-}
\ No newline at end of file
+}
package org.opendaylight.controller.sal.connector.remoterpc.impl;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
import junit.framework.Assert;
+
import org.apache.felix.dm.Component;
import org.junit.After;
import org.junit.Before;
import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
-import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
-import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.Set;
-import java.util.concurrent.*;
-
-import static org.mockito.Mockito.*;
-
public class RoutingTableImplTest {
- private final URI namespace = URI.create("http://cisco.com/example");
- private final QName QNAME = new QName(namespace, "global");
-
- private IClusterGlobalServices clusterService;
- private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
- ConcurrentMap mockGlobalRpcCache;
- ConcurrentMap mockRpcCache;
-
- @Before
- public void setUp() throws Exception{
- clusterService = mock(IClusterGlobalServices.class);
- routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
- mockGlobalRpcCache = new ConcurrentHashMap<>();
- mockRpcCache = new ConcurrentHashMap<>();
- createRoutingTableCache();
- }
-
- @After
- public void tearDown(){
- reset(clusterService);
- mockGlobalRpcCache = null;
- mockRpcCache = null;
- }
+ private final URI namespace = URI.create("http://cisco.com/example");
+ private final QName QNAME = new QName(namespace, "global");
+
+ private IClusterGlobalServices clusterService;
+ private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
+ ConcurrentMap mockGlobalRpcCache;
+ ConcurrentMap mockRpcCache;
+
+ @Before
+ public void setUp() throws Exception{
+ clusterService = mock(IClusterGlobalServices.class);
+ routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
+ mockGlobalRpcCache = new ConcurrentHashMap<>();
+ mockRpcCache = new ConcurrentHashMap<>();
+ createRoutingTableCache();
+ }
- @Test
- public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
+ @After
+ public void tearDown(){
+ reset(clusterService);
+ mockGlobalRpcCache = null;
+ mockRpcCache = null;
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
- final String expectedRoute = "172.27.12.1:5000";
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
- Assert.assertEquals(mockGlobalRpcCache, latestCache);
- Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
- }
+ final String expectedRoute = "172.27.12.1:5000";
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test (expected = RoutingTable.DuplicateRouteException.class)
- public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
+ ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
+ Assert.assertEquals(mockGlobalRpcCache, latestCache);
+ Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
+ @Test (expected = RoutingTable.DuplicateRouteException.class)
+ public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.addGlobalRoute(routeIdentifier, new String());
- routingTable.addGlobalRoute(routeIdentifier, new String());
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
- @Test
- public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- String expectedRoute = "172.27.12.1:5000";
+ @Test
+ public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ String expectedRoute = "172.27.12.1:5000";
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertEquals(expectedRoute, actualRoute);
- }
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test
- public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertEquals(expectedRoute, actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertNull(actualRoute);
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- @Test
- public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertNull(actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
- routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
- Assert.assertTrue(cache.size() == 1);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
+ routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
+ Assert.assertTrue(cache.size() == 1);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
- Assert.assertTrue(mockRpcCache.size() == 0);
+ }
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ @Test
+ public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
+ Assert.assertTrue(mockRpcCache.size() == 0);
- routingTable.addRoute(routeId, new String());
- Assert.assertTrue(mockRpcCache.size() == 1);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(1, routes.size());
- }
+ routingTable.addRoute(routeId, new String());
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(1, routes.size());
+ }
- Assert.assertTrue(mockRpcCache.size() == 0);
+ @Test
+ public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ Assert.assertTrue(mockRpcCache.size() == 0);
- String route_1 = "10.0.0.1:5955";
- String route_2 = "10.0.0.2:5955";
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ String route_1 = "10.0.0.1:5955";
+ String route_2 = "10.0.0.2:5955";
- Assert.assertTrue(mockRpcCache.size() == 1);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(2, routes.size());
- Assert.assertTrue(routes.contains(route_1));
- Assert.assertTrue(routes.contains(route_2));
- }
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
- ExecutorService threadPool = Executors.newCachedThreadPool();
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(2, routes.size());
+ Assert.assertTrue(routes.contains(route_1));
+ Assert.assertTrue(routes.contains(route_2));
+ }
- int numOfRoutesToAdd = 100;
- String routePrefix_1 = "10.0.0.1:555";
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
- String routePrefix_2 = "10.0.0.1:556";
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+ @Test
+ public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
+ ExecutorService threadPool = Executors.newCachedThreadPool();
+
+ int numOfRoutesToAdd = 100;
+ String routePrefix_1 = "10.0.0.1:555";
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
+ String routePrefix_2 = "10.0.0.1:556";
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+
+ // wait for all tasks to complete; timeout in 10 sec
+ threadPool.shutdown();
+ try {
+ threadPool.awaitTermination(10, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
- // wait for all tasks to complete; timeout in 10 sec
- threadPool.shutdown();
- try {
- threadPool.awaitTermination(10, TimeUnit.SECONDS); //
- } catch (InterruptedException e) {
- e.printStackTrace();
+ Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
}
- Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
- }
-
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
- routingTable.addRoute(null, new String());
- }
+ routingTable.addRoute(null, new String());
+ }
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
- routingTable.addRoute(getRouteIdentifier(), null);
- }
+ routingTable.addRoute(getRouteIdentifier(), null);
+ }
- @Test (expected = UnsupportedOperationException.class)
- public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, new String());
+ @Test (expected = UnsupportedOperationException.class)
+ public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, new String());
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- routes.add(new String()); //can not be modified; should throw
- }
+ routes.add(new String()); //can not be modified; should throw
+ }
- @Test
- public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, "10.0.0.1:5555");
- routingTable.addRoute(routeId, "10.0.0.2:5555");
+ @Test
+ public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, "10.0.0.1:5555");
+ routingTable.addRoute(routeId, "10.0.0.2:5555");
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- Assert.assertEquals(2, routes.size());
- }
+ Assert.assertEquals(2, routes.size());
+ }
- @Test
- public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
- throws Exception {
+ @Test
+ public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
+ throws Exception {
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
- }
+ Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
+ }
- @Test
- public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
+ @Test
+ public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
+ Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.removeRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- }
+ }
- @Test
- public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
+ @Test
+ public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
- routingTable.addRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.addRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- ConcurrentMap cache = routingTable.getRpcCache();
- Assert.assertFalse(cache.containsKey(routeId));
+ routingTable.removeRoute(routeId, route_1);
+ ConcurrentMap cache = routingTable.getRpcCache();
+ Assert.assertFalse(cache.containsKey(routeId));
- }
+ }
- /*
- * Private helper methods
- */
- private void createRoutingTableCache() throws Exception {
+ /*
+ * Private helper methods
+ */
+ private void createRoutingTableCache() throws Exception {
- //here init
- Component c = mock(Component.class);
+ //here init
+ Component c = mock(Component.class);
- when(clusterService.existCache(
- RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockGlobalRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockGlobalRpcCache);
- when(clusterService.existCache(
- RoutingTableImpl.RPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.RPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockRpcCache);
- doNothing().when(clusterService).tbegin();
- doNothing().when(clusterService).tcommit();
+ doNothing().when(clusterService).tbegin();
+ doNothing().when(clusterService).tcommit();
- routingTable.setClusterGlobalServices(this.clusterService);
- routingTable.init(c);
+ routingTable.setClusterGlobalServices(this.clusterService);
+ routingTable.init(c);
- Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
- Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
- }
+ Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
+ Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
+ }
- private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
- InstanceIdentifier identifier = mock(InstanceIdentifier.class);
- when(routeIdentifier.getType()).thenReturn(QNAME);
- when(routeIdentifier.getRoute()).thenReturn(identifier);
+ private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
+ InstanceIdentifier identifier = mock(InstanceIdentifier.class);
+ when(routeIdentifier.getType()).thenReturn(QNAME);
+ when(routeIdentifier.getRoute()).thenReturn(identifier);
- return routeIdentifier;
- }
+ return routeIdentifier;
+ }
- private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
- return new Runnable() {
- @Override
- public void run() {
- for (int i=0;i<numRoutes;i++){
- String route = routePrefix + i;
- try {
- routingTable.addRoute(routeId, route);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- };
- }
+ private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
+ return new Runnable() {
+ @Override
+ public void run() {
+ for (int i=0;i<numRoutes;i++){
+ String route = routePrefix + i;
+ try {
+ routingTable.addRoute(routeId, route);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ };
+ }
}
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
import org.opendaylight.controller.sal.binding.api.BindingAwareProvider.ProviderFunctionality;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
* <li>Notification Service - see {@link NotificationService} and
* {@link NotificationProviderService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.binding.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.binding.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
*
* For more information about session-based access see {@link ConsumerContext}
* and {@link ProviderContext}
- *
- *
- *
*/
public interface BindingAwareBroker {
/**
* functionality) for the consumer and provides access to the SAL
* infrastructure services and other functionality provided by
* {@link Provider}s.
- *
- *
- *
*/
public interface ConsumerContext extends RpcConsumerRegistry {
* @return Session specific implementation of service
*/
<T extends BindingAwareService> T getSALService(Class<T> service);
-
-
}
/**
*/
package org.opendaylight.controller.sal.binding.api;
-import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext;
-
/**
- *
* Session-specific instance of the broker functionality.
*
* <p>
*
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerContext#getSALService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext#getSALService(Class)} method on session
* assigned to the consumer.
*
* <p>
- * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
+ * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
* similar, but provider provides YANG model-based functionality and
* {@link BindingAwareProvider} exposes the necessary supporting functionality
* to implement specific functionality of YANG and to reuse it in the
- * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}
- * s.
- *
- *
- *
+ * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}s.
*/
public interface BindingAwareService {
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler;
import org.opendaylight.controller.md.sal.common.api.data.DataModification;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.common.RpcResult;
public interface DataModificationTransaction extends DataModification<InstanceIdentifier<? extends DataObject>, DataObject> {
-
/**
* Returns an unique identifier for transaction
*
*/
@Override
- public Object getIdentifier();
+ Object getIdentifier();
/**
* Initiates a two-phase commit of candidate data.
* of this changes.
*
*
- * @see DataCommitHandler for further information how two-phase commit is
+ * @see org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler for further information on how two-phase commit is
* processed.
* @param store
* Identifier of the store, where commit should occur.
* encountered errors, if commit was not successful.
*/
@Override
- public Future<RpcResult<TransactionStatus>> commit();
-
-
+ Future<RpcResult<TransactionStatus>> commit();
/**
* Register a listener for transaction
*/
ListenerRegistration<DataTransactionListener> registerListener(DataTransactionListener listener);
-
-
/**
* Listener for transaction state changes
- *
- *
*/
public interface DataTransactionListener extends EventListener {
/**
*/
package org.opendaylight.controller.sal.binding.api.data;
-
import org.opendaylight.controller.md.sal.common.api.data.DataProvisionService;
import org.opendaylight.controller.md.sal.common.api.data.DataReader;
-import org.opendaylight.controller.sal.binding.api.BindingAwareProvider;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
/**
- * DataProviderService is common access point for {@link BindingAwareProvider} providers
+ * DataProviderService is common access point for {@link org.opendaylight.controller.sal.binding.api.BindingAwareProvider} providers
* to access data trees described by the YANG model.
- *
*/
public interface DataProviderService extends DataBrokerService, DataProvisionService<InstanceIdentifier<? extends DataObject>, DataObject> {
-
-
/**
* Registers a data reader for particular subtree of overal YANG data tree.
*
import org.opendaylight.controller.sal.binding.api.rpc.RpcRouter;
import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.RpcService;
-import org.opendaylight.yangtools.yang.binding.annotations.RoutingContext;
public interface RuntimeCodeGenerator {
* Returned instance:
* <ul>
* <li>implements provided subclass of RpcService type and
- * {@link DelegateProxy} interface.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy} interface.
* <li>
* <p>
* delegates all invocations of methods, which are defined in RpcService
* subtype to delegate which is defined by
- * {@link DelegateProxy#setDelegate(Object)}.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)}.
* <p>
* If delegate is not defined (<code>getDelegate() == null</code>)
* implementation throws {@link IllegalStateException}
- * <li>{@link DelegateProxy#getDelegate()} - returns the delegate to which
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#getDelegate()} - returns the delegate to which
* all calls are delegated.
- * <li>{@link DelegateProxy#setDelegate(Object)} - sets the delegate for
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)} - sets the delegate for
* particular instance
*
* </ul>
* - Subclass of RpcService for which direct proxy is to be
* generated.
* @return Instance of RpcService of provided serviceType which implements
- * and {@link DelegateProxy}
+ * and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* @throws IllegalArgumentException
*
*/
* <ul>
* <li>Implements:
* <ul>
- * <li>{@link DelegateProxy}
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* <li>{@link RpcRouter}
* </ul>
* <li>
* <ul>
* <li>
* Implementation uses
- * {@link RpcRouter#getService(Class, InstanceIdentifier)} method to
+ * {@link RpcRouter#getService(Class, org.opendaylight.yangtools.yang.binding.InstanceIdentifier)} method to
* retrieve particular instance to which call will be routed.
* <li>
- * Instance of {@link InstanceIdentifier} is determined by first argument of
+ * Instance of {@link org.opendaylight.yangtools.yang.binding.InstanceIdentifier} is determined by first argument of
* method and is retrieved via method which is annotated with
- * {@link RoutingContext}. Class representing Routing Context Identifier is
- * retrieved by {@link RoutingContext}.
- * <li>If first argument is not defined / {@link RoutingContext} annotation
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * Class representing Routing Context Identifier is retrieved by a
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * <li>If first argument is not defined / {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext} annotation
* is not present on any field invocation will be delegated to default
* service {@link RpcRouter#getDefaultService()}.
* </ul>
* @param serviceType
* - Subclass of RpcService for which Router is to be generated.
* @return Instance of RpcService of provided serviceType which implements
- * also {@link RpcRouter}<T> and {@link DelegateProxy}
+ * also {@link RpcRouter}<T> and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
*/
<T extends RpcService> RpcRouter<T> getRouterFor(Class<T> serviceType,String name) throws IllegalArgumentException;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
-import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
*/
package org.opendaylight.controller.sal.binding.impl.util;
-import java.util.Iterator;
import org.opendaylight.controller.md.sal.common.impl.routing.AbstractDataReadRouter;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-@SuppressWarnings("all")
public class BindingAwareDataReaderRouter extends AbstractDataReadRouter<InstanceIdentifier<? extends DataObject>,DataObject> {
- protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
- return data.iterator().next();
- }
+ @Override
+ protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
+ return data.iterator().next();
+ }
}
*/
package org.opendaylight.controller.sal.compability;
-import org.opendaylight.controller.sal.core.*;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeConnector;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.port.rev130925.PortFeatures;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
-import org.opendaylight.controller.sal.core.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.core.api.data.DataProviderService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationPublishService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationService;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.common.QName;
* <li>RPC Invocation - see {@link ConsumerSession#rpc(QName, CompositeNode)},
* {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)} and
* {@link RpcImplementation}
- * <li>Notification Service - see {@link NotificationService} and
- * {@link NotificationPublishService}
+ * <li>Notification Service - see {@link org.opendaylight.controller.sal.core.api.notify.NotificationService} and
+ * {@link org.opendaylight.controller.sal.core.api.notify.NotificationPublishService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.core.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.core.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
* functionality of the provider from the system.
*/
@Override
- public void close();
+ void close();
@Override
boolean isClosed();
void close();
}
- public interface RoutedRpcRegistration extends RpcRegistration,
- RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
+ public interface RoutedRpcRegistration extends RpcRegistration, RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
}
}
*/
package org.opendaylight.controller.sal.core.api;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-
/**
*
* Session-specific instance of the broker functionality.
*
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerSession#getService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#getService(Class)} method on session
* assigned to the consumer.
*
* <p>
import java.util.Set;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
* {@link Provider#getProviderFunctionality()}
* <li>passing an instance of implementation and {@link QName} of rpc as
* arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, RpcImplementation)}
* </ul>
*
 * The simplified process of RPC invocation is as follows:
*
* <ol>
* <li> {@link Consumer} invokes
- * {@link ConsumerSession#rpc(QName, CompositeNode)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#rpc(QName, CompositeNode)}
* <li> {@link Broker} finds registered {@link RpcImplementation}s
* <li> {@link Broker} invokes
* {@link RpcImplementation#invokeRpc(QName, CompositeNode)}
*/
package org.opendaylight.controller.sal.core.api.notify;
-import org.opendaylight.controller.sal.core.api.Broker;
-import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
 * The simplified process of notification publishing is as follows:
*
* <ol>
- * <li> {@link Provider} invokes {@link #sendNotification(CompositeNode)}
- * <li> {@link Broker} finds {@link NotificationListener}s which subscribed for
+ * <li> {@link org.opendaylight.controller.sal.core.api.Provider} invokes {@link #sendNotification(CompositeNode)}
+ * <li> {@link org.opendaylight.controller.sal.core.api.Broker} finds {@link NotificationListener}s which subscribed for
* the notification type.
*
- * <li>For each subscriber {@link Broker} invokes
+ * <li>For each subscriber {@link org.opendaylight.controller.sal.core.api.Broker} invokes
* {@link NotificationListener#onNotification(CompositeNode)}
* </ol>
*/
package org.opendaylight.controller.sal.core.api.notify;
import org.opendaylight.controller.sal.core.api.BrokerService;
-import org.opendaylight.controller.sal.core.api.Provider;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.common.QName;
* The registration of notification listeners could be done by:
* <ul>
* <li>returning an instance of implementation in the return value of
- * {@link Provider#getProviderFunctionality()}
- * <li>passing an instance of implementation and {@link QName} of rpc as an
- * arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Provider#getProviderFunctionality()}
+ * <li>passing an instance of implementation and {@link QName} of an RPC as an
+ * argument to
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, org.opendaylight.controller.sal.core.api.RpcImplementation)}
* </ul>
*
*
import java.util.List;
import java.util.Map;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier.PathArgument;
import com.google.common.base.Predicates;
/**
- * A set of utility methods for interacting with {@link TreeNode} objects.
+ * A set of utility methods for interacting with {@link org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode} objects.
*/
public final class TreeNodeUtils {
private TreeNodeUtils() {
*/
package org.opendaylight.controller.sal.dom.broker.osgi;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.opendaylight.controller.sal.core.api.BrokerService;
import org.opendaylight.yangtools.concepts.Registration;
import org.osgi.framework.ServiceReference;
-import static com.google.common.base.Preconditions.*;
public abstract class AbstractBrokerServiceProxy<T extends BrokerService> implements AutoCloseable, BrokerService {
private T delegate;
private final ServiceReference<T> reference;
- public AbstractBrokerServiceProxy(ServiceReference<T> ref, T delegate) {
+ public AbstractBrokerServiceProxy(final ServiceReference<T> ref, final T delegate) {
this.delegate = checkNotNull(delegate, "Delegate should not be null.");
this.reference = checkNotNull(ref, "Reference should not be null.");
}
return reference;
}
- private Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
+ private final Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
- protected <R extends Registration<?>> R addRegistration(R registration) {
+ protected <R extends Registration<?>> R addRegistration(final R registration) {
if (registration != null) {
registrations.add(registration);
}
import java.util.Set;
import org.opendaylight.controller.md.sal.common.api.routing.RouteChangeListener;
-import org.opendaylight.controller.sal.core.api.*;
+import org.opendaylight.controller.sal.core.api.Broker;
+import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
+import org.opendaylight.controller.sal.core.api.RpcImplementation;
+import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
+import org.opendaylight.controller.sal.core.api.RpcRegistrationListener;
+import org.opendaylight.controller.sal.core.api.RpcRoutingContext;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
-public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry>
- implements RpcProvisionRegistry {
+public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry> implements RpcProvisionRegistry {
- public RpcProvisionRegistryProxy(ServiceReference<RpcProvisionRegistry> ref, RpcProvisionRegistry delegate) {
+ public RpcProvisionRegistryProxy(final ServiceReference<RpcProvisionRegistry> ref, final RpcProvisionRegistry delegate) {
super(ref, delegate);
}
@Override
- public Broker.RpcRegistration addRpcImplementation(QName rpcType, RpcImplementation implementation) throws IllegalArgumentException {
+ public Broker.RpcRegistration addRpcImplementation(final QName rpcType, final RpcImplementation implementation) throws IllegalArgumentException {
return getDelegate().addRpcImplementation(rpcType, implementation);
}
@Override
- public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(RpcRegistrationListener listener) {
+ public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(final RpcRegistrationListener listener) {
return getDelegate().addRpcRegistrationListener(listener);
}
@Override
- public Broker.RoutedRpcRegistration addRoutedRpcImplementation(QName rpcType, RpcImplementation implementation) {
+ public Broker.RoutedRpcRegistration addRoutedRpcImplementation(final QName rpcType, final RpcImplementation implementation) {
return getDelegate().addRoutedRpcImplementation(rpcType, implementation);
}
@Override
- public void setRoutedRpcDefaultDelegate(RoutedRpcDefaultImplementation defaultImplementation) {
+ public void setRoutedRpcDefaultDelegate(final RoutedRpcDefaultImplementation defaultImplementation) {
getDelegate().setRoutedRpcDefaultDelegate(defaultImplementation);
}
@Override
- public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(L listener) {
+ public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(final L listener) {
return getDelegate().registerRouteChangeListener(listener);
}
}
@Override
- public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(QName rpc, CompositeNode input) {
+ public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(final QName rpc, final CompositeNode input) {
return getDelegate().invokeRpc(rpc, input);
}
}
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.SimpleNode;
-import static com.google.common.base.Preconditions.*;
-
-public class YangDataUtils {
+public final class YangDataUtils {
- public YangDataUtils() {
- // TODO Auto-generated constructor stub
+ private YangDataUtils() {
+ throw new UnsupportedOperationException("Utility class");
}
-
-
- public static Map<Map<QName,Object>,CompositeNode> toIndexMap(List<CompositeNode> nodes,List<QName> keys) {
+ public static Map<Map<QName,Object>,CompositeNode> toIndexMap(final List<CompositeNode> nodes,final List<QName> keys) {
ConcurrentHashMap<Map<QName,Object>,CompositeNode> ret = new ConcurrentHashMap<>();
for(CompositeNode node : nodes) {
Map<QName, Object> key = getKeyMap(node,keys);
return ret;
}
-
-
- public static Map<QName,Object> getKeyMap(CompositeNode node, List<QName> keys) {
+ public static Map<QName,Object> getKeyMap(final CompositeNode node, final List<QName> keys) {
Map<QName,Object> map = new HashMap<>();
for(QName key : keys) {
SimpleNode<?> keyNode = node.getFirstSimpleByName(QName.create(node.getNodeType(), key.getLocalName()));
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
import java.util.Iterator;
import java.util.List;
import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
import org.opendaylight.yangtools.yang.model.api.UsesNode;
-import static com.google.common.base.Preconditions.*;
-
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
-public class YangSchemaUtils {
+public final class YangSchemaUtils {
private static final Function<PathArgument, QName> QNAME_FROM_PATH_ARGUMENT = new Function<PathArgument, QName>(){
@Override
- public QName apply(PathArgument input) {
+ public QName apply(final PathArgument input) {
if(input == null) {
return null;
}
}
};
- private YangSchemaUtils() {
+ private YangSchemaUtils() {
throw new UnsupportedOperationException("Utility class.");
}
-
- public static DataSchemaNode getSchemaNode(SchemaContext schema,InstanceIdentifier path) {
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final InstanceIdentifier path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
return getSchemaNode(schema, FluentIterable.from(path.getPath()).transform(QNAME_FROM_PATH_ARGUMENT));
}
- public static DataSchemaNode getSchemaNode(SchemaContext schema,Iterable<QName> path) {
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final Iterable<QName> path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
if(!path.iterator().hasNext()){
return (DataSchemaNode) previous;
}
- private static DataSchemaNode searchInChoices(DataNodeContainer node, QName arg) {
+ private static DataSchemaNode searchInChoices(final DataNodeContainer node, final QName arg) {
Set<DataSchemaNode> children = node.getChildNodes();
for (DataSchemaNode child : children) {
if (child instanceof ChoiceNode) {
return null;
}
- private static DataSchemaNode searchInCases(ChoiceNode choiceNode, QName arg) {
+ private static DataSchemaNode searchInCases(final ChoiceNode choiceNode, final QName arg) {
Set<ChoiceCaseNode> cases = choiceNode.getCases();
for (ChoiceCaseNode caseNode : cases) {
DataSchemaNode node = caseNode.getDataChildByName(arg);
return null;
}
- private static ContainerSchemaNode toRootDataNode(SchemaContext schema) {
+ private static ContainerSchemaNode toRootDataNode(final SchemaContext schema) {
return new NetconfDataRootNode(schema);
}
private static final class NetconfDataRootNode implements ContainerSchemaNode {
- public NetconfDataRootNode(SchemaContext schema) {
+ public NetconfDataRootNode(final SchemaContext schema) {
// TODO Auto-generated constructor stub
}
}
@Override
- public DataSchemaNode getDataChildByName(QName name) {
+ public DataSchemaNode getDataChildByName(final QName name) {
// TODO Auto-generated method stub
return null;
}
@Override
- public DataSchemaNode getDataChildByName(String name) {
+ public DataSchemaNode getDataChildByName(final String name) {
// TODO Auto-generated method stub
return null;
}
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* Read and write access to stored data is provided only via transactions
* created using {@link #newReadOnlyTransaction()},
* {@link #newWriteOnlyTransaction()} and {@link #newReadWriteTransaction()}, or
- * by creating {@link TransactionChain}.
+ * by creating {@link org.opendaylight.controller.md.sal.common.api.data.TransactionChain}.
*
*/
public interface DOMStore extends DOMStoreTransactionFactory {
/**
- * Registers {@link DataChangeListener} for Data Change callbacks which will
+ * Registers {@link org.opendaylight.controller.md.sal.common.api.data.DataChangeListener} for Data Change callbacks which will
* be triggered on the change of provided subpath. What constitutes a change
* depends on the @scope parameter.
*
*/
package org.opendaylight.controller.sal.core.spi.data;
-import java.util.concurrent.Future;
-
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* read
 * @return a ListenableFuture which contains the read result
* <ul>
- * <li>If data at supplied path exists the {@link Future#get()}
+ * <li>If data at supplied path exists the {@link java.util.concurrent.Future#get()}
* returns Optional object containing data
 * <li>If data at the supplied path does not exist, the
- * {@link Future#get()} returns {@link Optional#absent()}.
+ * {@link java.util.concurrent.Future#get()} returns {@link Optional#absent()}.
* </ul>
*/
ListenableFuture<Optional<NormalizedNode<?,?>>> read(InstanceIdentifier path);
*/
package org.opendaylight.controller.sal.core.spi.data;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.yangtools.concepts.Path;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* specified path.
*
 * If you need to add to or merge with the current object at the specified path, use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
* specified path.
*
 * If you need to add to or merge with the current object at the specified path, use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
*/
package org.opendaylight.controller.config.yang.md.sal.remote.rpc;
-import org.opendaylight.controller.sal.connector.remoterpc.*;
+import org.opendaylight.controller.sal.connector.remoterpc.ClientImpl;
+import org.opendaylight.controller.sal.connector.remoterpc.RemoteRpcProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.RoutingTableProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.ServerImpl;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
import org.osgi.framework.BundleContext;
*
*/
public final class ZeroMQServerModule
- extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
+extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
- private static final Integer ZEROMQ_ROUTER_PORT = 5554;
- private BundleContext bundleContext;
+ private static final Integer ZEROMQ_ROUTER_PORT = 5554;
+ private BundleContext bundleContext;
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
- super(identifier, dependencyResolver);
- }
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ super(identifier, dependencyResolver);
+ }
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- ZeroMQServerModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final ZeroMQServerModule oldModule, final java.lang.AutoCloseable oldInstance) {
- super(identifier, dependencyResolver, oldModule, oldInstance);
- }
+ super(identifier, dependencyResolver, oldModule, oldInstance);
+ }
- @Override
- protected void customValidation() {
- // Add custom validation for module attributes here.
- }
+ @Override
+ protected void customValidation() {
+ // Add custom validation for module attributes here.
+ }
- @Override
- public java.lang.AutoCloseable createInstance() {
+ @Override
+ public java.lang.AutoCloseable createInstance() {
- Broker broker = getDomBrokerDependency();
+ Broker broker = getDomBrokerDependency();
- final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
+ final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
- ServerImpl serverImpl = new ServerImpl(port);
+ ServerImpl serverImpl = new ServerImpl(port);
- ClientImpl clientImpl = new ClientImpl();
+ ClientImpl clientImpl = new ClientImpl();
- RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
+ RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
- RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
- facade.setRoutingTableProvider(provider);
- facade.setContext(bundleContext);
- facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
+ RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
+ facade.setRoutingTableProvider(provider);
+ facade.setContext(bundleContext);
+ facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
- broker.registerProvider(facade, bundleContext);
- return facade;
- }
+ broker.registerProvider(facade, bundleContext);
+ return facade;
+ }
- public void setBundleContext(BundleContext bundleContext) {
- this.bundleContext = bundleContext;
- }
+ public void setBundleContext(final BundleContext bundleContext) {
+ this.bundleContext = bundleContext;
+ }
}
import org.opendaylight.controller.sal.connector.remoterpc.dto.Message;
import org.opendaylight.controller.sal.connector.remoterpc.dto.RouteIdentifierImpl;
import org.opendaylight.controller.sal.connector.remoterpc.util.XmlUtils;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
/**
- * An implementation of {@link RpcImplementation} that makes
+ * An implementation of {@link org.opendaylight.controller.sal.core.api.RpcImplementation} that makes
* remote RPC calls
*/
public class ClientImpl implements RemoteRpcClient {
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <packaging>bundle</packaging>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller.tests</groupId>
+ <artifactId>sal-remoterpc-connector-test-parent</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
+ <packaging>bundle</packaging>
+ <dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <configuration>
- <instructions>
- <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
- </instructions>
- </configuration>
- </plugin>
- </plugins>
- </build>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>containermanager</artifactId>
+ <version>0.5.2-SNAPSHOT</version>
+ </dependency>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal</artifactId>
+ <version>0.8.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-binding-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-impl</artifactId>
+ <version>${yangtools.version}</version>
+ </dependency>
+ </dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-impl</artifactId>
- <version>${yangtools.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <configuration>
+ <instructions>
+ <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
+ </instructions>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
import java.io.InputStream;
import java.net.URI;
import java.util.Hashtable;
-import java.util.concurrent.*;
+import java.util.concurrent.Future;
import org.opendaylight.controller.sal.core.api.AbstractConsumer;
import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.opendaylight.yangtools.yang.data.impl.XmlTreeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.CompositeNodeTOImpl;
import javax.xml.stream.XMLStreamException;
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
- <packaging>pom</packaging>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+ <packaging>pom</packaging>
<modules>
<module>consumer-service</module>
<module>test-it</module>
<module>test-nb</module>
</modules>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.8.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-remoterpc-connector</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
</dependencies>
import org.slf4j.LoggerFactory;
import java.net.URI;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
public class ExampleProvider extends AbstractProvider implements RpcImplementation {
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-it</artifactId>
<scm>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-broker-impl</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.ops4j.pax.exam</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>ietf-netconf-monitoring</artifactId>
+ <version>0.2.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
import junit.framework.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.net.URI;
-import java.util.Hashtable;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.baseModelBundles;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.bindingAwareSalBundles;
-import static org.ops4j.pax.exam.CoreOptions.*;
+//import static org.ops4j.pax.exam.CoreOptions.*;
@RunWith(PaxExam.class)
public class RouterTest {
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-nb</artifactId>
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import org.osgi.framework.*;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
*/
package org.opendaylight.controller.sal.rest.doc;
+import java.util.Collection;
+import java.util.Collections;
+
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.impl.ApiDocGenerator;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Collection;
-import java.util.Collections;
-
-
-public class DocProvider implements BundleActivator,
- ServiceTrackerCustomizer<Broker, Broker>,
- Provider,
- AutoCloseable {
-
- private Logger _logger = LoggerFactory.getLogger(DocProvider.class);
-
- private ServiceTracker<Broker, Broker> brokerServiceTracker;
- private BundleContext bundleContext;
- private Broker.ProviderSession session;
-
- @Override
- public void close() throws Exception {
- stop(bundleContext);
- }
-
- @Override
- public void onSessionInitiated(Broker.ProviderSession providerSession) {
- SchemaService schemaService = providerSession.getService(SchemaService.class);
- ApiDocGenerator.getInstance().setSchemaService(schemaService);
-
- _logger.debug("Restconf API Explorer started");
-
- }
-
- @Override
- public Collection<ProviderFunctionality> getProviderFunctionality() {
- return Collections.emptySet();
- }
-
- @Override
- public void start(BundleContext context) throws Exception {
- bundleContext = context;
- brokerServiceTracker = new ServiceTracker(context, Broker.class, this);
- brokerServiceTracker.open();
- }
-
- @Override
- public void stop(BundleContext context) throws Exception {
- if (brokerServiceTracker != null)
- brokerServiceTracker.close();
-
- if (session != null)
- session.close();
- }
-
- @Override
- public Broker addingService(ServiceReference<Broker> reference) {
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- return broker;
- }
-
- @Override
- public void modifiedService(ServiceReference<Broker> reference, Broker service) {
- if (session != null)
- session.close();
-
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- }
-
- @Override
- public void removedService(ServiceReference<Broker> reference, Broker service) {
- bundleContext.ungetService(reference);
- }
+public class DocProvider implements BundleActivator, ServiceTrackerCustomizer<Broker, Broker>, Provider, AutoCloseable {
+
+ private static final Logger _logger = LoggerFactory.getLogger(DocProvider.class);
+
+ private ServiceTracker<Broker, Broker> brokerServiceTracker;
+ private BundleContext bundleContext;
+ private Broker.ProviderSession session;
+
+ @Override
+ public void close() throws Exception {
+ stop(bundleContext);
+ }
+
+ @Override
+ public void onSessionInitiated(final Broker.ProviderSession providerSession) {
+ SchemaService schemaService = providerSession.getService(SchemaService.class);
+ ApiDocGenerator.getInstance().setSchemaService(schemaService);
+
+ _logger.debug("Restconf API Explorer started");
+ }
+
+ @Override
+ public Collection<ProviderFunctionality> getProviderFunctionality() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public void start(final BundleContext context) throws Exception {
+ bundleContext = context;
+ brokerServiceTracker = new ServiceTracker<>(context, Broker.class, this);
+ brokerServiceTracker.open();
+ }
+
+ @Override
+ public void stop(final BundleContext context) throws Exception {
+ if (brokerServiceTracker != null) {
+ brokerServiceTracker.close();
+ }
+
+ if (session != null) {
+ session.close();
+ }
+ }
+
+ @Override
+ public Broker addingService(final ServiceReference<Broker> reference) {
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ return broker;
+ }
+
+ @Override
+ public void modifiedService(final ServiceReference<Broker> reference, final Broker service) {
+ if (session != null) {
+ session.close();
+ }
+
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ }
+
+ @Override
+ public void removedService(final ServiceReference<Broker> reference, final Broker service) {
+ bundleContext.ungetService(reference);
+ }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
-import com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.net.URI;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.ws.rs.core.UriInfo;
+
import org.json.JSONException;
import org.json.JSONObject;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder;
-import org.opendaylight.controller.sal.rest.doc.swagger.*;
+import org.opendaylight.controller.sal.rest.doc.swagger.Api;
+import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
+import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
+import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
+import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
+import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-import java.net.URI;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
+import com.google.common.base.Preconditions;
/**
* This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation.
*/
public class ApiDocGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
-
- private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
- private ObjectMapper mapper = new ObjectMapper();
- private final ModelGenerator jsonConverter = new ModelGenerator();
-
- private SchemaService schemaService;
-
- private final String API_VERSION = "1.0.0";
- private final String SWAGGER_VERSION = "1.2";
- private final String RESTCONF_CONTEXT_ROOT = "restconf";
- private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
-
- //For now its {@link HashMap}. It will be changed to thread-safe Map when schema change listener is implemented.
- private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
-
- private ApiDocGenerator(){
- mapper.registerModule(new JsonOrgModule());
- mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
- }
-
- /**
- * Returns singleton instance
- * @return
- */
- public static ApiDocGenerator getInstance() {
- return INSTANCE;
- }
-
- /**
- *
- * @param schemaService
- */
- public void setSchemaService(SchemaService schemaService) {
- this.schemaService = schemaService;
- }
- /**
- *
- * @param uriInfo
- * @return list of modules converted to swagger compliant resource list.
- */
- public ResourceList getResourceListing(UriInfo uriInfo) {
-
- Preconditions.checkState(schemaService != null);
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
-
- Set<Module> modules = schemaContext.getModules();
-
- ResourceList resourceList = new ResourceList();
- resourceList.setApiVersion(API_VERSION);
- resourceList.setSwaggerVersion(SWAGGER_VERSION);
-
- List<Resource> resources = new ArrayList<>(modules.size());
- _logger.info("Modules found [{}]", modules.size());
-
- for (Module module : modules) {
- Resource resource = new Resource();
- String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
-
- _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
- ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
-
- if (doc != null) {
- URI uri = uriInfo.getRequestUriBuilder().
- path(generateCacheKey(module.getName(), revisionString)).
- build();
-
- resource.setPath(uri.toASCIIString());
- resources.add(resource);
- } else {
- _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
- }
- }
+ private static final Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
- resourceList.setApis(resources);
+ private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
+ private final ObjectMapper mapper = new ObjectMapper();
+ private final ModelGenerator jsonConverter = new ModelGenerator();
- return resourceList;
- }
+ private SchemaService schemaService;
- public ApiDeclaration getApiDeclaration(String module, String revision, UriInfo uriInfo) {
+ private static final String API_VERSION = "1.0.0";
+ private static final String SWAGGER_VERSION = "1.2";
+ private static final String RESTCONF_CONTEXT_ROOT = "restconf";
+ private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
- //Lookup cache
- String cacheKey = generateCacheKey(module, revision);
+ //For now its {@link HashMap}. It will be changed to thread-safe Map when schema change listener is implemented.
+ private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
- if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
- _logger.debug("Serving from cache for {}", cacheKey);
- return MODULE_DOC_CACHE.get(cacheKey);
+ private ApiDocGenerator(){
+ mapper.registerModule(new JsonOrgModule());
+ mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
}
- Date rev = null;
- try {
- rev = SIMPLE_DATE_FORMAT.parse(revision);
- } catch (ParseException e) {
- throw new IllegalArgumentException(e);
+ /**
+ * Returns singleton instance
+ * @return
+ */
+ public static ApiDocGenerator getInstance() {
+ return INSTANCE;
}
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
+ /**
+ *
+ * @param schemaService
+ */
+ public void setSchemaService(final SchemaService schemaService) {
+ this.schemaService = schemaService;
+ }
+ /**
+ *
+ * @param uriInfo
+ * @return list of modules converted to swagger compliant resource list.
+ */
+ public ResourceList getResourceListing(final UriInfo uriInfo) {
+
+ Preconditions.checkState(schemaService != null);
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Set<Module> modules = schemaContext.getModules();
+
+ ResourceList resourceList = new ResourceList();
+ resourceList.setApiVersion(API_VERSION);
+ resourceList.setSwaggerVersion(SWAGGER_VERSION);
+
+ List<Resource> resources = new ArrayList<>(modules.size());
+ _logger.info("Modules found [{}]", modules.size());
+
+ for (Module module : modules) {
+ Resource resource = new Resource();
+ String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
+
+ _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
+ ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
+
+ if (doc != null) {
+ URI uri = uriInfo.getRequestUriBuilder().
+ path(generateCacheKey(module.getName(), revisionString)).
+ build();
+
+ resource.setPath(uri.toASCIIString());
+ resources.add(resource);
+ } else {
+ _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
+ }
+ }
+
+ resourceList.setApis(resources);
+
+ return resourceList;
+ }
+
+ public ApiDeclaration getApiDeclaration(final String module, final String revision, final UriInfo uriInfo) {
+
+ //Lookup cache
+ String cacheKey = generateCacheKey(module, revision);
- Module m = schemaContext.findModuleByName(module, rev);
- Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+ if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
+ _logger.debug("Serving from cache for {}", cacheKey);
+ return MODULE_DOC_CACHE.get(cacheKey);
+ }
+
+ Date rev = null;
+ try {
+ rev = SIMPLE_DATE_FORMAT.parse(revision);
+ } catch (ParseException e) {
+ throw new IllegalArgumentException(e);
+ }
- String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Module m = schemaContext.findModuleByName(module, rev);
+ Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+
+ String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
.append("://")
.append(uriInfo.getBaseUri().getHost())
.append(":")
.append(RESTCONF_CONTEXT_ROOT)
.toString();
- ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
- MODULE_DOC_CACHE.put(cacheKey, doc);
- return doc;
- }
+ ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
+ MODULE_DOC_CACHE.put(cacheKey, doc);
+ return doc;
+ }
- public ApiDeclaration getSwaggerDocSpec(Module m, String basePath) {
- ApiDeclaration doc = new ApiDeclaration();
- doc.setApiVersion(API_VERSION);
- doc.setSwaggerVersion(SWAGGER_VERSION);
- doc.setBasePath(basePath);
- doc.setProduces(Arrays.asList("application/json", "application/xml"));
+ public ApiDeclaration getSwaggerDocSpec(final Module m, final String basePath) {
+ ApiDeclaration doc = new ApiDeclaration();
+ doc.setApiVersion(API_VERSION);
+ doc.setSwaggerVersion(SWAGGER_VERSION);
+ doc.setBasePath(basePath);
+ doc.setProduces(Arrays.asList("application/json", "application/xml"));
- List<Api> apis = new ArrayList<Api>();
+ List<Api> apis = new ArrayList<Api>();
- Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
- _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
- for (DataSchemaNode node : dataSchemaNodes) {
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
+ _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
+ for (DataSchemaNode node : dataSchemaNodes) {
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
+ _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
- List<Parameter> pathParams = null;
- if (node.isConfiguration()) {
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/config/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, true);
+ List<Parameter> pathParams = null;
+ if (node.isConfiguration()) {
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/config/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, true);
+ }
+
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/operational/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, false);
+ }
}
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/operational/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, false);
- }
- }
+ Set<RpcDefinition> rpcs = m.getRpcs();
+ for (RpcDefinition rpcDefinition : rpcs) {
+ String resourcePath = "/operations/" + m.getName() + ":";
+ addRpcs(rpcDefinition, apis, resourcePath);
- Set<RpcDefinition> rpcs = m.getRpcs();
- for (RpcDefinition rpcDefinition : rpcs) {
- String resourcePath = "/operations/" + m.getName() + ":";
- addRpcs(rpcDefinition, apis, resourcePath);
+ }
+ _logger.debug("Number of APIs found [{}]", apis.size());
+ doc.setApis(apis);
+ JSONObject models = null;
+
+ try {
+ models = jsonConverter.convertToJsonSchema(m);
+ doc.setModels(models);
+ _logger.debug(mapper.writeValueAsString(doc));
+ } catch (IOException | JSONException e) {
+ e.printStackTrace();
+ }
+ return doc;
}
- _logger.debug("Number of APIs found [{}]", apis.size());
- doc.setApis(apis);
- JSONObject models = null;
-
- try {
- models = jsonConverter.convertToJsonSchema(m);
- doc.setModels(models);
- _logger.debug(mapper.writeValueAsString(doc));
- } catch (IOException | JSONException e) {
- e.printStackTrace();
+
+ private String generateCacheKey(final Module m) {
+ return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
}
- return doc;
- }
-
- private String generateCacheKey(Module m) {
- return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
- }
-
- private String generateCacheKey(String module, String revision) {
- return module + "," + revision;
- }
-
- private void addApis(DataSchemaNode node,
- List<Api> apis,
- String parentPath,
- List<Parameter> parentPathParams,
- boolean addConfigApi) {
-
- Api api = new Api();
- List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
-
- String resourcePath = parentPath + createPath(node, pathParams) + "/";
- _logger.debug("Adding path: [{}]", resourcePath);
- api.setPath(resourcePath);
- api.setOperations(operations(node, pathParams, addConfigApi));
- apis.add(api);
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- DataNodeContainer schemaNode = (DataNodeContainer) node;
- Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
-
- for (DataSchemaNode childNode : dataSchemaNodes) {
- addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
- }
+ private String generateCacheKey(final String module, final String revision) {
+ return module + "," + revision;
}
- }
+ private void addApis(final DataSchemaNode node,
+ final List<Api> apis,
+ final String parentPath,
+ final List<Parameter> parentPathParams,
+ final boolean addConfigApi) {
+
+ Api api = new Api();
+ List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
+
+ String resourcePath = parentPath + createPath(node, pathParams) + "/";
+ _logger.debug("Adding path: [{}]", resourcePath);
+ api.setPath(resourcePath);
+ api.setOperations(operations(node, pathParams, addConfigApi));
+ apis.add(api);
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ DataNodeContainer schemaNode = (DataNodeContainer) node;
+ Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
+
+ for (DataSchemaNode childNode : dataSchemaNodes) {
+ addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
+ }
+ }
- private void addRpcs(RpcDefinition rpcDefn, List<Api> apis, String parentPath) {
- Api rpc = new Api();
- String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
- rpc.setPath(resourcePath);
+ }
+
+ private void addRpcs(final RpcDefinition rpcDefn, final List<Api> apis, final String parentPath) {
+ Api rpc = new Api();
+ String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
+ rpc.setPath(resourcePath);
- Operation operationSpec = new Operation();
- operationSpec.setMethod("POST");
- operationSpec.setNotes(rpcDefn.getDescription());
- operationSpec.setNickname(rpcDefn.getQName().getLocalName());
- rpc.setOperations(Arrays.asList(operationSpec));
+ Operation operationSpec = new Operation();
+ operationSpec.setMethod("POST");
+ operationSpec.setNotes(rpcDefn.getDescription());
+ operationSpec.setNickname(rpcDefn.getQName().getLocalName());
+ rpc.setOperations(Arrays.asList(operationSpec));
- apis.add(rpc);
- }
+ apis.add(rpc);
+ }
- /**
- * @param node
- * @param pathParams
- * @return
- */
- private List<Operation> operations(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig) {
- List<Operation> operations = new ArrayList<>();
+ /**
+ * @param node
+ * @param pathParams
+ * @return
+ */
+ private List<Operation> operations(final DataSchemaNode node, final List<Parameter> pathParams, final boolean isConfig) {
+ List<Operation> operations = new ArrayList<>();
- OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
- operations.add(getBuilder.pathParams(pathParams).build());
+ OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
+ operations.add(getBuilder.pathParams(pathParams).build());
- if (isConfig) {
- OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
- operations.add(postBuilder.pathParams(pathParams).build());
+ if (isConfig) {
+ OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
+ operations.add(postBuilder.pathParams(pathParams).build());
- OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
- operations.add(putBuilder.pathParams(pathParams).build());
+ OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
+ operations.add(putBuilder.pathParams(pathParams).build());
- OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
- operations.add(deleteBuilder.pathParams(pathParams).build());
+ OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
+ operations.add(deleteBuilder.pathParams(pathParams).build());
+ }
+ return operations;
}
- return operations;
- }
-
- private String createPath(final DataSchemaNode schemaNode, List<Parameter> pathParams) {
- ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
- StringBuilder path = new StringBuilder();
- QName _qName = schemaNode.getQName();
- String localName = _qName.getLocalName();
- path.append(localName);
-
- if ((schemaNode instanceof ListSchemaNode)) {
- final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
- for (final QName listKey : listKeys) {
- {
- DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
- pathListParams.add(((LeafSchemaNode) _dataChildByName));
-
- String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
- path.append(pathParamIdentifier);
-
- Parameter pathParam = new Parameter();
- pathParam.setName(listKey.getLocalName());
- pathParam.setDescription(_dataChildByName.getDescription());
- pathParam.setType("string");
- pathParam.setParamType("path");
-
- pathParams.add(pathParam);
+
+ private String createPath(final DataSchemaNode schemaNode, final List<Parameter> pathParams) {
+ ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
+ StringBuilder path = new StringBuilder();
+ QName _qName = schemaNode.getQName();
+ String localName = _qName.getLocalName();
+ path.append(localName);
+
+ if ((schemaNode instanceof ListSchemaNode)) {
+ final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
+ for (final QName listKey : listKeys) {
+ {
+ DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
+ pathListParams.add(((LeafSchemaNode) _dataChildByName));
+
+ String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
+ path.append(pathParamIdentifier);
+
+ Parameter pathParam = new Parameter();
+ pathParam.setName(listKey.getLocalName());
+ pathParam.setDescription(_dataChildByName.getDescription());
+ pathParam.setType("string");
+ pathParam.setParamType("path");
+
+ pathParams.add(pathParam);
+ }
+ }
}
- }
+ return path.toString();
}
- return path.toString();
- }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
-import org.opendaylight.yangtools.yang.model.api.*;
-import org.opendaylight.yangtools.yang.model.api.type.*;
+import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceCaseNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceNode;
+import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition.Bit;
import org.opendaylight.yangtools.yang.model.api.type.EnumTypeDefinition.EnumPair;
-import org.opendaylight.yangtools.yang.model.util.*;
+import org.opendaylight.yangtools.yang.model.api.type.IdentityrefTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.LengthConstraint;
+import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
+import org.opendaylight.yangtools.yang.model.util.BooleanType;
+import org.opendaylight.yangtools.yang.model.util.Decimal64;
+import org.opendaylight.yangtools.yang.model.util.EnumerationType;
+import org.opendaylight.yangtools.yang.model.util.ExtendedType;
+import org.opendaylight.yangtools.yang.model.util.Int16;
+import org.opendaylight.yangtools.yang.model.util.Int32;
+import org.opendaylight.yangtools.yang.model.util.Int64;
+import org.opendaylight.yangtools.yang.model.util.Int8;
+import org.opendaylight.yangtools.yang.model.util.StringType;
+import org.opendaylight.yangtools.yang.model.util.Uint16;
+import org.opendaylight.yangtools.yang.model.util.Uint32;
+import org.opendaylight.yangtools.yang.model.util.Uint64;
+import org.opendaylight.yangtools.yang.model.util.Uint8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.*;
-
/**
* Generates JSON Schema for data defined in Yang
*/
public class ModelGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
-
- private static final String BASE_64 = "base64";
- private static final String BINARY_ENCODING_KEY = "binaryEncoding";
- private static final String MEDIA_KEY = "media";
- private static final String ONE_OF_KEY = "oneOf";
- private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
- private static final String MAX_ITEMS = "maxItems";
- private static final String MIN_ITEMS = "minItems";
- private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
- private static final String SCHEMA_KEY = "$schema";
- private static final String MAX_LENGTH_KEY = "maxLength";
- private static final String MIN_LENGTH_KEY = "minLength";
- private static final String REQUIRED_KEY = "required";
- private static final String REF_KEY = "$ref";
- private static final String ITEMS_KEY = "items";
- private static final String TYPE_KEY = "type";
- private static final String PROPERTIES_KEY = "properties";
- private static final String DESCRIPTION_KEY = "description";
- private static final String OBJECT_TYPE = "object";
- private static final String ARRAY_TYPE = "array";
- private static final String ENUM = "enum";
- private static final String INTEGER = "integer";
- private static final String NUMBER = "number";
- private static final String BOOLEAN = "boolean";
- private static final String STRING = "string";
-
- private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
-
- static {
- Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
- tempMap1.put(StringType.class , STRING);
- tempMap1.put(BooleanType.class , BOOLEAN);
- tempMap1.put(Int8.class , INTEGER);
- tempMap1.put(Int16.class , INTEGER);
- tempMap1.put(Int32.class , INTEGER);
- tempMap1.put(Int64.class , INTEGER);
- tempMap1.put(Uint16.class , INTEGER);
- tempMap1.put(Uint32.class , INTEGER);
- tempMap1.put(Uint64.class , INTEGER);
- tempMap1.put(Uint8.class , INTEGER);
- tempMap1.put(Decimal64.class , NUMBER);
- tempMap1.put(EnumerationType.class , ENUM);
- //TODO: Binary type
-
- YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
- }
-
- public ModelGenerator(){
- }
-
- public JSONObject convertToJsonSchema(Module module) throws IOException, JSONException {
- JSONObject models = new JSONObject();
- processContainers(module, models);
- processRPCs(module, models);
-
- return models;
- }
-
-
-
- private void processContainers(Module module, JSONObject models) throws IOException, JSONException {
-
- String moduleName = module.getName();
- Set<DataSchemaNode> childNodes = module.getChildNodes();
-
- for(DataSchemaNode childNode : childNodes){
- JSONObject moduleJSON=null;
- String filename = childNode.getQName().getLocalName();
+ private static final Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
+
+ private static final String BASE_64 = "base64";
+ private static final String BINARY_ENCODING_KEY = "binaryEncoding";
+ private static final String MEDIA_KEY = "media";
+ private static final String ONE_OF_KEY = "oneOf";
+ private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
+ private static final String MAX_ITEMS = "maxItems";
+ private static final String MIN_ITEMS = "minItems";
+ private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
+ private static final String SCHEMA_KEY = "$schema";
+ private static final String MAX_LENGTH_KEY = "maxLength";
+ private static final String MIN_LENGTH_KEY = "minLength";
+ private static final String REQUIRED_KEY = "required";
+ private static final String REF_KEY = "$ref";
+ private static final String ITEMS_KEY = "items";
+ private static final String TYPE_KEY = "type";
+ private static final String PROPERTIES_KEY = "properties";
+ private static final String DESCRIPTION_KEY = "description";
+ private static final String OBJECT_TYPE = "object";
+ private static final String ARRAY_TYPE = "array";
+ private static final String ENUM = "enum";
+ private static final String INTEGER = "integer";
+ private static final String NUMBER = "number";
+ private static final String BOOLEAN = "boolean";
+ private static final String STRING = "string";
+
+ private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
+
+ static {
+ Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
+ tempMap1.put(StringType.class , STRING);
+ tempMap1.put(BooleanType.class , BOOLEAN);
+ tempMap1.put(Int8.class , INTEGER);
+ tempMap1.put(Int16.class , INTEGER);
+ tempMap1.put(Int32.class , INTEGER);
+ tempMap1.put(Int64.class , INTEGER);
+ tempMap1.put(Uint16.class , INTEGER);
+ tempMap1.put(Uint32.class , INTEGER);
+ tempMap1.put(Uint64.class , INTEGER);
+ tempMap1.put(Uint8.class , INTEGER);
+ tempMap1.put(Decimal64.class , NUMBER);
+ tempMap1.put(EnumerationType.class , ENUM);
+ //TODO: Binary type
+
+ YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
+ }
+
+ public ModelGenerator(){
+ }
+
+ public JSONObject convertToJsonSchema(final Module module) throws IOException, JSONException {
+ JSONObject models = new JSONObject();
+ processContainers(module, models);
+ processRPCs(module, models);
+
+ return models;
+ }
+
+
+
+ private void processContainers(final Module module, final JSONObject models) throws IOException, JSONException {
+
+ String moduleName = module.getName();
+ Set<DataSchemaNode> childNodes = module.getChildNodes();
+
+ for(DataSchemaNode childNode : childNodes){
+ JSONObject moduleJSON=null;
+ String filename = childNode.getQName().getLocalName();
/*
* For every container in the module
*/
- if(childNode instanceof ContainerSchemaNode) {
- moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
- }
-
- if(moduleJSON!=null) {
- _logger.debug("Adding model for [{}]", filename);
- moduleJSON.put("id", filename);
- models.put(filename, moduleJSON);
- }
- }
+ if(childNode instanceof ContainerSchemaNode) {
+ moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
+ }
+
+ if(moduleJSON!=null) {
+ _logger.debug("Adding model for [{}]", filename);
+ moduleJSON.put("id", filename);
+ models.put(filename, moduleJSON);
+ }
+ }
- }
-
-
- /**
- * Process the RPCs for a Module
- * Spits out a file each of the name <rpcName>-input.json
- * and <rpcName>-output.json for each RPC that contains
- * input & output elements
- *
- * @param module
- * @throws JSONException
- * @throws IOException
- */
- private void processRPCs(Module module, JSONObject models) throws JSONException, IOException {
-
- Set<RpcDefinition> rpcs = module.getRpcs();
- String moduleName = module.getName();
- for(RpcDefinition rpc: rpcs) {
-
- ContainerSchemaNode input = rpc.getInput();
- if(input!=null) {
- JSONObject inputJSON = processContainer(input, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-input";
- inputJSON.put("id", filename);
- //writeToFile(filename, inputJSON.toString(2), moduleName);
- models.put(filename, inputJSON);
- }
-
- ContainerSchemaNode output = rpc.getOutput();
- if(output!=null) {
- JSONObject outputJSON = processContainer(output, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-output";
- outputJSON.put("id", filename);
- models.put(filename, outputJSON);
- }
}
- }
-
-
- /**
- * Processes the container node and populates the moduleJSON
- *
- * @param container
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt, JSONObject models) throws JSONException, IOException{
- JSONObject moduleJSON = getSchemaTemplate();
- if(addSchemaStmt) {
- moduleJSON = getSchemaTemplate();
- } else {
- moduleJSON = new JSONObject();
+
+
+ /**
+     * Process the RPCs for a Module.
+     * Produces a model named {@code <rpcName>-input} and
+     * {@code <rpcName>-output} for each RPC that contains
+     * input &amp; output elements.
+ *
+ * @param module
+ * @throws JSONException
+ * @throws IOException
+ */
+ private void processRPCs(final Module module, final JSONObject models) throws JSONException, IOException {
+
+ Set<RpcDefinition> rpcs = module.getRpcs();
+ String moduleName = module.getName();
+ for(RpcDefinition rpc: rpcs) {
+
+ ContainerSchemaNode input = rpc.getInput();
+ if(input!=null) {
+ JSONObject inputJSON = processContainer(input, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-input";
+ inputJSON.put("id", filename);
+ //writeToFile(filename, inputJSON.toString(2), moduleName);
+ models.put(filename, inputJSON);
+ }
+
+ ContainerSchemaNode output = rpc.getOutput();
+ if(output!=null) {
+ JSONObject outputJSON = processContainer(output, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-output";
+ outputJSON.put("id", filename);
+ models.put(filename, outputJSON);
+ }
+ }
}
- moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
-
- String containerDescription = container.getDescription();
- moduleJSON.put(DESCRIPTION_KEY, containerDescription);
-
- Set<DataSchemaNode> containerChildren = ((ContainerSchemaNode)container).getChildNodes();
- JSONObject properties = processChildren(containerChildren, moduleName, models);
- moduleJSON.put(PROPERTIES_KEY, properties);
- return moduleJSON;
- }
-
- /**
- * Processes the nodes
- * @param nodes
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChildren(Set<DataSchemaNode> nodes, String moduleName, JSONObject models) throws JSONException, IOException {
-
- JSONObject properties = new JSONObject();
-
- for(DataSchemaNode node : nodes){
- String name = node.getQName().getLocalName();
- JSONObject property = null;
- if(node instanceof LeafSchemaNode) {
- property = processLeafNode((LeafSchemaNode)node);
- } else if (node instanceof ListSchemaNode) {
- property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
-
- } else if (node instanceof LeafListSchemaNode) {
- property = processLeafListNode((LeafListSchemaNode)node);
-
- } else if (node instanceof ChoiceNode) {
- property = processChoiceNode((ChoiceNode)node, moduleName, models);
-
- } else if (node instanceof AnyXmlSchemaNode) {
- property = processAnyXMLNode((AnyXmlSchemaNode)node);
-
- } else if (node instanceof ContainerSchemaNode) {
- property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
-
- } else {
- throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
- }
-
- property.putOpt(DESCRIPTION_KEY, node.getDescription());
- properties.put(name, property);
+
+
+ /**
+ * Processes the container node and populates the moduleJSON
+ *
+ * @param container
+ * @param moduleName
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processContainer(final ContainerSchemaNode container, final String moduleName, final boolean addSchemaStmt, final JSONObject models) throws JSONException, IOException{
+        JSONObject moduleJSON;
+        if(addSchemaStmt) {
+            moduleJSON = getSchemaTemplate();
+        } else {
+            moduleJSON = new JSONObject();
+        }
+ moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
+
+ String containerDescription = container.getDescription();
+ moduleJSON.put(DESCRIPTION_KEY, containerDescription);
+
+ Set<DataSchemaNode> containerChildren = container.getChildNodes();
+ JSONObject properties = processChildren(containerChildren, moduleName, models);
+ moduleJSON.put(PROPERTIES_KEY, properties);
+ return moduleJSON;
}
- return properties;
- }
-
- /**
- *
- * @param listNode
- * @throws JSONException
- */
- private JSONObject processLeafListNode(LeafListSchemaNode listNode) throws JSONException {
- JSONObject props = new JSONObject();
- props.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject itemsVal = new JSONObject();
- processTypeDef(listNode.getType(), itemsVal);
- props.put(ITEMS_KEY, itemsVal);
-
- ConstraintDefinition constraints = listNode.getConstraints();
- processConstraints(constraints, props);
-
- return props;
- }
-
- /**
- *
- * @param choiceNode
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChoiceNode(ChoiceNode choiceNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<ChoiceCaseNode> cases = choiceNode.getCases();
-
- JSONArray choiceProps = new JSONArray();
- for(ChoiceCaseNode choiceCase: cases) {
- String choiceName = choiceCase.getQName().getLocalName();
- JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
- JSONObject choiceObj = new JSONObject();
- choiceObj.put(choiceName, choiceProp);
- choiceObj.put(TYPE_KEY, OBJECT_TYPE);
- choiceProps.put(choiceObj);
+
+ /**
+ * Processes the nodes
+ * @param nodes
+ * @param moduleName
+ * @return
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processChildren(final Set<DataSchemaNode> nodes, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ JSONObject properties = new JSONObject();
+
+ for(DataSchemaNode node : nodes){
+ String name = node.getQName().getLocalName();
+ JSONObject property = null;
+ if(node instanceof LeafSchemaNode) {
+ property = processLeafNode((LeafSchemaNode)node);
+ } else if (node instanceof ListSchemaNode) {
+ property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
+
+ } else if (node instanceof LeafListSchemaNode) {
+ property = processLeafListNode((LeafListSchemaNode)node);
+
+ } else if (node instanceof ChoiceNode) {
+ property = processChoiceNode((ChoiceNode)node, moduleName, models);
+
+ } else if (node instanceof AnyXmlSchemaNode) {
+ property = processAnyXMLNode((AnyXmlSchemaNode)node);
+
+ } else if (node instanceof ContainerSchemaNode) {
+ property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
+
+ } else {
+ throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
+ }
+
+ property.putOpt(DESCRIPTION_KEY, node.getDescription());
+ properties.put(name, property);
+ }
+ return properties;
}
- JSONObject oneOfProps = new JSONObject();
- oneOfProps.put(ONE_OF_KEY, choiceProps);
- oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+ /**
+ *
+ * @param listNode
+ * @throws JSONException
+ */
+ private JSONObject processLeafListNode(final LeafListSchemaNode listNode) throws JSONException {
+ JSONObject props = new JSONObject();
+ props.put(TYPE_KEY, ARRAY_TYPE);
- return oneOfProps;
- }
+ JSONObject itemsVal = new JSONObject();
+ processTypeDef(listNode.getType(), itemsVal);
+ props.put(ITEMS_KEY, itemsVal);
+ ConstraintDefinition constraints = listNode.getConstraints();
+ processConstraints(constraints, props);
- /**
- *
- * @param constraints
- * @param props
- * @throws JSONException
- */
- private void processConstraints(ConstraintDefinition constraints, JSONObject props) throws JSONException {
- boolean isMandatory = constraints.isMandatory();
- props.put(REQUIRED_KEY, isMandatory);
+ return props;
+ }
- Integer minElements = constraints.getMinElements();
- Integer maxElements = constraints.getMaxElements();
- if(minElements !=null) {
- props.put(MIN_ITEMS, minElements);
+ /**
+ *
+ * @param choiceNode
+ * @param moduleName
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processChoiceNode(final ChoiceNode choiceNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<ChoiceCaseNode> cases = choiceNode.getCases();
+
+ JSONArray choiceProps = new JSONArray();
+ for(ChoiceCaseNode choiceCase: cases) {
+ String choiceName = choiceCase.getQName().getLocalName();
+ JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
+ JSONObject choiceObj = new JSONObject();
+ choiceObj.put(choiceName, choiceProp);
+ choiceObj.put(TYPE_KEY, OBJECT_TYPE);
+ choiceProps.put(choiceObj);
+ }
+
+ JSONObject oneOfProps = new JSONObject();
+ oneOfProps.put(ONE_OF_KEY, choiceProps);
+ oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+
+ return oneOfProps;
}
- if(maxElements !=null) {
- props.put(MAX_ITEMS, maxElements);
+
+
+ /**
+ *
+ * @param constraints
+ * @param props
+ * @throws JSONException
+ */
+ private void processConstraints(final ConstraintDefinition constraints, final JSONObject props) throws JSONException {
+ boolean isMandatory = constraints.isMandatory();
+ props.put(REQUIRED_KEY, isMandatory);
+
+ Integer minElements = constraints.getMinElements();
+ Integer maxElements = constraints.getMaxElements();
+ if(minElements !=null) {
+ props.put(MIN_ITEMS, minElements);
+ }
+ if(maxElements !=null) {
+ props.put(MAX_ITEMS, maxElements);
+ }
}
- }
-
- /**
- * Parses a ListSchema node.
- *
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
- * must be in a separate JSON schema file. Hence, we have to write
- * some properties to a new file, while continuing to process the rest.
- *
- * @param listNode
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<DataSchemaNode> listChildren = listNode.getChildNodes();
- String fileName = listNode.getQName().getLocalName();
-
- JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
- JSONObject childSchema = getSchemaTemplate();
- childSchema.put(TYPE_KEY, OBJECT_TYPE);
- childSchema.put(PROPERTIES_KEY, childSchemaProperties);
+
+ /**
+ * Parses a ListSchema node.
+ *
+ * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
+ * must be in a separate JSON schema file. Hence, we have to write
+ * some properties to a new file, while continuing to process the rest.
+ *
+ * @param listNode
+ * @param moduleName
+ * @return
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processListSchemaNode(final ListSchemaNode listNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<DataSchemaNode> listChildren = listNode.getChildNodes();
+ String fileName = listNode.getQName().getLocalName();
+
+ JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
+ JSONObject childSchema = getSchemaTemplate();
+ childSchema.put(TYPE_KEY, OBJECT_TYPE);
+ childSchema.put(PROPERTIES_KEY, childSchemaProperties);
/*
* Due to a limitation of the RAML--->JAX-RS tool, sub-properties
* must be in a separate JSON schema file. Hence, we have to write
* some properties to a new file, while continuing to process the rest.
*/
- //writeToFile(fileName, childSchema.toString(2), moduleName);
- childSchema.put("id", fileName);
- models.put(fileName, childSchema);
-
-
- JSONObject listNodeProperties = new JSONObject();
- listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject items = new JSONObject();
- items.put(REF_KEY,fileName );
- listNodeProperties.put(ITEMS_KEY, items);
-
- return listNodeProperties;
-
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processLeafNode(LeafSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
- processTypeDef(leafNode.getType(), property);
-
- return property;
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processAnyXMLNode(AnyXmlSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
-
- return property;
- }
-
- /**
- * @param property
- * @throws JSONException
- */
- private void processTypeDef(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
-
- if(leafTypeDef instanceof ExtendedType){
- processExtendedType(leafTypeDef, property);
- } else if (leafTypeDef instanceof EnumerationType) {
- processEnumType((EnumerationType)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof BitsTypeDefinition) {
- processBitsType((BitsTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof UnionTypeDefinition) {
- processUnionType((UnionTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
- property.putOpt(TYPE_KEY, "object");
- } else if (leafTypeDef instanceof BinaryTypeDefinition) {
- processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
- } else {
- //System.out.println("In else: " + leafTypeDef.getClass());
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
- if(jsonType==null) {
- jsonType = "object";
- }
- property.putOpt(TYPE_KEY, jsonType);
+ //writeToFile(fileName, childSchema.toString(2), moduleName);
+ childSchema.put("id", fileName);
+ models.put(fileName, childSchema);
+
+
+ JSONObject listNodeProperties = new JSONObject();
+ listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
+
+ JSONObject items = new JSONObject();
+        items.put(REF_KEY, fileName);
+ listNodeProperties.put(ITEMS_KEY, items);
+
+ return listNodeProperties;
+
}
- }
-
- /**
- *
- * @param leafTypeDef
- * @param property
- * @throws JSONException
- */
- private void processExtendedType(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
- Object leafBaseType = leafTypeDef.getBaseType();
- if(leafBaseType instanceof ExtendedType){
- //recursively process an extended type until we hit a base type
- processExtendedType((TypeDefinition<?>)leafBaseType, property);
- } else {
- List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
- for(LengthConstraint lengthConstraint: lengthConstraints) {
- Number min = lengthConstraint.getMin();
- Number max = lengthConstraint.getMax();
- property.putOpt(MIN_LENGTH_KEY, min);
- property.putOpt(MAX_LENGTH_KEY, max);
- }
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
- property.putOpt(TYPE_KEY,jsonType );
+
+ /**
+ *
+ * @param leafNode
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject processLeafNode(final LeafSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
+
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+
+ processConstraints(leafNode.getConstraints(), property);
+ processTypeDef(leafNode.getType(), property);
+
+ return property;
}
- }
-
- /*
- *
- */
- private void processBinaryType(BinaryTypeDefinition binaryType, JSONObject property) throws JSONException {
- property.put(TYPE_KEY, STRING);
- JSONObject media = new JSONObject();
- media.put(BINARY_ENCODING_KEY, BASE_64);
- property.put(MEDIA_KEY, media);
- }
-
- /**
- *
- * @param enumLeafType
- * @param property
- * @throws JSONException
- */
- private void processEnumType(EnumerationType enumLeafType, JSONObject property) throws JSONException {
- List<EnumPair> enumPairs = enumLeafType.getValues();
- List<String> enumNames = new ArrayList<String>();
- for(EnumPair enumPair: enumPairs) {
- enumNames.add(enumPair.getName());
+ /**
+ *
+ * @param leafNode
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject processAnyXMLNode(final AnyXmlSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
+
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+
+ processConstraints(leafNode.getConstraints(), property);
+
+ return property;
}
- property.putOpt(ENUM, new JSONArray(enumNames));
- }
-
- /**
- *
- * @param bitsType
- * @param property
- * @throws JSONException
- */
- private void processBitsType(BitsTypeDefinition bitsType, JSONObject property) throws JSONException{
- property.put(TYPE_KEY, ARRAY_TYPE);
- property.put(MIN_ITEMS, 0);
- property.put(UNIQUE_ITEMS_KEY, true);
- JSONArray enumValues = new JSONArray();
-
- List<Bit> bits = bitsType.getBits();
- for(Bit bit: bits) {
- enumValues.put(bit.getName());
+
+ /**
+ * @param property
+ * @throws JSONException
+ */
+ private void processTypeDef(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+
+ if(leafTypeDef instanceof ExtendedType){
+ processExtendedType(leafTypeDef, property);
+ } else if (leafTypeDef instanceof EnumerationType) {
+ processEnumType((EnumerationType)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof BitsTypeDefinition) {
+ processBitsType((BitsTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof UnionTypeDefinition) {
+ processUnionType((UnionTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
+            property.putOpt(TYPE_KEY, OBJECT_TYPE);
+ } else if (leafTypeDef instanceof BinaryTypeDefinition) {
+ processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
+ } else {
+ //System.out.println("In else: " + leafTypeDef.getClass());
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
+ if(jsonType==null) {
+ jsonType = "object";
+ }
+ property.putOpt(TYPE_KEY, jsonType);
+ }
}
- JSONObject itemsValue = new JSONObject();
- itemsValue.put(ENUM, enumValues);
- property.put(ITEMS_KEY, itemsValue);
- }
-
-
- /**
- *
- * @param unionType
- * @param property
- * @throws JSONException
- */
- private void processUnionType(UnionTypeDefinition unionType, JSONObject property) throws JSONException{
-
- List<TypeDefinition<?>> unionTypes = unionType.getTypes();
- JSONArray unionArray = new JSONArray();
- for(TypeDefinition<?> typeDef: unionTypes) {
- unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+
+ /**
+ *
+ * @param leafTypeDef
+ * @param property
+ * @throws JSONException
+ */
+ private void processExtendedType(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+ Object leafBaseType = leafTypeDef.getBaseType();
+ if(leafBaseType instanceof ExtendedType){
+ //recursively process an extended type until we hit a base type
+ processExtendedType((TypeDefinition<?>)leafBaseType, property);
+ } else {
+ List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
+ for(LengthConstraint lengthConstraint: lengthConstraints) {
+ Number min = lengthConstraint.getMin();
+ Number max = lengthConstraint.getMax();
+ property.putOpt(MIN_LENGTH_KEY, min);
+ property.putOpt(MAX_LENGTH_KEY, max);
+ }
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
+            property.putOpt(TYPE_KEY, jsonType);
+ }
+
+ }
+
+ /*
+ *
+ */
+ private void processBinaryType(final BinaryTypeDefinition binaryType, final JSONObject property) throws JSONException {
+ property.put(TYPE_KEY, STRING);
+ JSONObject media = new JSONObject();
+ media.put(BINARY_ENCODING_KEY, BASE_64);
+ property.put(MEDIA_KEY, media);
+ }
+
+ /**
+ *
+ * @param enumLeafType
+ * @param property
+ * @throws JSONException
+ */
+ private void processEnumType(final EnumerationType enumLeafType, final JSONObject property) throws JSONException {
+ List<EnumPair> enumPairs = enumLeafType.getValues();
+ List<String> enumNames = new ArrayList<String>();
+ for(EnumPair enumPair: enumPairs) {
+ enumNames.add(enumPair.getName());
+ }
+ property.putOpt(ENUM, new JSONArray(enumNames));
+ }
+
+ /**
+ *
+ * @param bitsType
+ * @param property
+ * @throws JSONException
+ */
+ private void processBitsType(final BitsTypeDefinition bitsType, final JSONObject property) throws JSONException{
+ property.put(TYPE_KEY, ARRAY_TYPE);
+ property.put(MIN_ITEMS, 0);
+ property.put(UNIQUE_ITEMS_KEY, true);
+ JSONArray enumValues = new JSONArray();
+
+ List<Bit> bits = bitsType.getBits();
+ for(Bit bit: bits) {
+ enumValues.put(bit.getName());
+ }
+ JSONObject itemsValue = new JSONObject();
+ itemsValue.put(ENUM, enumValues);
+ property.put(ITEMS_KEY, itemsValue);
+ }
+
+
+ /**
+ *
+ * @param unionType
+ * @param property
+ * @throws JSONException
+ */
+ private void processUnionType(final UnionTypeDefinition unionType, final JSONObject property) throws JSONException{
+
+ List<TypeDefinition<?>> unionTypes = unionType.getTypes();
+ JSONArray unionArray = new JSONArray();
+ for(TypeDefinition<?> typeDef: unionTypes) {
+ unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+ }
+ property.put(TYPE_KEY, unionArray);
+ }
+
+
+ /**
+ * Helper method to generate a pre-filled
+ * JSON schema object.
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject getSchemaTemplate() throws JSONException {
+ JSONObject schemaJSON = new JSONObject();
+ schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
+
+ return schemaJSON;
}
- property.put(TYPE_KEY, unionArray);
- }
-
-
- /**
- * Helper method to generate a pre-filled
- * JSON schema object.
- * @return
- * @throws JSONException
- */
- private JSONObject getSchemaTemplate() throws JSONException {
- JSONObject schemaJSON = new JSONObject();
- schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
-
- return schemaJSON;
- }
}