--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../opendaylight</relativePath>
+  </parent>
+  <artifactId>filter-valve</artifactId>
+  <packaging>bundle</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>equinoxSDK381</groupId>
+      <artifactId>javax.servlet</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>orbit</groupId>
+      <artifactId>org.apache.catalina</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <configuration>
+          <instructions>
+            <!-- Attach this bundle as an OSGi fragment of the Gemini Web Tomcat host
+                 bundle, so the valve classes are loaded by Tomcat's classloader. -->
+            <Fragment-Host>org.eclipse.gemini.web.tomcat</Fragment-Host>
+            <Import-Package>javax.servlet,
+              org.apache.catalina,
+              org.apache.catalina.connector,
+              org.apache.catalina.valves,
+              org.slf4j,
+              javax.xml.bind,
+              javax.xml.bind.annotation,
+              org.apache.commons.io,
+              com.google.common.base,
+              com.google.common.collect</Import-Package>
+          </instructions>
+        </configuration>
+      </plugin>
+      <!-- NOTE(review): no YANG models are visible in this module - confirm the
+           yang-maven-plugin is actually needed here. -->
+      <plugin>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>yang-maven-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Objects;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.apache.catalina.valves.ValveBase;
+import org.apache.commons.io.FileUtils;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Host;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Parser;
+import org.opendaylight.controller.filtervalve.cors.model.FilterProcessor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Valve that allows adding filters per context. Each context can have its own filter definitions.
+ * Main purpose is to allow externalizing security filters from application bundles to a single
+ * file per OSGi distribution.
+ */
+public class FilterValve extends ValveBase {
+    private static final Logger logger = LoggerFactory.getLogger(FilterValve.class);
+
+    /** Set by {@link #setConfigurationFile(String)}; null until the valve is configured. */
+    private FilterProcessor filterProcessor;
+
+    /**
+     * Called by Tomcat for each request. Delegates to the configured
+     * {@link FilterProcessor}, handing it a {@link FilterChain} that forwards to the
+     * next valve. The chain refuses to proceed if a filter replaced the request or
+     * response object, because the next valve must see the original catalina instances.
+     *
+     * @throws IllegalStateException if the valve was not configured via configurationFile
+     */
+    @Override
+    public void invoke(final Request request, final Response response) throws IOException, ServletException {
+        if (filterProcessor == null) {
+            throw new IllegalStateException("Initialization error");
+        }
+
+        FilterChain nextValveFilterChain = new FilterChain() {
+            @Override
+            public void doFilter(ServletRequest req, ServletResponse resp) throws IOException, ServletException {
+                boolean reqEquals = Objects.equals(request, req);
+                boolean respEquals = Objects.equals(response, resp);
+                if (!reqEquals || !respEquals) {
+                    logger.error("Illegal change was detected by valve - request {} or " +
+                            "response {} was replaced by a filter. This is not supported by this valve",
+                            reqEquals, respEquals);
+                    throw new IllegalStateException("Request or response was replaced in a filter");
+                }
+                getNext().invoke(request, response);
+            }
+        };
+        filterProcessor.process(request, response, nextValveFilterChain);
+    }
+
+    /**
+     * Called by Tomcat when configurationFile attribute is set.
+     *
+     * @param fileName path to xml file containing valve configuration
+     * @throws IllegalArgumentException if the file does not exist or is not readable
+     * @throws IllegalStateException if the file cannot be read or parsed
+     */
+    @SuppressWarnings("UnusedDeclaration")
+    public void setConfigurationFile(String fileName) throws Exception {
+        File configurationFile = new File(fileName);
+        if (!configurationFile.exists() || !configurationFile.canRead()) {
+            throw new IllegalArgumentException(
+                "Cannot read 'configurationFile' of this valve defined in tomcat-server.xml: " + fileName);
+        }
+        String xmlContent;
+        try {
+            xmlContent = FileUtils.readFileToString(configurationFile);
+        } catch (IOException e) {
+            logger.error("Cannot read {} of this valve defined in tomcat-server.xml", fileName, e);
+            throw new IllegalStateException("Cannot read " + fileName, e);
+        }
+        Host host;
+        try {
+            host = Parser.parse(xmlContent, fileName);
+        } catch (Exception e) {
+            logger.error("Cannot parse {} of this valve defined in tomcat-server.xml", fileName, e);
+            throw new IllegalStateException("Error while parsing " + fileName, e);
+        }
+        filterProcessor = new FilterProcessor(host);
+    }
+
+    /**
+     * @see org.apache.catalina.valves.ValveBase#getInfo()
+     */
+    @Override
+    public String getInfo() {
+        // getClass().getName() - the original "getClass() + ..." produced "class <fqcn>/1.0"
+        // via the implicit Class.toString() prefix.
+        return getClass().getName() + "/1.0";
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+import static java.lang.String.format;
+
+import com.google.common.base.Optional;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import org.opendaylight.controller.filtervalve.cors.model.UrlMatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JAXB representation of a <Context/> element: filter definitions and filter-mappings
+ * for a single context path. Mutable while being unmarshalled; frozen by
+ * {@link #initialize}, after which the setters reject further changes.
+ */
+@XmlRootElement
+public class Context {
+    private static final Logger logger = LoggerFactory.getLogger(Context.class);
+
+    private String path;
+    private List<Filter> filters = new ArrayList<>();
+    private List<FilterMapping> filterMappings = new ArrayList<>();
+    // set once by initialize(); guards against mutation through the setters afterwards
+    private boolean initialized;
+    // built from filterMappings during initialize(); used by findMatchingFilters()
+    private UrlMatcher<Filter> urlMatcher;
+
+
+    /**
+     * Initializes every filter (merging it with the template of the same name, if any),
+     * resolves filter-mappings to filters, builds the UrlMatcher and freezes this instance.
+     *
+     * @param fileName configuration file name, used only in error messages
+     * @param namesToTemplates filter templates from the enclosing Host, keyed by filter name
+     * @throws IllegalStateException if called twice, if a filter fails to initialize,
+     *         or if a filter-mapping references an unknown filter name
+     */
+    public synchronized void initialize(String fileName, Map<String, Filter> namesToTemplates) {
+        checkState(initialized == false, "Already initialized");
+        Map<String, Filter> namesToFilters = new HashMap<>();
+        for (Filter filter : filters) {
+            try {
+                filter.initialize(fileName, Optional.fromNullable(namesToTemplates.get(filter.getFilterName())));
+            } catch (Exception e) {
+                throw new IllegalStateException(format("Error while processing filter %s of context %s, defined in %s",
+                        filter.getFilterName(), path, fileName), e);
+            }
+            namesToFilters.put(filter.getFilterName(), filter);
+        }
+        filters = Collections.unmodifiableList(new ArrayList<>(filters));
+        // LinkedHashMap preserves filter-mapping declaration order for UrlMatcher
+        LinkedHashMap<String, Filter> patternMap = new LinkedHashMap<>();
+        for (FilterMapping filterMapping : filterMappings) {
+            filterMapping.initialize();
+            Filter found = namesToFilters.get(filterMapping.getFilterName());
+            if (found != null) {
+                patternMap.put(filterMapping.getUrlPattern(), found);
+            } else {
+                logger.error("Cannot find matching filter for filter-mapping {} of context {}, defined in {}",
+                        filterMapping.getFilterName(), path, fileName);
+                throw new IllegalStateException(format(
+                        "Cannot find filter for filter-mapping %s of context %s, defined in %s",
+                        filterMapping.getFilterName(), path, fileName));
+            }
+        }
+        filterMappings = Collections.unmodifiableList(new ArrayList<>(filterMappings));
+        urlMatcher = new UrlMatcher<>(patternMap);
+        initialized = true;
+    }
+
+    /**
+     * @param pathInfo as returned by request.getPathInfo()
+     * @return filters whose url-pattern matches pathInfo, in declaration order
+     * @throws IllegalStateException if initialize() has not run yet
+     */
+    public List<Filter> findMatchingFilters(String pathInfo) {
+        checkState(initialized, "Not initialized");
+        return urlMatcher.findMatchingFilters(pathInfo);
+    }
+
+    @XmlAttribute(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+    public void setPath(String path) {
+        checkArgument(initialized == false, "Already initialized");
+        this.path = path;
+    }
+
+    @XmlElement(name = "filter")
+    public List<Filter> getFilters() {
+        return filters;
+    }
+
+    public void setFilters(List<Filter> filters) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filters = filters;
+    }
+
+    @XmlElement(name = "filter-mapping")
+    public List<FilterMapping> getFilterMappings() {
+        return filterMappings;
+    }
+
+    public void setFilterMappings(List<FilterMapping> filterMappings) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filterMappings = filterMappings;
+    }
+
+    @Override
+    public String toString() {
+        return "Context{" +
+                "path='" + path + '\'' +
+                '}';
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JAXB representation of a <filter/> or <filter-template/> element. Implements
+ * FilterConfig so the instance can be handed directly to the reflectively
+ * instantiated servlet filter's init() method. Mutable until one of the
+ * initialize methods runs.
+ */
+@XmlRootElement
+public class Filter implements FilterConfig {
+    private static final Logger logger = LoggerFactory.getLogger(Filter.class);
+
+    private String filterName;
+    private String filterClass;
+    private List<InitParam> initParams = new ArrayList<>();
+    // instantiated servlet filter; set only by initialize(), never for templates
+    private javax.servlet.Filter actualFilter;
+    private boolean initialized, isTemplate;
+
+
+    /**
+     * Called in filter-template nodes defined in <Host/> node - do not actually initialize the filter.
+     * In this case filter is only used to hold values of init params to be merged with
+     * filter defined in <Context/>
+     */
+    public synchronized void initializeTemplate(){
+        checkState(initialized == false, "Already initialized");
+        for (InitParam initParam : initParams) {
+            initParam.inititialize(); // NOTE(review): "inititialize" is a typo in InitParam's API, kept as-is
+        }
+        isTemplate = true;
+        initialized = true;
+    }
+
+
+    /**
+     * Fully initializes this filter: merges init-params from the optional template
+     * (template values not overridden locally are added), merges filter-class,
+     * instantiates filterClass reflectively and calls its init(this).
+     *
+     * @param fileName configuration file name, used only in error messages
+     * @param maybeTemplate template with the same filter-name defined on Host, if any
+     * @throws IllegalStateException on double initialization, class loading or
+     *         instantiation failure, or conflicting filter-class definitions
+     */
+    public synchronized void initialize(String fileName, Optional<Filter> maybeTemplate) {
+        checkState(initialized == false, "Already initialized");
+        logger.trace("Initializing filter {} : {}", filterName, filterClass);
+        for (InitParam initParam : initParams) {
+            initParam.inititialize();
+        }
+        if (maybeTemplate.isPresent()) {
+            // merge non conflicting init params
+            Filter template = maybeTemplate.get();
+            checkArgument(template.isTemplate);
+            Map<String, InitParam> templateParams = template.getInitParamsMap();
+            Map<String, InitParam> currentParams = getInitParamsMap();
+            // add values of template that are not present in current
+            MapDifference<String, InitParam> difference = Maps.difference(templateParams, currentParams);
+            for (Entry<String, InitParam> templateUnique : difference.entriesOnlyOnLeft().entrySet()) {
+                initParams.add(templateUnique.getValue());
+            }
+            // merge filterClass
+            if (filterClass == null) {
+                filterClass = template.filterClass;
+            } else if (Objects.equals(filterClass, template.filterClass) == false) {
+                logger.error("Conflict detected in filter-class of {} defined in {}, template class {}, child class {}" ,
+                        filterName, fileName, template.filterClass, filterClass);
+                throw new IllegalStateException("Conflict detected in template/filter filter-class definitions," +
+                        " filter name: " + filterName + " in file " + fileName);
+            }
+        }
+        initParams = Collections.unmodifiableList(new ArrayList<>(initParams));
+        Class<?> clazz;
+        try {
+            clazz = Class.forName(filterClass);
+        } catch (Exception e) {
+            throw new IllegalStateException("Cannot instantiate class defined in filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        try {
+            actualFilter = (javax.servlet.Filter) clazz.newInstance();
+        } catch (Exception e) {
+            throw new IllegalStateException("Cannot instantiate class defined in filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        logger.trace("Initializing {} with following init-params:{}", filterName, getInitParams());
+        try {
+            actualFilter.init(this);
+        } catch (Exception e) {
+            throw new IllegalStateException("Cannot initialize filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        initialized = true;
+    }
+
+    /** Not supported - this FilterConfig is not backed by a real servlet context. */
+    @Override
+    public ServletContext getServletContext() {
+        throw new UnsupportedOperationException("Getting ServletContext is currently not supported");
+    }
+
+    /** @return value of the init-param with the given name, or null if absent */
+    @Override
+    public String getInitParameter(String name) {
+        for (InitParam initParam : initParams) {
+            if (Objects.equals(name, initParam.getParamName())) {
+                return initParam.getParamValue();
+            }
+        }
+        return null;
+    }
+
+    /** @return enumeration over all init-param names, in declaration order */
+    @Override
+    public Enumeration<String> getInitParameterNames() {
+        final Iterator<InitParam> iterator = initParams.iterator();
+        return new Enumeration<String>() {
+            @Override
+            public boolean hasMoreElements() {
+                return iterator.hasNext();
+            }
+
+            @Override
+            public String nextElement() {
+                return iterator.next().getParamName();
+            }
+        };
+    }
+
+    /** @return the instantiated servlet filter; valid only after initialize() */
+    public javax.servlet.Filter getActualFilter() {
+        checkState(initialized, "Not initialized");
+        return actualFilter;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+
+    @XmlElement(name = "filter-name")
+    public String getFilterName() {
+        return filterName;
+    }
+
+    public void setFilterName(String filterName) {
+        this.filterName = filterName;
+    }
+
+    @XmlElement(name = "filter-class")
+    public String getFilterClass() {
+        return filterClass;
+    }
+
+    public void setFilterClass(String filterClass) {
+        this.filterClass = filterClass;
+    }
+
+    @XmlElement(name = "init-param")
+    public List<InitParam> getInitParams() {
+        return initParams;
+    }
+
+    public void setInitParams(List<InitParam> initParams) {
+        this.initParams = initParams;
+    }
+
+
+    @Override
+    public String toString() {
+        return "Filter{" +
+                "filterName='" + filterName + '\'' +
+                '}';
+    }
+
+    /** @return param-name to InitParam map; every param must already be initialized */
+    public Map<String, InitParam> getInitParamsMap() {
+        Map<String, InitParam> result = new HashMap<>();
+        for (InitParam initParam : initParams) {
+            checkState(initParam.isInitialized());
+            result.put(initParam.getParamName(), initParam);
+        }
+        return result;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * JAXB representation of a <filter-mapping/> element: binds a filter name to a
+ * url-pattern. Becomes read-only once {@link #initialize()} has been called.
+ */
+@XmlRootElement
+public class FilterMapping {
+    private String filterName;
+    private String urlPattern;
+    private boolean initialized;
+
+    /** Freezes this mapping; subsequent setter calls are rejected. */
+    public synchronized void initialize() {
+        checkArgument(!initialized, "Already initialized");
+        initialized = true;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+    @XmlElement(name = "filter-name")
+    public String getFilterName() {
+        return filterName;
+    }
+
+    public void setFilterName(String filterName) {
+        checkArgument(!initialized, "Already initialized");
+        this.filterName = filterName;
+    }
+
+    @XmlElement(name = "url-pattern")
+    public String getUrlPattern() {
+        return urlPattern;
+    }
+
+    public void setUrlPattern(String urlPattern) {
+        checkArgument(!initialized, "Already initialized");
+        this.urlPattern = urlPattern;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.base.Optional;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+
+/**
+ * Root element, arbitrarily named Host to match tomcat-server.xml, but does not allow specifying which host
+ * name to be matched. Holds shared filter templates and per-context filter configuration.
+ */
+@XmlRootElement(name = "Host")
+public class Host {
+    private List<Context> contexts = new ArrayList<>();
+    private List<Filter> filterTemplates = new ArrayList<>();
+    // set once by initialize(); guards against mutation through the setters afterwards
+    private boolean initialized;
+    // context path -> Context, built during initialize()
+    private Map<String, Context> contextMap;
+
+
+    /**
+     * Initializes every filter template, then every context (merging template
+     * init-params into the context filters), and freezes this instance.
+     *
+     * @param fileName configuration file name, used only in error messages
+     * @throws IllegalStateException if called twice or if two contexts share a path
+     */
+    public synchronized void initialize(String fileName) {
+        checkState(!initialized, "Already initialized");
+        Map<String, Filter> namesToTemplates = new HashMap<>();
+        for (Filter template : filterTemplates) {
+            template.initializeTemplate();
+            namesToTemplates.put(template.getFilterName(), template);
+        }
+        contextMap = new HashMap<>();
+        for (Context context : getContexts()) {
+            // Guava Preconditions substitute %s placeholders; the original "{}" slf4j-style
+            // placeholders were never filled in, producing a message without path/file.
+            checkState(!contextMap.containsKey(context.getPath()),
+                    "Context %s already defined in %s", context.getPath(), fileName);
+            context.initialize(fileName, namesToTemplates);
+            contextMap.put(context.getPath(), context);
+        }
+        contextMap = Collections.unmodifiableMap(new HashMap<>(contextMap));
+        contexts = Collections.unmodifiableList(new ArrayList<>(contexts));
+        initialized = true;
+    }
+
+    /**
+     * @param contextPath context path of the incoming request
+     * @return the matching Context, or absent if none is configured for the path
+     * @throws IllegalStateException if initialize() has not run yet
+     */
+    public Optional<Context> findContext(String contextPath) {
+        checkState(initialized, "Not initialized");
+        Context context = contextMap.get(contextPath);
+        return Optional.fromNullable(context);
+    }
+
+    @XmlElement(name = "Context")
+    public List<Context> getContexts() {
+        return contexts;
+    }
+
+    public void setContexts(List<Context> contexts) {
+        checkArgument(!initialized, "Already initialized");
+        this.contexts = contexts;
+    }
+
+    @XmlElement(name = "filter-template")
+    public List<Filter> getFilterTemplates() {
+        return filterTemplates;
+    }
+
+    public void setFilterTemplates(List<Filter> filterTemplates) {
+        checkArgument(!initialized, "Already initialized");
+        this.filterTemplates = filterTemplates;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * JAXB representation of an <init-param/> element: a param-name/param-value pair.
+ */
+@XmlRootElement
+public class InitParam {
+    private String paramName;
+    private String paramValue;
+    private boolean initialized;
+
+    // NOTE: the method name carries a historical typo ("inititialize"); callers rely on it.
+    public synchronized void inititialize() {
+        checkState(!initialized, "Already initialized");
+        initialized = true;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+    @XmlElement(name = "param-name")
+    public String getParamName() {
+        return paramName;
+    }
+
+    public void setParamName(String paramName) {
+        this.paramName = paramName;
+    }
+
+    @XmlElement(name = "param-value")
+    public String getParamValue() {
+        return paramValue;
+    }
+
+    public void setParamValue(String paramValue) {
+        this.paramValue = paramValue;
+    }
+
+    @Override
+    public String toString() {
+        return "{" + paramName + '=' + paramValue + "}";
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.StringReader;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+
+/** Utility that parses the valve XML configuration into an initialized {@link Host} tree. */
+public final class Parser {
+
+    private Parser() {
+        // utility class, not meant to be instantiated
+    }
+
+    /**
+     * Unmarshals the given XML content and initializes the resulting Host.
+     *
+     * @param xmlFileContent full content of the configuration file
+     * @param fileName file name, used only in error messages
+     * @return fully initialized Host
+     * @throws JAXBException if the XML cannot be unmarshalled
+     */
+    public static Host parse(String xmlFileContent, String fileName) throws JAXBException {
+        JAXBContext context = JAXBContext.newInstance(Host.class);
+        javax.xml.bind.Unmarshaller um = context.createUnmarshaller();
+        Host host = (Host) um.unmarshal(new StringReader(xmlFileContent));
+        host.initialize(fileName);
+        return host;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import com.google.common.base.Optional;
+import java.io.IOException;
+import java.util.List;
+import java.util.ListIterator;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Context;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Filter;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Host;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Applies the filters configured for the request's context path, then hands
+ * control to the supplied chain (i.e. the next valve).
+ */
+public class FilterProcessor {
+    private static final Logger logger = LoggerFactory.getLogger(FilterProcessor.class);
+
+    // parsed and initialized configuration tree (see Parser)
+    private final Host host;
+
+    public FilterProcessor(Host host) {
+        this.host = host;
+    }
+
+    /**
+     * Runs every filter whose url-pattern matches the request's pathInfo, in their
+     * declaration order, ending with nextValveFilterChain. If no context matches
+     * the request's context path, proceeds straight to the next valve.
+     */
+    public void process(Request request, Response response, FilterChain nextValveFilterChain)
+            throws IOException, ServletException {
+
+        String contextPath = request.getContext().getPath();
+        String pathInfo = request.getPathInfo();
+
+        Optional<Context> maybeContext = host.findContext(contextPath);
+        logger.trace("Processing context {} path {}, found {}", contextPath, pathInfo, maybeContext);
+        if (maybeContext.isPresent()) {
+            // process filters
+            Context context = maybeContext.get();
+            List<Filter> matchingFilters = context.findMatchingFilters(pathInfo);
+            // Build the chain back-to-front: iterate the filters in reverse, wrapping each
+            // around the chain built so far, so the first filter ends up outermost.
+            FilterChain fromLast = nextValveFilterChain;
+            ListIterator<Filter> it = matchingFilters.listIterator(matchingFilters.size());
+            final boolean trace = logger.isTraceEnabled();
+            while (it.hasPrevious()) {
+                final Filter currentFilter = it.previous();
+                final FilterChain copy = fromLast;
+                fromLast = new FilterChain() {
+                    @Override
+                    public void doFilter(ServletRequest request, ServletResponse response) throws IOException, ServletException {
+                        if (trace) {
+                            logger.trace("Applying {}", currentFilter);
+                        }
+                        javax.servlet.Filter actualFilter = currentFilter.getActualFilter();
+                        actualFilter.doFilter(request, response, copy);
+                    }
+                };
+            }
+            // call first filter
+            fromLast.doFilter(request, response);
+        } else {
+            // move to next valve
+            nextValveFilterChain.doFilter(request, response);
+        }
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.collect.Maps.immutableEntry;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Match incoming URL with user defined patterns according to servlet specification.
+ * In the Web application deployment descriptor, the following syntax is used to define mappings:
+ * <ul>
+ * <li>A string beginning with a ‘/’ character and ending with a ‘/*’ suffix is used for path mapping.</li>
+ * <li>A string beginning with a ‘*.’ prefix is used as an extension mapping.</li>
+ * <li>All other strings are used for exact matches only.</li>
+ * </ul>
+ */
+public class UrlMatcher<FILTER> {
+    private static final Logger logger = LoggerFactory.getLogger(UrlMatcher.class);
+    // order index for each FILTER is kept as Entry.value so filters fire in definition order
+    private final Map<String, Entry<FILTER, Integer>> prefixMap = new HashMap<>(); // contains patterns ending with '/*', '*' is stripped from each key
+    private final Map<String, Entry<FILTER, Integer>> suffixMap = new HashMap<>(); // contains patterns starting with '*.' prefix, '*' is stripped from each key
+    private final Map<String, Entry<FILTER, Integer>> exactMatchMap = new HashMap<>(); // contains exact matches only
+
+    /**
+     * @param patternMap order preserving map containing path info pattern as key
+     */
+    public UrlMatcher(LinkedHashMap<String, FILTER> patternMap) {
+        int idx = 0;
+        for (Entry<String, FILTER> entry : patternMap.entrySet()) {
+            idx++;
+            String pattern = checkNotNull(entry.getKey());
+            FILTER value = entry.getValue();
+            Entry<FILTER, Integer> valueWithIdx = immutableEntry(value, idx);
+            if (pattern.startsWith("/") && pattern.endsWith("/*")) {
+                // keep the trailing '/', drop only the '*'
+                pattern = pattern.substring(0, pattern.length() - 1);
+                prefixMap.put(pattern, valueWithIdx);
+            } else if (pattern.startsWith("*.")) {
+                // keep the '.', drop only the '*'
+                pattern = pattern.substring(1);
+                suffixMap.put(pattern, valueWithIdx);
+            } else {
+                exactMatchMap.put(pattern, valueWithIdx);
+            }
+        }
+    }
+
+    /**
+     * Find filters matching path
+     *
+     * @param pathInfo as returned by request.getPathInfo()
+     * @return list of matching filters, ordered by their definition order
+     */
+    public List<FILTER> findMatchingFilters(String pathInfo) {
+        checkNotNull(pathInfo);
+        TreeMap<Integer, FILTER> sortedMap = new TreeMap<>();
+        // add matching prefixes
+        for (Entry<String, Entry<FILTER, Integer>> prefixEntry : prefixMap.entrySet()) {
+            String prefix = prefixEntry.getKey(); // always ends with '/'
+            // Per servlet spec path mapping, "/foo/*" matches "/foo/bar" and also the
+            // exact path "/foo" - the original startsWith-only check missed the latter.
+            if (pathInfo.startsWith(prefix)
+                    || pathInfo.equals(prefix.substring(0, prefix.length() - 1))) {
+                put(sortedMap, prefixEntry.getValue());
+            }
+        }
+        // add matching suffixes
+        for (Entry<String, Entry<FILTER, Integer>> suffixEntry : suffixMap.entrySet()) {
+            if (pathInfo.endsWith(suffixEntry.getKey())) {
+                put(sortedMap, suffixEntry.getValue());
+            }
+        }
+        // add exact match
+        Entry<FILTER, Integer> exactMatch = exactMatchMap.get(pathInfo);
+        if (exactMatch != null) {
+            put(sortedMap, exactMatch);
+        }
+        ArrayList<FILTER> filters = new ArrayList<>(sortedMap.values());
+        logger.trace("Matching filters for path {} are {}", pathInfo, filters);
+        return filters;
+    }
+
+    // keeps filters sorted by their original definition order
+    private void put(TreeMap<Integer, FILTER> sortedMap, Entry<FILTER, Integer> entry) {
+        sortedMap.put(entry.getValue(), entry.getKey());
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.IOException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+public class DummyFilter implements javax.servlet.Filter {
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void destroy() {
+ throw new UnsupportedOperationException();
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.IOException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+public class MockedFilter implements javax.servlet.Filter {
+ private FilterConfig filterConfig;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ this.filterConfig = filterConfig;
+ }
+
+ public FilterConfig getFilterConfig() {
+ return filterConfig;
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void destroy() {
+ throw new UnsupportedOperationException();
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.matchers.JUnitMatchers.containsString;
+
+import com.google.common.base.Optional;
+import java.io.File;
+import javax.servlet.FilterConfig;
+import org.apache.commons.io.FileUtils;
+import org.junit.Test;
+
+public class ParserTest {
+
+    // Happy path: config parses and template init-params are visible via FilterConfig.
+    @Test
+    public void testParsing() throws Exception {
+        File xmlFile = new File(getClass().getResource("/sample-cors-config.xml").getFile());
+        assertThat(xmlFile.canRead(), is(true));
+        String xmlFileContent = FileUtils.readFileToString(xmlFile);
+        Host host = Parser.parse(xmlFileContent, "fileName");
+        assertEquals(1, host.getContexts().size());
+        // check that MockedFilter has init params merged/replaced
+        Optional<Context> context = host.findContext("/restconf");
+        assertTrue(context.isPresent());
+        assertEquals(1, context.get().getFilters().size());
+        MockedFilter filter = (MockedFilter) context.get().getFilters().get(0).getActualFilter();
+        FilterConfig filterConfig = filter.getFilterConfig();
+        assertEquals("*", filterConfig.getInitParameter("cors.allowed.origins"));
+        assertEquals("11", filterConfig.getInitParameter("cors.preflight.maxage"));
+    }
+
+
+    // A filter-mapping that references an undefined filter must fail parsing.
+    @Test
+    public void testParsing_NoFilterDefined() throws Exception {
+        File xmlFile = new File(getClass().getResource("/no-filter-defined.xml").getFile());
+        assertThat(xmlFile.canRead(), is(true));
+        String xmlFileContent = FileUtils.readFileToString(xmlFile);
+        try {
+            Parser.parse(xmlFileContent, "fileName");
+            fail();
+        }catch(Exception e){
+            assertThat(e.getMessage(), containsString("Cannot find filter for filter-mapping CorsFilter"));
+        }
+    }
+
+    // Conflicting filter-class between template and context filter must fail parsing.
+    @Test
+    public void testConflictingClass() throws Exception {
+        File xmlFile = new File(getClass().getResource("/conflicting-class.xml").getFile());
+        assertThat(xmlFile.canRead(), is(true));
+        String xmlFileContent = FileUtils.readFileToString(xmlFile);
+        try {
+            Parser.parse(xmlFileContent, "fileName");
+            fail();
+        } catch (RuntimeException e) {
+            assertThat(e.getMessage(), containsString("Error while processing filter CorsFilter of context /restconf"));
+            assertThat(e.getCause().getMessage(), containsString("Conflict detected in template/filter filter-class definitions, filter name: CorsFilter"));
+        }
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import static java.util.Arrays.asList;
+import static org.junit.Assert.assertEquals;
+
+import java.util.LinkedHashMap;
+import org.junit.Test;
+
+/**
+ * Tests servlet-style URL pattern matching in {@link UrlMatcher}: exact
+ * paths, the catch-all prefix "/*", extension patterns "*.jsp", and path
+ * prefixes like "/foo/*". Also exercises that multiple patterns can match
+ * one path and that insertion order (LinkedHashMap) is honored in results.
+ */
+public class UrlMatcherTest {
+ UrlMatcher<String> urlMatcher;
+
+ @Test
+ public void test() throws Exception {
+ final String defaultFilter = "default";
+ final String exactMatchFilter = "someFilter";
+ final String jspFilter = "jspFilter";
+ final String exactMatch = "/somePath";
+ final String prefixFilter = "prefixFilter";
+ // LinkedHashMap keeps pattern registration order, which determines
+ // the order of filters returned by findMatchingFilters.
+ LinkedHashMap<String, String> patternMap = new LinkedHashMap<String, String>() {
+ {
+ put(exactMatch, exactMatchFilter);
+ put("/*", defaultFilter);
+ put("*.jsp", jspFilter);
+ put("/foo/*", prefixFilter);
+ }
+ };
+ urlMatcher = new UrlMatcher<>(patternMap);
+ assertMatches("/abc", defaultFilter);
+ assertMatches(exactMatch, exactMatchFilter, defaultFilter);
+ assertMatches("/some.jsp", defaultFilter, jspFilter);
+ assertMatches("/foo/bar", defaultFilter, prefixFilter);
+ assertMatches("/foo/bar.jsp", defaultFilter, jspFilter, prefixFilter);
+ }
+
+ // Helper (not a @Test): asserts the matcher returns exactly the given
+ // filters, in the given order, for the tested path.
+ public void assertMatches(String testedPath, String... filters) {
+ assertEquals(asList(filters), urlMatcher.findMatchingFilters(testedPath));
+ }
+
+}
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- conflict -->
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.DummyFilter</filter-class>
+ </filter>
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <!-- Filters are allowed here, only serving as a template -->
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.methods</param-name>
+ <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.headers</param-name>
+ <param-value>Content-Type,X-Requested-With,accept,authorization,
+ origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers
+ </param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.exposed.headers</param-name>
+ <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.support.credentials</param-name>
+ <param-value>true</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <!-- Filters are also allowed here. -->
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- init params can be added/overridden if template is used -->
+ </filter>
+ <!-- only local references are allowed -->
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+
+ <Context path="/controller/nb/v2/connectionmanager">
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- override value -->
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>11</param-value>
+ </init-param>
+ </filter>
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
<felix.dependencymanager.version>3.1.0</felix.dependencymanager.version>
<felix.fileinstall.version>3.1.6</felix.fileinstall.version>
<felix.webconsole.version>4.2.0</felix.webconsole.version>
+ <filtervalve.version>1.4.2-SNAPSHOT</filtervalve.version>
<flowprogrammer.northbound.version>0.4.2-SNAPSHOT</flowprogrammer.northbound.version>
<flows.web.version>0.4.2-SNAPSHOT</flows.web.version>
<forwarding.staticrouting>0.5.2-SNAPSHOT</forwarding.staticrouting>
<artifactId>devices.web</artifactId>
<version>${devices.web.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>filter-valve</artifactId>
+ <version>${filtervalve.version}</version>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>flowprogrammer.northbound</artifactId>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-persister-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>filter-valve</artifactId>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>logback-config</artifactId>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <!-- Filters are allowed here, only serving as a template -->
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.apache.catalina.filters.CorsFilter</filter-class>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.methods</param-name>
+ <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.headers</param-name>
+ <param-value>Content-Type,X-Requested-With,accept,authorization,
+ origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers
+ </param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.exposed.headers</param-name>
+ <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.support.credentials</param-name>
+ <param-value>true</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- init params can be added/overridden if template is used -->
+ </filter>
+ <!-- references to templates without <filter> declaration are not allowed -->
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+
+</Host>
rotatable="true" fileDateFormat="yyyy-MM"
pattern="%{yyyy-MM-dd HH:mm:ss.SSS z}t - [%a] - %r"/>
+ <Valve className="org.opendaylight.controller.filtervalve.cors.FilterValve"
+ configurationFile="configuration/cors-config.xml"
+ />
</Host>
</Engine>
</Service>
//assertEquals("Wrong value for action SetDlSrc for MAC address.", "24:77:03:7C:C5:F1", new String(
// ((SetDlSrc) action).getDlAddress()));
} else if (action instanceof SetDlType) {
- assertEquals("Wrong value for action SetDlType for.", 513l, ((SetDlType) action).getDlType());
+ assertEquals("Wrong value for action SetDlType for.", 513L, ((SetDlType) action).getDlType());
} else if (action instanceof SetNextHop) {
InetAddress inetAddress = ((SetNextHop) action).getAddress();
checkIpAddresses(inetAddress, "192.168.100.100", "2001:db8:85a3::8a2e:370:7334");
private void prepareActionSetDlType(SetDlTypeActionCaseBuilder wrapper) {
SetDlTypeActionBuilder setDlTypeActionBuilder = new SetDlTypeActionBuilder();
- setDlTypeActionBuilder.setDlType(new EtherType(513l));
+ setDlTypeActionBuilder.setDlType(new EtherType(513L));
wrapper.setSetDlTypeAction(setDlTypeActionBuilder.build());
}
private EthernetType prepEthType() {
EthernetTypeBuilder ethTypeBuild = new EthernetTypeBuilder();
- ethTypeBuild.setType(new EtherType(0xffffl));
+ ethTypeBuild.setType(new EtherType(0xffffL));
return ethTypeBuild.build();
}
package org.opendaylight.controller.sal.connector.remoterpc.impl;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableSet;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+import javax.transaction.HeuristicMixedException;
+import javax.transaction.HeuristicRollbackException;
+import javax.transaction.NotSupportedException;
+import javax.transaction.RollbackException;
+
import org.apache.felix.dm.Component;
-import org.opendaylight.controller.clustering.services.*;
+import org.opendaylight.controller.clustering.services.CacheConfigException;
+import org.opendaylight.controller.clustering.services.CacheExistException;
+import org.opendaylight.controller.clustering.services.CacheListenerAddException;
+import org.opendaylight.controller.clustering.services.ICacheUpdateAware;
+import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
+import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.transaction.HeuristicMixedException;
-import javax.transaction.HeuristicRollbackException;
-import javax.transaction.NotSupportedException;
-import javax.transaction.RollbackException;
-import java.util.*;
-import java.util.concurrent.ConcurrentMap;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableSet;
public class RoutingTableImpl<I, R> implements RoutingTable<I, R>, ICacheUpdateAware<I, R> {
- private Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
+ private final Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
- private IClusterGlobalServices clusterGlobalServices = null;
+ private IClusterGlobalServices clusterGlobalServices = null;
- private ConcurrentMap<I,R> globalRpcCache = null;
- private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; //need routes to ordered by insert-order
+ private ConcurrentMap<I,R> globalRpcCache = null;
+ private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; // routes need to be ordered by insertion order
- public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
- public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
+ public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
+ public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
- public RoutingTableImpl() {
- }
-
- @Override
- public R getGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
- return globalRpcCache.get(routeId);
- }
-
- @Override
- public void addGlobalRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
- try {
-
- log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
- clusterGlobalServices.tbegin();
- if (globalRpcCache.putIfAbsent(routeId, route) != null) {
- throw new DuplicateRouteException(" There is already existing route " + routeId);
- }
- clusterGlobalServices.tcommit();
-
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to create route id="
- + routeId + "with route" + route, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to create with value", e);
+ public RoutingTableImpl() {
}
- }
+ @Override
+ public R getGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
+ return globalRpcCache.get(routeId);
+ }
- @Override
- public void removeGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
- try {
- log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+ @Override
+ public void addGlobalRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
+ try {
+
+ log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
+ clusterGlobalServices.tbegin();
+ if (globalRpcCache.putIfAbsent(routeId, route) != null) {
+ throw new DuplicateRouteException(" There is already existing route " + routeId);
+ }
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to create route id="
+ + routeId + "with route" + route, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to create with value", e);
+ }
- clusterGlobalServices.tbegin();
- globalRpcCache.remove(routeId);
- clusterGlobalServices.tcommit();
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
+ try {
+ log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+
+ clusterGlobalServices.tbegin();
+ globalRpcCache.remove(routeId);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public Set<R> getRoutes(I routeId) {
- Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
- Set<R> routes = rpcCache.get(routeId);
+ @Override
+ public Set<R> getRoutes(final I routeId) {
+ Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
+ Set<R> routes = rpcCache.get(routeId);
- if (routes == null) return Collections.emptySet();
+ if (routes == null) {
+ return Collections.emptySet();
+ }
- return ImmutableSet.copyOf(routes);
- }
+ return ImmutableSet.copyOf(routes);
+ }
- public R getLastAddedRoute(I routeId) {
+ @Override
+ public R getLastAddedRoute(final I routeId) {
- Set<R> routes = getRoutes(routeId);
+ Set<R> routes = getRoutes(routeId);
- if (routes.isEmpty()) return null;
+ if (routes.isEmpty()) {
+ return null;
+ }
- R route = null;
- Iterator<R> iter = routes.iterator();
- while (iter.hasNext())
- route = iter.next();
+ R route = null;
+ Iterator<R> iter = routes.iterator();
+ while (iter.hasNext()) {
+ route = iter.next();
+ }
- return route;
- }
+ return route;
+ }
- @Override
- public void addRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
- Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+ @Override
+ public void addRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
+ Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+
+ try{
+ clusterGlobalServices.tbegin();
+ log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
+ threadSafeAdd(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
+ }
- try{
- clusterGlobalServices.tbegin();
- log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
- threadSafeAdd(routeId, route);
- clusterGlobalServices.tcommit();
+ @Override
+ public void addRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ addRoute(routeId, route);
+ }
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+
+ LinkedHashSet<R> routes = rpcCache.get(routeId);
+ if (routes == null) {
+ return;
+ }
+
+ try {
+ log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
+
+ clusterGlobalServices.tbegin();
+ threadSafeRemove(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public void addRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- addRoute(routeId, route);
+ @Override
+ public void removeRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ removeRoute(routeId, route);
+ }
}
- }
- @Override
- public void removeRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+ /**
+ * This method guarantees that no two threads overwrite each other's changes.
+ * To avoid an infinite loop, it tries up to 100 times and then throws.
+ */
+ private void threadSafeAdd(final I routeId, final R route) {
+
+ for (int i=0;i<100;i++){
+
+ LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
+ updatedRoutes.add(route);
+ LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
+ if (oldRoutes == null) {
+ return;
+ }
+
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.add(route);
+
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+ }
+ // reaching this point means the add failed in all 100 attempts
+ throw new IllegalStateException("Failed to add route [" + routeId + "]");
+ }
- LinkedHashSet<R> routes = rpcCache.get(routeId);
- if (routes == null) return;
+ /**
+ * This method guarantees that no two threads overwrite each other's changes.
+ * To avoid an infinite loop, it tries up to 10 times and then throws.
+ */
+ private void threadSafeRemove(final I routeId, final R route) {
+ LinkedHashSet<R> updatedRoutes = null;
+ for (int i=0;i<10;i++){
+ LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
+
+ // if route to be deleted is the only entry in the set then remove routeId from the cache
+ if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
+ rpcCache.remove(routeId);
+ return;
+ }
+
+ // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.remove(route);
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+
+ }
+ // reaching this point means the removal failed in all 10 attempts
+ throw new IllegalStateException("Failed to remove route [" + routeId + "]");
+ }
- try {
- log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
- clusterGlobalServices.tbegin();
- threadSafeRemove(routeId, route);
- clusterGlobalServices.tcommit();
+ // /**
+ // * @deprecated doesn't do anything will be removed once listeners used
+ // * whiteboard pattern Registers listener for sending any change
+ // * notification
+ // * @param listener
+ // */
+ // @Override
+ // public void registerRouteChangeListener(RouteChangeListener listener) {
+ //
+ // }
+
+ // public void setRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.add(rcl);
+ // }else{
+ // log.warn("setRouteChangeListener called with null listener");
+ // }
+ // }
+ //
+ // public void unSetRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.remove(rcl);
+ // }else{
+ // log.warn("unSetRouteChangeListener called with null listener");
+ // }
+ // }
+
+ /**
+ * Returning the set of route change listeners for Unit testing Note: the
+ * package scope is default
+ *
+ * @return List of registered RouteChangeListener<I,R> listeners
+ */
+ // Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
+ // return routeChangeListeners;
+ // }
+ public void setClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ this.clusterGlobalServices = clusterGlobalServices;
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ public void unsetClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
+ this.clusterGlobalServices = null;
+ }
}
- }
- @Override
- public void removeRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- removeRoute(routeId, route);
+ /**
+ * Finds OR Creates clustered cache for Global RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ // let us check here if the cache already exists -- if so don't create
+ if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
+
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
+
+ } else {
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
+ log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
+ }
}
- }
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 100 times then throw
- */
- private void threadSafeAdd(I routeId, R route) {
+ /**
+ * Finds OR Creates clustered cache for Routed RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ if (clusterGlobalServices.existCache(RPC_CACHE)){
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
+ log.debug("Cache exists [{}] ", RPC_CACHE);
+ return;
+ }
+
+ //cache doesnt exist, create one
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", RPC_CACHE);
+ }
- for (int i=0;i<100;i++){
- LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
- updatedRoutes.add(route);
- LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
- if (oldRoutes == null) return;
+ /**
+ * Function called by the dependency manager when all the required
+ * dependencies are satisfied
+ */
+ void init(final Component c) {
+ try {
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.add(route);
+ findOrCreateGlobalRpcCache();
+ findOrCreateRpcCache();
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
+ throw new IllegalStateException("could not construct routing table cache");
+ }
}
- //the method did not already return means it failed to add route in 10 attempts
- throw new IllegalStateException("Failed to add route [" + routeId + "]");
- }
-
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 10 times then throw
- */
- private void threadSafeRemove(I routeId, R route) {
- LinkedHashSet<R> updatedRoutes = null;
- for (int i=0;i<10;i++){
- LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
-
- // if route to be deleted is the only entry in the set then remove routeId from the cache
- if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
- rpcCache.remove(routeId);
- return;
- }
-
- // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.remove(route);
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, R> getGlobalRpcCache() {
+ return this.globalRpcCache;
}
- //the method did not already return means it failed to remove route in 10 attempts
- throw new IllegalStateException("Failed to remove route [" + routeId + "]");
- }
-
-
-// /**
-// * @deprecated doesn't do anything will be removed once listeners used
-// * whiteboard pattern Registers listener for sending any change
-// * notification
-// * @param listener
-// */
-// @Override
-// public void registerRouteChangeListener(RouteChangeListener listener) {
-//
-// }
-
-// public void setRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.add(rcl);
-// }else{
-// log.warn("setRouteChangeListener called with null listener");
-// }
-// }
-//
-// public void unSetRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.remove(rcl);
-// }else{
-// log.warn("unSetRouteChangeListener called with null listener");
-// }
-// }
-
- /**
- * Returning the set of route change listeners for Unit testing Note: the
- * package scope is default
- *
- * @return List of registered RouteChangeListener<I,R> listeners
- */
-// Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
-// return routeChangeListeners;
-// }
- public void setClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- this.clusterGlobalServices = clusterGlobalServices;
- }
-
- public void unsetClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
- this.clusterGlobalServices = null;
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Global RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- // let us check here if the cache already exists -- if so don't create
- if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
-
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
-
- } else {
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
- log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Routed RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- if (clusterGlobalServices.existCache(RPC_CACHE)){
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
- log.debug("Cache exists [{}] ", RPC_CACHE);
- return;
- }
-
- //cache doesnt exist, create one
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", RPC_CACHE);
- }
-
-
- /**
- * Function called by the dependency manager when all the required
- * dependencies are satisfied
- */
- void init(Component c) {
- try {
- findOrCreateGlobalRpcCache();
- findOrCreateRpcCache();
-
- } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
- throw new IllegalStateException("could not construct routing table cache");
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, LinkedHashSet<R>> getRpcCache() {
+ return this.rpcCache;
}
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getGlobalRpcCache() {
- return this.globalRpcCache;
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getRpcCache() {
- return this.rpcCache;
- }
-
- /**
- * This is used from integration test NP rest API to check out the result of the
- * cache population
- * <Note> For testing purpose only-- use it wisely</Note>
- *
- * @return
- */
- public String dumpGlobalRpcCache() {
- Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, R> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ /**
+ * This is used from integration test NP rest API to check out the result of the
+ * cache population
+ * <Note> For testing purpose only-- use it wisely</Note>
+ *
+ * @return a newline-separated dump of key/value pairs in the global RPC cache
+ */
+ public String dumpGlobalRpcCache() {
+ Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, R> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
-
- public String dumpRpcCache() {
- Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ public String dumpRpcCache() {
+ Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
- /**
- * Invoked when a new entry is available in the cache, the key is only
- * provided, the value will come as an entryUpdate invocation
- *
- * @param key Key for the entry just created
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryCreated(I key, String cacheName, boolean originLocal) {
- // TBD: do we require this.
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ /**
+ * Invoked when a new entry is available in the cache, the key is only
+ * provided, the value will come as an entryUpdate invocation
+ *
+ * @param key Key for the entry just created
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryCreated(final I key, final String cacheName, final boolean originLocal) {
+ // TBD: do we require this.
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ }
}
- }
-
- /**
- * Called anytime a given entry is updated
- *
- * @param key Key for the entry modified
- * @param new_value the new value the key will have
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryUpdated(I key, R new_value, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
- + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given entry is updated
+ *
+ * @param key Key for the entry modified
+ * @param new_value the new value the key will have
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryUpdated(final I key, final R new_value, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
+ + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteUpdated(key, new_value);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteUpdated(key, new_value);
-// }
-// }
- }
-
- /**
- * Called anytime a given key is removed from the ConcurrentHashMap we are
- * listening to.
- *
- * @param key Key of the entry removed
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryDeleted(I key, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + " local = " + originLocal
- + " cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given key is removed from the ConcurrentHashMap we are
+ * listening to.
+ *
+ * @param key Key of the entry removed
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryDeleted(final I key, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryDeleted routeId = " + key + " local = " + originLocal
+ + " cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteDeleted(key);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteDeleted(key);
-// }
-// }
- }
-}
\ No newline at end of file
+}
package org.opendaylight.controller.sal.connector.remoterpc.impl;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
import junit.framework.Assert;
+
import org.apache.felix.dm.Component;
import org.junit.After;
import org.junit.Before;
import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
-import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
-import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.Set;
-import java.util.concurrent.*;
-
-import static org.mockito.Mockito.*;
-
public class RoutingTableImplTest {
- private final URI namespace = URI.create("http://cisco.com/example");
- private final QName QNAME = new QName(namespace, "global");
-
- private IClusterGlobalServices clusterService;
- private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
- ConcurrentMap mockGlobalRpcCache;
- ConcurrentMap mockRpcCache;
-
- @Before
- public void setUp() throws Exception{
- clusterService = mock(IClusterGlobalServices.class);
- routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
- mockGlobalRpcCache = new ConcurrentHashMap<>();
- mockRpcCache = new ConcurrentHashMap<>();
- createRoutingTableCache();
- }
-
- @After
- public void tearDown(){
- reset(clusterService);
- mockGlobalRpcCache = null;
- mockRpcCache = null;
- }
+ private final URI namespace = URI.create("http://cisco.com/example");
+ private final QName QNAME = new QName(namespace, "global");
+
+ private IClusterGlobalServices clusterService;
+ private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
+ ConcurrentMap mockGlobalRpcCache;
+ ConcurrentMap mockRpcCache;
+
+ @Before
+ public void setUp() throws Exception{
+ clusterService = mock(IClusterGlobalServices.class);
+ routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
+ mockGlobalRpcCache = new ConcurrentHashMap<>();
+ mockRpcCache = new ConcurrentHashMap<>();
+ createRoutingTableCache();
+ }
- @Test
- public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
+ @After
+ public void tearDown(){
+ reset(clusterService);
+ mockGlobalRpcCache = null;
+ mockRpcCache = null;
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
- final String expectedRoute = "172.27.12.1:5000";
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
- Assert.assertEquals(mockGlobalRpcCache, latestCache);
- Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
- }
+ final String expectedRoute = "172.27.12.1:5000";
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test (expected = RoutingTable.DuplicateRouteException.class)
- public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
+ ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
+ Assert.assertEquals(mockGlobalRpcCache, latestCache);
+ Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
+ @Test (expected = RoutingTable.DuplicateRouteException.class)
+ public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.addGlobalRoute(routeIdentifier, new String());
- routingTable.addGlobalRoute(routeIdentifier, new String());
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
- @Test
- public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- String expectedRoute = "172.27.12.1:5000";
+ @Test
+ public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ String expectedRoute = "172.27.12.1:5000";
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertEquals(expectedRoute, actualRoute);
- }
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test
- public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertEquals(expectedRoute, actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertNull(actualRoute);
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- @Test
- public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertNull(actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
- routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
- Assert.assertTrue(cache.size() == 1);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
+ routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
+ Assert.assertTrue(cache.size() == 1);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
- Assert.assertTrue(mockRpcCache.size() == 0);
+ }
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ @Test
+ public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
+ Assert.assertTrue(mockRpcCache.size() == 0);
- routingTable.addRoute(routeId, new String());
- Assert.assertTrue(mockRpcCache.size() == 1);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(1, routes.size());
- }
+ routingTable.addRoute(routeId, new String());
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(1, routes.size());
+ }
- Assert.assertTrue(mockRpcCache.size() == 0);
+ @Test
+ public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ Assert.assertTrue(mockRpcCache.size() == 0);
- String route_1 = "10.0.0.1:5955";
- String route_2 = "10.0.0.2:5955";
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ String route_1 = "10.0.0.1:5955";
+ String route_2 = "10.0.0.2:5955";
- Assert.assertTrue(mockRpcCache.size() == 1);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(2, routes.size());
- Assert.assertTrue(routes.contains(route_1));
- Assert.assertTrue(routes.contains(route_2));
- }
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
- ExecutorService threadPool = Executors.newCachedThreadPool();
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(2, routes.size());
+ Assert.assertTrue(routes.contains(route_1));
+ Assert.assertTrue(routes.contains(route_2));
+ }
- int numOfRoutesToAdd = 100;
- String routePrefix_1 = "10.0.0.1:555";
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
- String routePrefix_2 = "10.0.0.1:556";
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+ @Test
+ public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
+ ExecutorService threadPool = Executors.newCachedThreadPool();
+
+ int numOfRoutesToAdd = 100;
+ String routePrefix_1 = "10.0.0.1:555";
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
+ String routePrefix_2 = "10.0.0.1:556";
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+
+ // wait for all tasks to complete; timeout in 10 sec
+ threadPool.shutdown();
+ try {
+ Assert.assertTrue("Timed out waiting for route-adding tasks", threadPool.awaitTermination(10, TimeUnit.SECONDS));
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
- // wait for all tasks to complete; timeout in 10 sec
- threadPool.shutdown();
- try {
- threadPool.awaitTermination(10, TimeUnit.SECONDS); //
- } catch (InterruptedException e) {
- e.printStackTrace();
+ Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
}
- Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
- }
-
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
- routingTable.addRoute(null, new String());
- }
+ routingTable.addRoute(null, new String());
+ }
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
- routingTable.addRoute(getRouteIdentifier(), null);
- }
+ routingTable.addRoute(getRouteIdentifier(), null);
+ }
- @Test (expected = UnsupportedOperationException.class)
- public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, new String());
+ @Test (expected = UnsupportedOperationException.class)
+ public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, new String());
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- routes.add(new String()); //can not be modified; should throw
- }
+ routes.add(new String()); //can not be modified; should throw
+ }
- @Test
- public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, "10.0.0.1:5555");
- routingTable.addRoute(routeId, "10.0.0.2:5555");
+ @Test
+ public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, "10.0.0.1:5555");
+ routingTable.addRoute(routeId, "10.0.0.2:5555");
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- Assert.assertEquals(2, routes.size());
- }
+ Assert.assertEquals(2, routes.size());
+ }
- @Test
- public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
- throws Exception {
+ @Test
+ public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
+ throws Exception {
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
- }
+ Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
+ }
- @Test
- public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
+ @Test
+ public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
+ Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.removeRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- }
+ }
- @Test
- public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
+ @Test
+ public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
- routingTable.addRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.addRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- ConcurrentMap cache = routingTable.getRpcCache();
- Assert.assertFalse(cache.containsKey(routeId));
+ routingTable.removeRoute(routeId, route_1);
+ ConcurrentMap cache = routingTable.getRpcCache();
+ Assert.assertFalse(cache.containsKey(routeId));
- }
+ }
- /*
- * Private helper methods
- */
- private void createRoutingTableCache() throws Exception {
+ /*
+ * Private helper methods
+ */
+ private void createRoutingTableCache() throws Exception {
- //here init
- Component c = mock(Component.class);
+ //here init
+ Component c = mock(Component.class);
- when(clusterService.existCache(
- RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockGlobalRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockGlobalRpcCache);
- when(clusterService.existCache(
- RoutingTableImpl.RPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.RPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockRpcCache);
- doNothing().when(clusterService).tbegin();
- doNothing().when(clusterService).tcommit();
+ doNothing().when(clusterService).tbegin();
+ doNothing().when(clusterService).tcommit();
- routingTable.setClusterGlobalServices(this.clusterService);
- routingTable.init(c);
+ routingTable.setClusterGlobalServices(this.clusterService);
+ routingTable.init(c);
- Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
- Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
- }
+ Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
+ Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
+ }
- private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
- InstanceIdentifier identifier = mock(InstanceIdentifier.class);
- when(routeIdentifier.getType()).thenReturn(QNAME);
- when(routeIdentifier.getRoute()).thenReturn(identifier);
+ private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
+ InstanceIdentifier identifier = mock(InstanceIdentifier.class);
+ when(routeIdentifier.getType()).thenReturn(QNAME);
+ when(routeIdentifier.getRoute()).thenReturn(identifier);
- return routeIdentifier;
- }
+ return routeIdentifier;
+ }
- private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
- return new Runnable() {
- @Override
- public void run() {
- for (int i=0;i<numRoutes;i++){
- String route = routePrefix + i;
- try {
- routingTable.addRoute(routeId, route);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- };
- }
+ private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
+ return new Runnable() {
+ @Override
+ public void run() {
+ for (int i=0;i<numRoutes;i++){
+ String route = routePrefix + i;
+ try {
+ routingTable.addRoute(routeId, route);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ };
+ }
}
@Override
public void run() {
- onBrokerAvailable(broker, context);;
+ onBrokerAvailable(broker, context);
}
});
return broker;
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
import org.opendaylight.controller.sal.binding.api.BindingAwareProvider.ProviderFunctionality;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
* <li>Notification Service - see {@link NotificationService} and
* {@link NotificationProviderService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.binding.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.binding.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
*
* For more information about session-based access see {@link ConsumerContext}
* and {@link ProviderContext}
- *
- *
- *
*/
public interface BindingAwareBroker {
/**
* functionality) for the consumer and provides access to the SAL
* infrastructure services and other functionality provided by
* {@link Provider}s.
- *
- *
- *
*/
public interface ConsumerContext extends RpcConsumerRegistry {
* @return Session specific implementation of service
*/
<T extends BindingAwareService> T getSALService(Class<T> service);
-
-
}
/**
*/
package org.opendaylight.controller.sal.binding.api;
-import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext;
-
/**
- *
* Session-specific instance of the broker functionality.
*
* <p>
*
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerContext#getSALService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext#getSALService(Class)} method on session
* assigned to the consumer.
*
* <p>
- * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
+ * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
* similar, but provider provides YANG model-based functionality and
* {@link BindingAwareProvider} exposes the necessary supporting functionality
* to implement specific functionality of YANG and to reuse it in the
- * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}
- * s.
- *
- *
- *
+ * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}s.
*/
public interface BindingAwareService {
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler;
import org.opendaylight.controller.md.sal.common.api.data.DataModification;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.common.RpcResult;
public interface DataModificationTransaction extends DataModification<InstanceIdentifier<? extends DataObject>, DataObject> {
-
/**
* Returns an unique identifier for transaction
*
*/
@Override
- public Object getIdentifier();
+ Object getIdentifier();
/**
* Initiates a two-phase commit of candidate data.
* of this changes.
*
*
- * @see DataCommitHandler for further information how two-phase commit is
+ * @see org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler for further information on how two-phase commit is
* processed.
* @param store
* Identifier of the store, where commit should occur.
* encountered errors, if commit was not successful.
*/
@Override
- public Future<RpcResult<TransactionStatus>> commit();
-
-
+ Future<RpcResult<TransactionStatus>> commit();
/**
* Register a listener for transaction
*/
ListenerRegistration<DataTransactionListener> registerListener(DataTransactionListener listener);
-
-
/**
* Listener for transaction state changes
- *
- *
*/
public interface DataTransactionListener extends EventListener {
/**
*/
package org.opendaylight.controller.sal.binding.api.data;
-
import org.opendaylight.controller.md.sal.common.api.data.DataProvisionService;
import org.opendaylight.controller.md.sal.common.api.data.DataReader;
-import org.opendaylight.controller.sal.binding.api.BindingAwareProvider;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
/**
- * DataProviderService is common access point for {@link BindingAwareProvider} providers
+ * DataProviderService is common access point for {@link org.opendaylight.controller.sal.binding.api.BindingAwareProvider} providers
* to access data trees described by the YANG model.
- *
*/
public interface DataProviderService extends DataBrokerService, DataProvisionService<InstanceIdentifier<? extends DataObject>, DataObject> {
-
-
/**
* Registers a data reader for particular subtree of overal YANG data tree.
*
import org.opendaylight.controller.sal.binding.api.rpc.RpcRouter;
import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.RpcService;
-import org.opendaylight.yangtools.yang.binding.annotations.RoutingContext;
public interface RuntimeCodeGenerator {
* Returned instance:
* <ul>
* <li>implements provided subclass of RpcService type and
- * {@link DelegateProxy} interface.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy} interface.
* <li>
* <p>
* delegates all invocations of methods, which are defined in RpcService
* subtype to delegate which is defined by
- * {@link DelegateProxy#setDelegate(Object)}.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)}.
* <p>
* If delegate is not defined (<code>getDelegate() == null</code>)
* implementation throws {@link IllegalStateException}
- * <li>{@link DelegateProxy#getDelegate()} - returns the delegate to which
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#getDelegate()} - returns the delegate to which
* all calls are delegated.
- * <li>{@link DelegateProxy#setDelegate(Object)} - sets the delegate for
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)} - sets the delegate for
* particular instance
*
* </ul>
* - Subclass of RpcService for which direct proxy is to be
* generated.
* @return Instance of RpcService of provided serviceType which implements
- * and {@link DelegateProxy}
+ * and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* @throws IllegalArgumentException
*
*/
* <ul>
* <li>Implements:
* <ul>
- * <li>{@link DelegateProxy}
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* <li>{@link RpcRouter}
* </ul>
* <li>
* <ul>
* <li>
* Implementation uses
- * {@link RpcRouter#getService(Class, InstanceIdentifier)} method to
+ * {@link RpcRouter#getService(Class, org.opendaylight.yangtools.yang.binding.InstanceIdentifier)} method to
* retrieve particular instance to which call will be routed.
* <li>
- * Instance of {@link InstanceIdentifier} is determined by first argument of
+ * Instance of {@link org.opendaylight.yangtools.yang.binding.InstanceIdentifier} is determined by first argument of
* method and is retrieved via method which is annotated with
- * {@link RoutingContext}. Class representing Routing Context Identifier is
- * retrieved by {@link RoutingContext}.
- * <li>If first argument is not defined / {@link RoutingContext} annotation
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * Class representing Routing Context Identifier is retrieved by a
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * <li>If first argument is not defined / {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext} annotation
* is not present on any field invocation will be delegated to default
* service {@link RpcRouter#getDefaultService()}.
* </ul>
* @param serviceType
* - Subclass of RpcService for which Router is to be generated.
* @return Instance of RpcService of provided serviceType which implements
- * also {@link RpcRouter}<T> and {@link DelegateProxy}
+ * also {@link RpcRouter}<T> and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
*/
<T extends RpcService> RpcRouter<T> getRouterFor(Class<T> serviceType,String name) throws IllegalArgumentException;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
-import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
*/
package org.opendaylight.controller.sal.binding.impl.util;
-import java.util.Iterator;
import org.opendaylight.controller.md.sal.common.impl.routing.AbstractDataReadRouter;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-@SuppressWarnings("all")
public class BindingAwareDataReaderRouter extends AbstractDataReadRouter<InstanceIdentifier<? extends DataObject>,DataObject> {
- protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
- return data.iterator().next();
- }
+ @Override
+ protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
+ return data.iterator().next();
+ }
}
Nodes nodes = checkForNodes();
verifyNode(nodes, flowCapableNode).assertHasAugmentation(FlowCapableNode.class);
- ;
assertBindingIndependentVersion(NODE_INSTANCE_ID_BI);
// Node meterStatsNode = createTestNode(NodeMeterStatistics.class, nodeMeterStatistics());
// commitNodeAndVerifyTransaction(meterStatsNode);
*/
package org.opendaylight.controller.md.sal.common.api.data;
-import java.util.concurrent.Future;
-
import org.opendaylight.yangtools.concepts.Path;
import com.google.common.base.Optional;
*/
package org.opendaylight.controller.sal.compability;
-import org.opendaylight.controller.sal.core.*;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeConnector;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.port.rev130925.PortFeatures;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
-import org.opendaylight.controller.sal.core.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.core.api.data.DataProviderService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationPublishService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationService;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.common.QName;
* <li>RPC Invocation - see {@link ConsumerSession#rpc(QName, CompositeNode)},
* {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)} and
* {@link RpcImplementation}
- * <li>Notification Service - see {@link NotificationService} and
- * {@link NotificationPublishService}
+ * <li>Notification Service - see {@link org.opendaylight.controller.sal.core.api.notify.NotificationService} and
+ * {@link org.opendaylight.controller.sal.core.api.notify.NotificationPublishService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.core.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.core.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
* functionality of the provider from the system.
*/
@Override
- public void close();
+ void close();
@Override
boolean isClosed();
void close();
}
- public interface RoutedRpcRegistration extends RpcRegistration,
- RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
+ public interface RoutedRpcRegistration extends RpcRegistration, RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
}
}
*/
package org.opendaylight.controller.sal.core.api;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-
/**
*
* Session-specific instance of the broker functionality.
*
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerSession#getService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#getService(Class)} method on session
* assigned to the consumer.
*
* <p>
import java.util.Set;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
* {@link Provider#getProviderFunctionality()}
* <li>passing an instance of implementation and {@link QName} of rpc as
* arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, RpcImplementation)}
* </ul>
*
* The simplified process of the invocation of rpc is following:
*
* <ol>
* <li> {@link Consumer} invokes
- * {@link ConsumerSession#rpc(QName, CompositeNode)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#rpc(QName, CompositeNode)}
* <li> {@link Broker} finds registered {@link RpcImplementation}s
* <li> {@link Broker} invokes
* {@link RpcImplementation#invokeRpc(QName, CompositeNode)}
*/
package org.opendaylight.controller.sal.core.api.notify;
-import org.opendaylight.controller.sal.core.api.Broker;
-import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
* The simplified process of the notification publishing is following:
*
* <ol>
- * <li> {@link Provider} invokes {@link #sendNotification(CompositeNode)}
- * <li> {@link Broker} finds {@link NotificationListener}s which subscribed for
+ * <li> {@link org.opendaylight.controller.sal.core.api.Provider} invokes {@link #sendNotification(CompositeNode)}
+ * <li> {@link org.opendaylight.controller.sal.core.api.Broker} finds {@link NotificationListener}s which subscribed for
* the notification type.
*
- * <li>For each subscriber {@link Broker} invokes
+ * <li>For each subscriber {@link org.opendaylight.controller.sal.core.api.Broker} invokes
* {@link NotificationListener#onNotification(CompositeNode)}
* </ol>
*/
package org.opendaylight.controller.sal.core.api.notify;
import org.opendaylight.controller.sal.core.api.BrokerService;
-import org.opendaylight.controller.sal.core.api.Provider;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.common.QName;
* The registration of notification listeners could be done by:
* <ul>
* <li>returning an instance of implementation in the return value of
- * {@link Provider#getProviderFunctionality()}
- * <li>passing an instance of implementation and {@link QName} of rpc as an
- * arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Provider#getProviderFunctionality()}
+ * <li>passing an instance of implementation and {@link QName} of an RPC as an
+ * argument to
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, org.opendaylight.controller.sal.core.api.RpcImplementation)}
* </ul>
*
*
import java.util.List;
import java.util.Map;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier.PathArgument;
import com.google.common.base.Predicates;
/**
- * A set of utility methods for interacting with {@link TreeNode} objects.
+ * A set of utility methods for interacting with {@link org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode} objects.
*/
public final class TreeNodeUtils {
private TreeNodeUtils() {
if (CONTEXT_REFERENCE.equals(extension.getNodeType())) {
return Optional.fromNullable(extension.getQName());
}
- ;
}
return Optional.absent();
}
*/
package org.opendaylight.controller.sal.dom.broker.osgi;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.opendaylight.controller.sal.core.api.BrokerService;
import org.opendaylight.yangtools.concepts.Registration;
import org.osgi.framework.ServiceReference;
-import static com.google.common.base.Preconditions.*;
public abstract class AbstractBrokerServiceProxy<T extends BrokerService> implements AutoCloseable, BrokerService {
private T delegate;
private final ServiceReference<T> reference;
- public AbstractBrokerServiceProxy(ServiceReference<T> ref, T delegate) {
+ public AbstractBrokerServiceProxy(final ServiceReference<T> ref, final T delegate) {
this.delegate = checkNotNull(delegate, "Delegate should not be null.");
this.reference = checkNotNull(ref, "Reference should not be null.");
}
return reference;
}
- private Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
+ private final Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
- protected <R extends Registration<?>> R addRegistration(R registration) {
+ protected <R extends Registration<?>> R addRegistration(final R registration) {
if (registration != null) {
registrations.add(registration);
}
import java.util.Set;
import org.opendaylight.controller.md.sal.common.api.routing.RouteChangeListener;
-import org.opendaylight.controller.sal.core.api.*;
+import org.opendaylight.controller.sal.core.api.Broker;
+import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
+import org.opendaylight.controller.sal.core.api.RpcImplementation;
+import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
+import org.opendaylight.controller.sal.core.api.RpcRegistrationListener;
+import org.opendaylight.controller.sal.core.api.RpcRoutingContext;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
-public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry>
- implements RpcProvisionRegistry {
+public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry> implements RpcProvisionRegistry {
- public RpcProvisionRegistryProxy(ServiceReference<RpcProvisionRegistry> ref, RpcProvisionRegistry delegate) {
+ public RpcProvisionRegistryProxy(final ServiceReference<RpcProvisionRegistry> ref, final RpcProvisionRegistry delegate) {
super(ref, delegate);
}
@Override
- public Broker.RpcRegistration addRpcImplementation(QName rpcType, RpcImplementation implementation) throws IllegalArgumentException {
+ public Broker.RpcRegistration addRpcImplementation(final QName rpcType, final RpcImplementation implementation) throws IllegalArgumentException {
return getDelegate().addRpcImplementation(rpcType, implementation);
}
@Override
- public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(RpcRegistrationListener listener) {
+ public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(final RpcRegistrationListener listener) {
return getDelegate().addRpcRegistrationListener(listener);
}
@Override
- public Broker.RoutedRpcRegistration addRoutedRpcImplementation(QName rpcType, RpcImplementation implementation) {
+ public Broker.RoutedRpcRegistration addRoutedRpcImplementation(final QName rpcType, final RpcImplementation implementation) {
return getDelegate().addRoutedRpcImplementation(rpcType, implementation);
}
@Override
- public void setRoutedRpcDefaultDelegate(RoutedRpcDefaultImplementation defaultImplementation) {
+ public void setRoutedRpcDefaultDelegate(final RoutedRpcDefaultImplementation defaultImplementation) {
getDelegate().setRoutedRpcDefaultDelegate(defaultImplementation);
}
@Override
- public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(L listener) {
+ public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(final L listener) {
return getDelegate().registerRouteChangeListener(listener);
}
}
@Override
- public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(QName rpc, CompositeNode input) {
+ public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(final QName rpc, final CompositeNode input) {
return getDelegate().invokeRpc(rpc, input);
}
}
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.SimpleNode;
-import static com.google.common.base.Preconditions.*;
-
-public class YangDataUtils {
+public final class YangDataUtils {
- public YangDataUtils() {
- // TODO Auto-generated constructor stub
+ private YangDataUtils() {
+ throw new UnsupportedOperationException("Utility class");
}
-
-
- public static Map<Map<QName,Object>,CompositeNode> toIndexMap(List<CompositeNode> nodes,List<QName> keys) {
+ public static Map<Map<QName,Object>,CompositeNode> toIndexMap(final List<CompositeNode> nodes,final List<QName> keys) {
ConcurrentHashMap<Map<QName,Object>,CompositeNode> ret = new ConcurrentHashMap<>();
for(CompositeNode node : nodes) {
Map<QName, Object> key = getKeyMap(node,keys);
return ret;
}
-
-
- public static Map<QName,Object> getKeyMap(CompositeNode node, List<QName> keys) {
+ public static Map<QName,Object> getKeyMap(final CompositeNode node, final List<QName> keys) {
Map<QName,Object> map = new HashMap<>();
for(QName key : keys) {
SimpleNode<?> keyNode = node.getFirstSimpleByName(QName.create(node.getNodeType(), key.getLocalName()));
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
import java.util.Iterator;
import java.util.List;
import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
import org.opendaylight.yangtools.yang.model.api.UsesNode;
-import static com.google.common.base.Preconditions.*;
-
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
-public class YangSchemaUtils {
+public final class YangSchemaUtils {
private static final Function<PathArgument, QName> QNAME_FROM_PATH_ARGUMENT = new Function<PathArgument, QName>(){
@Override
- public QName apply(PathArgument input) {
+ public QName apply(final PathArgument input) {
if(input == null) {
return null;
}
}
};
- private YangSchemaUtils() {
+ private YangSchemaUtils() {
throw new UnsupportedOperationException("Utility class.");
}
-
- public static DataSchemaNode getSchemaNode(SchemaContext schema,InstanceIdentifier path) {
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final InstanceIdentifier path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
return getSchemaNode(schema, FluentIterable.from(path.getPath()).transform(QNAME_FROM_PATH_ARGUMENT));
}
- public static DataSchemaNode getSchemaNode(SchemaContext schema,Iterable<QName> path) {
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final Iterable<QName> path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
if(!path.iterator().hasNext()){
return (DataSchemaNode) previous;
}
- private static DataSchemaNode searchInChoices(DataNodeContainer node, QName arg) {
+ private static DataSchemaNode searchInChoices(final DataNodeContainer node, final QName arg) {
Set<DataSchemaNode> children = node.getChildNodes();
for (DataSchemaNode child : children) {
if (child instanceof ChoiceNode) {
return null;
}
- private static DataSchemaNode searchInCases(ChoiceNode choiceNode, QName arg) {
+ private static DataSchemaNode searchInCases(final ChoiceNode choiceNode, final QName arg) {
Set<ChoiceCaseNode> cases = choiceNode.getCases();
for (ChoiceCaseNode caseNode : cases) {
DataSchemaNode node = caseNode.getDataChildByName(arg);
return null;
}
- private static ContainerSchemaNode toRootDataNode(SchemaContext schema) {
+ private static ContainerSchemaNode toRootDataNode(final SchemaContext schema) {
return new NetconfDataRootNode(schema);
}
private static final class NetconfDataRootNode implements ContainerSchemaNode {
- public NetconfDataRootNode(SchemaContext schema) {
+ public NetconfDataRootNode(final SchemaContext schema) {
// TODO Auto-generated constructor stub
}
}
@Override
- public DataSchemaNode getDataChildByName(QName name) {
+ public DataSchemaNode getDataChildByName(final QName name) {
// TODO Auto-generated method stub
return null;
}
@Override
- public DataSchemaNode getDataChildByName(String name) {
+ public DataSchemaNode getDataChildByName(final String name) {
// TODO Auto-generated method stub
return null;
}
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* Read and write access to stored data is provided only via transactions
* created using {@link #newReadOnlyTransaction()},
* {@link #newWriteOnlyTransaction()} and {@link #newReadWriteTransaction()}, or
- * by creating {@link TransactionChain}.
+ * by creating {@link org.opendaylight.controller.md.sal.common.api.data.TransactionChain}.
*
*/
public interface DOMStore extends DOMStoreTransactionFactory {
/**
- * Registers {@link DataChangeListener} for Data Change callbacks which will
+ * Registers {@link org.opendaylight.controller.md.sal.common.api.data.DataChangeListener} for Data Change callbacks which will
* be triggered on the change of provided subpath. What constitutes a change
* depends on the @scope parameter.
*
*/
package org.opendaylight.controller.sal.core.spi.data;
-import java.util.concurrent.Future;
-
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* read
* @return Listenable Future which contains read result
* <ul>
- * <li>If data at supplied path exists the {@link Future#get()}
+ * <li>If data at supplied path exists the {@link java.util.concurrent.Future#get()}
* returns Optional object containing data
* <li>If data at supplied path does not exists the
- * {@link Future#get()} returns {@link Optional#absent()}.
+ * {@link java.util.concurrent.Future#get()} returns {@link Optional#absent()}.
* </ul>
*/
ListenableFuture<Optional<NormalizedNode<?,?>>> read(InstanceIdentifier path);
*/
package org.opendaylight.controller.sal.core.spi.data;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.yangtools.concepts.Path;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* specified path.
*
* If you need add or merge of current object with specified use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
* specified path.
*
* If you need add or merge of current object with specified use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
*/
package org.opendaylight.controller.sal.connect.api;
-import org.opendaylight.controller.sal.connect.netconf.sal.NetconfDeviceRpc;
import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider;
import org.opendaylight.controller.netconf.client.NetconfClientDispatcher;
import org.opendaylight.controller.netconf.client.NetconfClientSession;
import org.opendaylight.controller.netconf.client.NetconfClientSessionListener;
-import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration;
import org.opendaylight.controller.netconf.client.conf.NetconfReconnectingClientConfiguration;
import org.opendaylight.controller.netconf.util.xml.XmlElement;
import org.opendaylight.controller.netconf.util.xml.XmlNetconfConstants;
import com.google.common.util.concurrent.Futures;
import java.util.Collections;
import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import java.util.List;
import java.util.Set;
-import javax.annotation.concurrent.ThreadSafe;
-
import org.opendaylight.controller.sal.connect.api.SchemaContextProviderFactory;
import org.opendaylight.controller.sal.connect.util.RemoteDeviceId;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
*/
package org.opendaylight.controller.config.yang.md.sal.remote.rpc;
-import org.opendaylight.controller.sal.connector.remoterpc.*;
+import org.opendaylight.controller.sal.connector.remoterpc.ClientImpl;
+import org.opendaylight.controller.sal.connector.remoterpc.RemoteRpcProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.RoutingTableProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.ServerImpl;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
import org.osgi.framework.BundleContext;
*
*/
public final class ZeroMQServerModule
- extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
+extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
- private static final Integer ZEROMQ_ROUTER_PORT = 5554;
- private BundleContext bundleContext;
+ private static final Integer ZEROMQ_ROUTER_PORT = 5554;
+ private BundleContext bundleContext;
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
- super(identifier, dependencyResolver);
- }
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ super(identifier, dependencyResolver);
+ }
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- ZeroMQServerModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final ZeroMQServerModule oldModule, final java.lang.AutoCloseable oldInstance) {
- super(identifier, dependencyResolver, oldModule, oldInstance);
- }
+ super(identifier, dependencyResolver, oldModule, oldInstance);
+ }
- @Override
- protected void customValidation() {
- // Add custom validation for module attributes here.
- }
+ @Override
+ protected void customValidation() {
+ // Add custom validation for module attributes here.
+ }
- @Override
- public java.lang.AutoCloseable createInstance() {
+ @Override
+ public java.lang.AutoCloseable createInstance() {
- Broker broker = getDomBrokerDependency();
+ Broker broker = getDomBrokerDependency();
- final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
+ final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
- ServerImpl serverImpl = new ServerImpl(port);
+ ServerImpl serverImpl = new ServerImpl(port);
- ClientImpl clientImpl = new ClientImpl();
+ ClientImpl clientImpl = new ClientImpl();
- RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
+ RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
- RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
- facade.setRoutingTableProvider(provider);
- facade.setContext(bundleContext);
- facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
+ RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
+ facade.setRoutingTableProvider(provider);
+ facade.setContext(bundleContext);
+ facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
- broker.registerProvider(facade, bundleContext);
- return facade;
- }
+ broker.registerProvider(facade, bundleContext);
+ return facade;
+ }
- public void setBundleContext(BundleContext bundleContext) {
- this.bundleContext = bundleContext;
- }
+ public void setBundleContext(final BundleContext bundleContext) {
+ this.bundleContext = bundleContext;
+ }
}
import org.opendaylight.controller.sal.connector.remoterpc.dto.Message;
import org.opendaylight.controller.sal.connector.remoterpc.dto.RouteIdentifierImpl;
import org.opendaylight.controller.sal.connector.remoterpc.util.XmlUtils;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
/**
- * An implementation of {@link RpcImplementation} that makes
+ * An implementation of {@link org.opendaylight.controller.sal.core.api.RpcImplementation} that makes
* remote RPC calls
*/
public class ClientImpl implements RemoteRpcClient {
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <packaging>bundle</packaging>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller.tests</groupId>
+ <artifactId>sal-remoterpc-connector-test-parent</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
+ <packaging>bundle</packaging>
+ <dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <configuration>
- <instructions>
- <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
- </instructions>
- </configuration>
- </plugin>
- </plugins>
- </build>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>containermanager</artifactId>
+ <version>0.5.2-SNAPSHOT</version>
+ </dependency>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal</artifactId>
+ <version>0.8.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-binding-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-impl</artifactId>
+ <version>${yangtools.version}</version>
+ </dependency>
+ </dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-impl</artifactId>
- <version>${yangtools.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <configuration>
+ <instructions>
+ <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
+ </instructions>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
import java.io.InputStream;
import java.net.URI;
import java.util.Hashtable;
-import java.util.concurrent.*;
+import java.util.concurrent.Future;
import org.opendaylight.controller.sal.core.api.AbstractConsumer;
import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.opendaylight.yangtools.yang.data.impl.XmlTreeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.CompositeNodeTOImpl;
import javax.xml.stream.XMLStreamException;
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
- <packaging>pom</packaging>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+ <packaging>pom</packaging>
<modules>
<module>consumer-service</module>
<module>test-it</module>
<module>test-nb</module>
</modules>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.8.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-remoterpc-connector</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
</dependencies>
import org.slf4j.LoggerFactory;
import java.net.URI;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
public class ExampleProvider extends AbstractProvider implements RpcImplementation {
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-it</artifactId>
<scm>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-broker-impl</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.ops4j.pax.exam</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>ietf-netconf-monitoring</artifactId>
+ <version>0.2.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
import junit.framework.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.net.URI;
-import java.util.Hashtable;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.baseModelBundles;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.bindingAwareSalBundles;
-import static org.ops4j.pax.exam.CoreOptions.*;
+//import static org.ops4j.pax.exam.CoreOptions.*;
@RunWith(PaxExam.class)
public class RouterTest {
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-nb</artifactId>
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import org.osgi.framework.*;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package org.opendaylight.controller.sal.rest.impl;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERRORS_CONTAINER_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_APP_TAG_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_INFO_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_LIST_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_MESSAGE_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_TAG_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_TYPE_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.NAMESPACE;
+
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.*;
-
import org.opendaylight.controller.sal.restconf.impl.ControllerContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
import org.opendaylight.controller.sal.restconf.impl.RestconfError;
private HttpHeaders headers;
@Override
- public Response toResponse( RestconfDocumentedException exception ) {
+ public Response toResponse( final RestconfDocumentedException exception ) {
LOG.debug( "In toResponse: {}", exception.getMessage() );
// single space char in the entity.
return Response.status( exception.getStatus() )
- .type( MediaType.TEXT_PLAIN_TYPE )
- .entity( " " ).build();
+ .type( MediaType.TEXT_PLAIN_TYPE )
+ .entity( " " ).build();
}
int status = errors.iterator().next().getErrorTag().getStatusCode();
if( errorsSchemaNode == null ) {
return Response.status( status )
- .type( MediaType.TEXT_PLAIN_TYPE )
- .entity( exception.getMessage() ).build();
+ .type( MediaType.TEXT_PLAIN_TYPE )
+ .entity( exception.getMessage() ).build();
}
ImmutableList.Builder<Node<?>> errorNodes = ImmutableList.<Node<?>> builder();
}
ImmutableCompositeNode errorsNode =
- ImmutableCompositeNode.create( ERRORS_CONTAINER_QNAME, errorNodes.build() );
+ ImmutableCompositeNode.create( ERRORS_CONTAINER_QNAME, errorNodes.build() );
Object responseBody;
if( mediaType.getSubtype().endsWith( "json" ) ) {
return Response.status( status ).type( mediaType ).entity( responseBody ).build();
}
- private Object toJsonResponseBody( ImmutableCompositeNode errorsNode,
- DataNodeContainer errorsSchemaNode ) {
+ private Object toJsonResponseBody( final ImmutableCompositeNode errorsNode,
+ final DataNodeContainer errorsSchemaNode ) {
JsonMapper jsonMapper = new JsonMapper();
return responseBody;
}
- private Object toXMLResponseBody( ImmutableCompositeNode errorsNode,
- DataNodeContainer errorsSchemaNode ) {
+ private Object toXMLResponseBody( final ImmutableCompositeNode errorsNode,
+ final DataNodeContainer errorsSchemaNode ) {
XmlMapper xmlMapper = new XmlMapper();
return responseBody;
}
- private String documentToString( Document doc ) throws TransformerException, UnsupportedEncodingException {
+ private String documentToString( final Document doc ) throws TransformerException, UnsupportedEncodingException {
Transformer transformer = createTransformer();
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
}
private Transformer createTransformer() throws TransformerFactoryConfigurationError,
- TransformerConfigurationException {
+ TransformerConfigurationException {
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transformer = tf.newTransformer();
transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "no" );
return transformer;
}
- private Node<?> toDomNode( RestconfError error ) {
+ private Node<?> toDomNode( final RestconfError error ) {
CompositeNodeBuilder<ImmutableCompositeNode> builder = ImmutableCompositeNode.builder();
builder.setQName( ERROR_LIST_QNAME );
return builder.toInstance();
}
- private Node<?> parseErrorInfo( String errorInfo ) {
+ private Node<?> parseErrorInfo( final String errorInfo ) {
if( Strings.isNullOrEmpty( errorInfo ) ) {
return null;
}
String errorInfoWithRoot =
new StringBuilder( "<error-info xmlns=\"" ).append( NAMESPACE ).append( "\">" )
- .append( errorInfo ).append( "</error-info>" ).toString();
+ .append( errorInfo ).append( "</error-info>" ).toString();
Document doc = null;
try {
doc = factory.newDocumentBuilder().parse(
- new InputSource( new StringReader( errorInfoWithRoot ) ) );
+ new InputSource( new StringReader( errorInfoWithRoot ) ) );
}
catch( Exception e ) {
// TODO: what if the content is text that happens to contain invalid markup? Could
// wrap in CDATA and try again.
LOG.warn( "Error parsing restconf error-info, \"" + errorInfo + "\", as XML: " +
- e.toString() );
+ e.toString() );
return null;
}
return errorInfoNode;
}
- private void addLeaf( CompositeNodeBuilder<ImmutableCompositeNode> builder, QName qname,
- String value ) {
+ private void addLeaf( final CompositeNodeBuilder<ImmutableCompositeNode> builder, final QName qname,
+ final String value ) {
if( !Strings.isNullOrEmpty( value ) ) {
builder.addLeaf( qname, value );
}
*/
package org.opendaylight.controller.sal.restconf.impl;
-import com.google.common.base.Function;
-import com.google.common.base.Objects;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.BiMap;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.HashBiMap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import org.opendaylight.controller.sal.core.api.mount.MountService;
import org.opendaylight.controller.sal.rest.api.Draft02;
import org.opendaylight.controller.sal.rest.impl.RestUtil;
-import org.opendaylight.controller.sal.restconf.impl.InstanceIdWithSchemaNode;
-import org.opendaylight.controller.sal.restconf.impl.RestCodec;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
import org.opendaylight.yangtools.concepts.Codec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Function;
+import com.google.common.base.Objects;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.BiMap;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.HashBiMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
public class ControllerContext implements SchemaContextListener {
private final static Logger LOG = LoggerFactory.getLogger( ControllerContext.class );
}
private InstanceIdWithSchemaNode toIdentifier( final String restconfInstance,
- final boolean toMountPointIdentifier ) {
+ final boolean toMountPointIdentifier ) {
this.checkPreconditions();
Iterable<String> split = Splitter.on( "/" ).split( restconfInstance );
InstanceIdentifierBuilder builder = InstanceIdentifier.builder();
Module latestModule = this.getLatestModule( globalSchema, startModule );
InstanceIdWithSchemaNode iiWithSchemaNode = this.collectPathArguments( builder, pathArgs,
- latestModule, null, toMountPointIdentifier );
+ latestModule, null, toMountPointIdentifier );
if( iiWithSchemaNode == null ) {
throw new RestconfDocumentedException(
Predicate<Module> filter = new Predicate<Module>() {
@Override
- public boolean apply( Module m ) {
+ public boolean apply( final Module m ) {
return Objects.equal( m.getName(), moduleName );
}
};
final SchemaContext mountPointSchema = mountPoint.getSchemaContext();
Set<Module> moduleSchemas = mountPointSchema == null ? null :
- mountPointSchema.findModuleByNamespace( namespace );
+ mountPointSchema.findModuleByNamespace( namespace );
return moduleSchemas == null ? null : this.filterLatestModule( moduleSchemas );
}
public Module findModuleByNameAndRevision( final QName module ) {
this.checkPreconditions();
Preconditions.checkArgument( module != null && module.getLocalName() != null &&
- module.getRevision() != null );
+ module.getRevision() != null );
return globalSchema.findModuleByName( module.getLocalName(), module.getRevision() );
}
public Module findModuleByNameAndRevision( final MountInstance mountPoint, final QName module ) {
this.checkPreconditions();
Preconditions.checkArgument( module != null && module.getLocalName() != null &&
- module.getRevision() != null && mountPoint != null );
+ module.getRevision() != null && mountPoint != null );
SchemaContext schemaContext = mountPoint.getSchemaContext();
return schemaContext == null ? null :
- schemaContext.findModuleByName( module.getLocalName(), module.getRevision() );
+ schemaContext.findModuleByName( module.getLocalName(), module.getRevision() );
}
public DataNodeContainer getDataNodeContainerFor( final InstanceIdentifier path ) {
String module = this.uriToModuleName.get( qname.getNamespace() );
if( module == null ) {
final Module moduleSchema = globalSchema.findModuleByNamespaceAndRevision(
- qname.getNamespace(), qname.getRevision() );
+ qname.getNamespace(), qname.getRevision() );
if( moduleSchema == null ) {
return null;
}
SchemaContext schemaContext = mountPoint.getSchemaContext();
final Module moduleSchema = schemaContext.findModuleByNamespaceAndRevision(
- qname.getNamespace(), qname.getRevision() );
+ qname.getNamespace(), qname.getRevision() );
if( moduleSchema == null ) {
return null;
}
@Override
public boolean apply(final GroupingDefinition g) {
return Objects.equal(g.getQName().getLocalName(),
- Draft02.RestConfModule.ERRORS_GROUPING_SCHEMA_NODE);
+ Draft02.RestConfModule.ERRORS_GROUPING_SCHEMA_NODE);
}
};
List<DataSchemaNode> instanceDataChildrenByName =
this.findInstanceDataChildrenByName(restconfGrouping,
- Draft02.RestConfModule.ERRORS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.ERRORS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instanceDataChildrenByName, null);
}
- public DataSchemaNode getRestconfModuleRestConfSchemaNode( Module inRestconfModule,
- String schemaNodeName ) {
+ public DataSchemaNode getRestconfModuleRestConfSchemaNode( final Module inRestconfModule,
+ final String schemaNodeName ) {
Module restconfModule = inRestconfModule;
if( restconfModule == null ) {
restconfModule = getRestconfModule();
@Override
public boolean apply(final GroupingDefinition g) {
return Objects.equal(g.getQName().getLocalName(),
- Draft02.RestConfModule.RESTCONF_GROUPING_SCHEMA_NODE);
+ Draft02.RestConfModule.RESTCONF_GROUPING_SCHEMA_NODE);
}
};
List<DataSchemaNode> instanceDataChildrenByName =
this.findInstanceDataChildrenByName(restconfGrouping,
- Draft02.RestConfModule.RESTCONF_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.RESTCONF_CONTAINER_SCHEMA_NODE);
final DataSchemaNode restconfContainer = Iterables.getFirst(instanceDataChildrenByName, null);
if (Objects.equal(schemaNodeName, Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
final DataSchemaNode modules = Iterables.getFirst(instances, null);
instances = this.findInstanceDataChildrenByName(((DataNodeContainer) modules),
- Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
final DataSchemaNode modules = Iterables.getFirst(instances, null);
instances = this.findInstanceDataChildrenByName(((DataNodeContainer) modules),
- Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
private String toUriString( final Object object ) throws UnsupportedEncodingException {
return object == null ? "" :
- URLEncoder.encode( object.toString(), ControllerContext.URI_ENCODING_CHAR_SET );
+ URLEncoder.encode( object.toString(), ControllerContext.URI_ENCODING_CHAR_SET );
}
private InstanceIdWithSchemaNode collectPathArguments( final InstanceIdentifierBuilder builder,
if( strings.isEmpty() ) {
return new InstanceIdWithSchemaNode( builder.toInstance(),
- ((DataSchemaNode) parentNode), mountPoint );
+ ((DataSchemaNode) parentNode), mountPoint );
}
String head = strings.iterator().next();
DataSchemaNode targetNode = null;
if( !Strings.isNullOrEmpty( moduleName ) ) {
if( Objects.equal( moduleName, ControllerContext.MOUNT_MODULE ) &&
- Objects.equal( nodeName, ControllerContext.MOUNT_NODE ) ) {
+ Objects.equal( nodeName, ControllerContext.MOUNT_NODE ) ) {
if( mountPoint != null ) {
throw new RestconfDocumentedException(
"Restconf supports just one mount point in URI.",
if( mount == null ) {
LOG.debug( "Instance identifier to missing mount point: {}", partialPath );
throw new RestconfDocumentedException(
- "Mount point does not exist.", ErrorType.PROTOCOL, ErrorTag.UNKNOWN_ELEMENT );
+ "Mount point does not exist.", ErrorType.PROTOCOL, ErrorTag.UNKNOWN_ELEMENT );
}
final SchemaContext mountPointSchema = mount.getSchemaContext();
final String moduleNameBehindMountPoint = toModuleName( strings.get( 1 ) );
if( moduleNameBehindMountPoint == null ) {
throw new RestconfDocumentedException(
- "First node after mount point in URI has to be in format \"moduleName:nodeName\"",
- ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ "First node after mount point in URI has to be in format \"moduleName:nodeName\"",
+ ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
final Module moduleBehindMountPoint = this.getLatestModule( mountPointSchema,
- moduleNameBehindMountPoint );
+ moduleNameBehindMountPoint );
if( moduleBehindMountPoint == null ) {
throw new RestconfDocumentedException(
"\"" +moduleName + "\" module does not exist in mount point.",
List<String> subList = strings.subList( 1, strings.size() );
return this.collectPathArguments( InstanceIdentifier.builder(), subList, moduleBehindMountPoint,
- mount, returnJustMountPoint );
+ mount, returnJustMountPoint );
}
Module module = null;
else {
SchemaContext schemaContext = mountPoint.getSchemaContext();
module = schemaContext == null ? null :
- this.getLatestModule( schemaContext, moduleName );
+ this.getLatestModule( schemaContext, moduleName );
if( module == null ) {
throw new RestconfDocumentedException(
"\"" + moduleName + "\" module does not exist in mount point.",
}
targetNode = this.findInstanceDataChildByNameAndNamespace(
- parentNode, nodeName, module.getNamespace() );;
+ parentNode, nodeName, module.getNamespace() );
if( targetNode == null ) {
throw new RestconfDocumentedException(
"URI has bad format. Possible reasons:\n" +
- " 1. \"" + head + "\" was not found in parent data node.\n" +
- " 2. \"" + head + "\" is behind mount point. Then it should be in format \"/" +
- MOUNT + "/" + head + "\".", ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ " 1. \"" + head + "\" was not found in parent data node.\n" +
+ " 2. \"" + head + "\" is behind mount point. Then it should be in format \"/" +
+ MOUNT + "/" + head + "\".", ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
} else {
final List<DataSchemaNode> potentialSchemaNodes =
- this.findInstanceDataChildrenByName( parentNode, nodeName );
+ this.findInstanceDataChildrenByName( parentNode, nodeName );
if( potentialSchemaNodes.size() > 1 ) {
final StringBuilder strBuilder = new StringBuilder();
for( final DataSchemaNode potentialNodeSchema : potentialSchemaNodes ) {
strBuilder.append( " " )
- .append( potentialNodeSchema.getQName().getNamespace() )
- .append( "\n" );
+ .append( potentialNodeSchema.getQName().getNamespace() )
+ .append( "\n" );
}
throw new RestconfDocumentedException(
}
this.addKeyValue( keyValues, listNode.getDataChildByName( key ),
- uriKeyValue, mountPoint );
+ uriKeyValue, mountPoint );
i++;
}
}
if( (targetNode instanceof DataNodeContainer) ) {
final List<String> remaining = strings.subList( consumed, strings.size() );
return this.collectPathArguments( builder, remaining,
- ((DataNodeContainer) targetNode), mountPoint, returnJustMountPoint );
+ ((DataNodeContainer) targetNode), mountPoint, returnJustMountPoint );
}
return new InstanceIdWithSchemaNode( builder.toInstance(), targetNode, mountPoint );
Predicate<DataSchemaNode> filter = new Predicate<DataSchemaNode>() {
@Override
- public boolean apply( DataSchemaNode node ) {
+ public boolean apply( final DataSchemaNode node ) {
return Objects.equal( node.getQName().getNamespace(), namespace );
}
};
}
public List<DataSchemaNode> findInstanceDataChildrenByName( final DataNodeContainer container,
- final String name ) {
+ final String name ) {
Preconditions.<DataNodeContainer> checkNotNull( container );
Preconditions.<String> checkNotNull( name );
Predicate<DataSchemaNode> filter = new Predicate<DataSchemaNode>() {
@Override
- public boolean apply( DataSchemaNode node ) {
+ public boolean apply( final DataSchemaNode node ) {
return Objects.equal( node.getQName().getLocalName(), name );
}
};
}
Iterable<ChoiceNode> choiceNodes = Iterables.<ChoiceNode> filter( container.getChildNodes(),
- ChoiceNode.class );
+ ChoiceNode.class );
final Function<ChoiceNode, Set<ChoiceCaseNode>> choiceFunction =
new Function<ChoiceNode, Set<ChoiceCaseNode>>() {
};
Iterable<Set<ChoiceCaseNode>> map = Iterables.<ChoiceNode, Set<ChoiceCaseNode>> transform(
- choiceNodes, choiceFunction );
+ choiceNodes, choiceFunction );
final Iterable<ChoiceCaseNode> allCases = Iterables.<ChoiceCaseNode> concat( map );
for( final ChoiceCaseNode caze : allCases ) {
public boolean isInstantiatedDataSchema( final DataSchemaNode node ) {
return node instanceof LeafSchemaNode || node instanceof LeafListSchemaNode ||
- node instanceof ContainerSchemaNode || node instanceof ListSchemaNode;
+ node instanceof ContainerSchemaNode || node instanceof ListSchemaNode;
}
private void addKeyValue( final HashMap<QName, Object> map, final DataSchemaNode node,
- final String uriValue, final MountInstance mountPoint ) {
+ final String uriValue, final MountInstance mountPoint ) {
Preconditions.<String> checkNotNull( uriValue );
Preconditions.checkArgument( (node instanceof LeafSchemaNode) );
if( decoded == null ) {
throw new RestconfDocumentedException(
- uriValue + " from URI can't be resolved. " + additionalInfo,
- ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ uriValue + " from URI can't be resolved. " + additionalInfo,
+ ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
map.put( node.getQName(), decoded );
};
Optional<QName> namespace = FluentIterable.from( sorted )
- .transform( transform )
- .firstMatch( findFirst );
+ .transform( transform )
+ .firstMatch( findFirst );
return namespace.isPresent() ? QName.create( namespace.get(), node ) : null;
}
}
private CharSequence convertToRestconfIdentifier( final PathArgument argument,
- final DataNodeContainer node ) {
+ final DataNodeContainer node ) {
if( argument instanceof NodeIdentifier && node instanceof ContainerSchemaNode ) {
return convertToRestconfIdentifier( (NodeIdentifier) argument, (ContainerSchemaNode) node );
}
}
else if( argument != null && node != null ) {
throw new IllegalArgumentException(
- "Conversion of generic path argument is not supported" );
+ "Conversion of generic path argument is not supported" );
}
else {
throw new IllegalArgumentException( "Unhandled parameter types: "
}
private CharSequence convertToRestconfIdentifier( final NodeIdentifier argument,
- final ContainerSchemaNode node ) {
+ final ContainerSchemaNode node ) {
StringBuilder builder = new StringBuilder();
builder.append( "/" );
QName nodeType = argument.getNodeType();
}
private CharSequence convertToRestconfIdentifier( final NodeIdentifierWithPredicates argument,
- final ListSchemaNode node ) {
+ final ListSchemaNode node ) {
QName nodeType = argument.getNodeType();
final CharSequence nodeIdentifier = this.toRestconfIdentifier( nodeType );
final Map<QName, Object> keyValues = argument.getKeyValues();
import org.opendaylight.controller.sal.core.api.mount.MountInstance;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
public class StructuredData {
private final DataSchemaNode schema;
private final MountInstance mountPoint;
- public StructuredData(CompositeNode data, DataSchemaNode schema, MountInstance mountPoint) {
+ public StructuredData(final CompositeNode data, final DataSchemaNode schema, final MountInstance mountPoint) {
this.data = data;
this.schema = schema;
this.mountPoint = mountPoint;
package org.opendaylight.controller.sal.streams.websockets;
-import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
/**
* {@link WebSocketServerInitializer} is used to setup the
- * {@link ChannelPipeline} of a {@link Channel}.
+ * {@link ChannelPipeline} of a {@link io.netty.channel.Channel}.
*/
-public class WebSocketServerInitializer extends
- ChannelInitializer<SocketChannel> {
+public class WebSocketServerInitializer extends ChannelInitializer<SocketChannel> {
@Override
- protected void initChannel(SocketChannel ch) throws Exception {
+ protected void initChannel(final SocketChannel ch) throws Exception {
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast("codec-http", new HttpServerCodec());
pipeline.addLast("aggregator", new HttpObjectAggregator(65536));
<url-pattern>/*</url-pattern>
</servlet-mapping>
- <filter>
- <filter-name>CorsFilter</filter-name>
- <filter-class>org.apache.catalina.filters.CorsFilter</filter-class>
- <init-param>
- <param-name>cors.allowed.origins</param-name>
- <param-value>*</param-value>
- </init-param>
- <init-param>
- <param-name>cors.allowed.methods</param-name>
- <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
- </init-param>
- <init-param>
- <param-name>cors.allowed.headers</param-name>
- <param-value>Content-Type,X-Requested-With,accept,authorization,
- origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers</param-value>
- </init-param>
- <init-param>
- <param-name>cors.exposed.headers</param-name>
- <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
- </init-param>
- <init-param>
- <param-name>cors.support.credentials</param-name>
- <param-value>true</param-value>
- </init-param>
- <init-param>
- <param-name>cors.preflight.maxage</param-name>
- <param-value>10</param-value>
- </init-param>
- </filter>
- <filter-mapping>
- <filter-name>CorsFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
<security-constraint>
<web-resource-collection>
<web-resource-name>NB api</web-resource-name>
assertEquals( "Json token type for key " + keyName, expToken, peek );
}
- verifier.verify( jReader, keyName );;
+ verifier.verify( jReader, keyName );
}
if( !expectedMap.isEmpty() ) {
*/
package org.opendaylight.controller.sal.restconf.impl.test;
-import java.util.concurrent.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.yangtools.yang.common.RpcResult;
result = null;
}
- private DummyFuture(Builder builder) {
+ private DummyFuture(final Builder builder) {
cancel = builder.cancel;
isCancelled = builder.isCancelled;
isDone = builder.isDone;
}
@Override
- public boolean cancel(boolean mayInterruptIfRunning) {
+ public boolean cancel(final boolean mayInterruptIfRunning) {
return cancel;
}
}
@Override
- public RpcResult<TransactionStatus> get(long timeout, TimeUnit unit) throws InterruptedException,
- ExecutionException, TimeoutException {
+ public RpcResult<TransactionStatus> get(final long timeout, final TimeUnit unit) throws InterruptedException,
+ ExecutionException, TimeoutException {
return result;
}
private boolean isDone;
private RpcResult<TransactionStatus> result;
- public Builder cancel(boolean cancel) {
+ public Builder cancel(final boolean cancel) {
this.cancel = cancel;
return this;
}
- public Builder isCancelled(boolean isCancelled) {
+ public Builder isCancelled(final boolean isCancelled) {
this.isCancelled = isCancelled;
return this;
}
- public Builder isDone(boolean isDone) {
+ public Builder isDone(final boolean isDone) {
this.isDone = isDone;
return this;
}
- public Builder rpcResult(RpcResult<TransactionStatus> result) {
+ public Builder rpcResult(final RpcResult<TransactionStatus> result) {
this.result = result;
return this;
}
import java.util.Collection;
-import org.opendaylight.yangtools.yang.common.*;
+import org.opendaylight.yangtools.yang.common.RpcError;
+import org.opendaylight.yangtools.yang.common.RpcResult;
public class DummyRpcResult<T> implements RpcResult<T> {
errors = null;
}
- private DummyRpcResult(Builder<T> builder) {
+ private DummyRpcResult(final Builder<T> builder) {
isSuccessful = builder.isSuccessful;
result = builder.result;
errors = builder.errors;
private T result;
private Collection<RpcError> errors;
- public Builder<T> isSuccessful(boolean isSuccessful) {
+ public Builder<T> isSuccessful(final boolean isSuccessful) {
this.isSuccessful = isSuccessful;
return this;
}
- public Builder<T> result(T result) {
+ public Builder<T> result(final T result) {
this.result = result;
return this;
}
- public Builder<T> errors(Collection<RpcError> errors) {
+ public Builder<T> errors(final Collection<RpcError> errors) {
this.errors = errors;
return this;
}
import java.util.List;
import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.SchemaPath;
+import org.opendaylight.yangtools.yang.model.api.Status;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
public class DummyType implements TypeDefinition<DummyType> {
QName dummyQName = TestUtils.buildQName("dummy type", "simple:uri", "2012-12-17");
*/
package org.opendaylight.controller.sal.restconf.impl.test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
import java.net.URI;
import java.net.URISyntaxException;
TestUtils.normalizeCompositeNode(prepareCnSn("normalize-node-module"), modules, schemaNodePath);
}
- private CompositeNode prepareCnSn(String namespace) {
+ private CompositeNode prepareCnSn(final String namespace) {
URI uri = null;
if (namespace != null) {
try {
package org.opendaylight.controller.sal.restconf.impl.test;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.Map.Entry;
+import java.util.Set;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import org.opendaylight.controller.sal.restconf.impl.ControllerContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
import org.opendaylight.controller.sal.restconf.impl.RestconfError;
-import org.opendaylight.controller.sal.restconf.impl.StructuredData;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
+import org.opendaylight.controller.sal.restconf.impl.StructuredData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
Map<String, String> expErrorInfo;
- public ComplexErrorInfoVerifier( Map<String, String> expErrorInfo ) {
+ public ComplexErrorInfoVerifier( final Map<String, String> expErrorInfo ) {
this.expErrorInfo = expErrorInfo;
}
@Override
- public void verifyXML( Node errorInfoNode ) {
+ public void verifyXML( final Node errorInfoNode ) {
Map<String, String> mutableExpMap = Maps.newHashMap( expErrorInfo );
NodeList childNodes = errorInfoNode.getChildNodes();
if( child instanceof Element ) {
String expValue = mutableExpMap.remove( child.getNodeName() );
assertNotNull( "Found unexpected \"error-info\" child node: " +
- child.getNodeName(), expValue );
+ child.getNodeName(), expValue );
assertEquals( "Text content for \"error-info\" child node " +
- child.getNodeName(), expValue, child.getTextContent() );
+ child.getNodeName(), expValue, child.getTextContent() );
}
}
}
@Override
- public void verifyJson( JsonElement errorInfoElement ) {
+ public void verifyJson( final JsonElement errorInfoElement ) {
assertTrue( "\"error-info\" Json element is not an Object",
errorInfoElement.isJsonObject() );
for( Entry<String,String> actual: actualErrorInfo.entrySet() ) {
String expValue = mutableExpMap.remove( actual.getKey() );
assertNotNull( "Found unexpected \"error-info\" child node: " +
- actual.getKey(), expValue );
+ actual.getKey(), expValue );
assertEquals( "Text content for \"error-info\" child node " +
- actual.getKey(), expValue, actual.getValue() );
+ actual.getKey(), expValue, actual.getValue() );
}
if( !mutableExpMap.isEmpty() ) {
String expTextContent;
- public SimpleErrorInfoVerifier( String expErrorInfo ) {
+ public SimpleErrorInfoVerifier( final String expErrorInfo ) {
this.expTextContent = expErrorInfo;
}
- void verifyContent( String actualContent ) {
+ void verifyContent( final String actualContent ) {
assertNotNull( "Actual \"error-info\" text content is null", actualContent );
assertTrue( "", actualContent.contains( expTextContent ) );
}
@Override
- public void verifyXML( Node errorInfoNode ) {
+ public void verifyXML( final Node errorInfoNode ) {
verifyContent( errorInfoNode.getTextContent() );
}
@Override
- public void verifyJson( JsonElement errorInfoElement ) {
+ public void verifyJson( final JsonElement errorInfoElement ) {
verifyContent( errorInfoElement.getAsString() );
}
}
NamespaceContext nsContext = new NamespaceContext() {
@Override
- public Iterator getPrefixes( String namespaceURI ) {
+ public Iterator<?> getPrefixes( final String namespaceURI ) {
return null;
}
@Override
- public String getPrefix( String namespaceURI ) {
+ public String getPrefix( final String namespaceURI ) {
return null;
}
@Override
- public String getNamespaceURI( String prefix ) {
+ public String getNamespaceURI( final String prefix ) {
return "ietf-restconf".equals( prefix ) ? Draft02.RestConfModule.NAMESPACE : null;
}
};
protected Application configure() {
ResourceConfig resourceConfig = new ResourceConfig();
resourceConfig = resourceConfig.registerInstances( mockRestConf, StructuredDataToXmlProvider.INSTANCE,
- StructuredDataToJsonProvider.INSTANCE );
+ StructuredDataToJsonProvider.INSTANCE );
resourceConfig.registerClasses( RestconfDocumentedExceptionMapper.class );
return resourceConfig;
}
- void stageMockEx( RestconfDocumentedException ex ) {
+ void stageMockEx( final RestconfDocumentedException ex ) {
reset( mockRestConf );
when( mockRestConf.readOperationalData( any( String.class ) ) ).thenThrow( ex );
}
- void testJsonResponse( RestconfDocumentedException ex, Status expStatus, ErrorType expErrorType,
- ErrorTag expErrorTag, String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void testJsonResponse( final RestconfDocumentedException ex, final Status expStatus, final ErrorType expErrorType,
+ final ErrorTag expErrorTag, final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
stageMockEx( ex );
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_JSON, expStatus );
verifyJsonResponseBody( stream, expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
@Test
public void testToJsonResponseWithMessageOnly() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error" ), Status.INTERNAL_SERVER_ERROR,
- ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
// To test verification code
-// String json =
-// "{ errors: {" +
-// " error: [{" +
-// " error-tag : \"operation-failed\"" +
-// " ,error-type : \"application\"" +
-// " ,error-message : \"An error occurred\"" +
-// " ,error-info : {" +
-// " session-id: \"123\"" +
-// " ,address: \"1.2.3.4\"" +
-// " }" +
-// " }]" +
-// " }" +
-// "}";
-//
-// verifyJsonResponseBody( new java.io.StringBufferInputStream(json ), ErrorType.APPLICATION,
-// ErrorTag.OPERATION_FAILED, "An error occurred", null,
-// com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
+ // String json =
+ // "{ errors: {" +
+ // " error: [{" +
+ // " error-tag : \"operation-failed\"" +
+ // " ,error-type : \"application\"" +
+ // " ,error-message : \"An error occurred\"" +
+ // " ,error-info : {" +
+ // " session-id: \"123\"" +
+ // " ,address: \"1.2.3.4\"" +
+ // " }" +
+ // " }]" +
+ // " }" +
+ // "}";
+ //
+ // verifyJsonResponseBody( new java.io.StringBufferInputStream(json ), ErrorType.APPLICATION,
+ // ErrorTag.OPERATION_FAILED, "An error occurred", null,
+ // com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
}
@Test
public void testToJsonResponseWithInUseErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.IN_USE ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.IN_USE, "mock error", null, null );
+ ErrorTag.IN_USE ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.IN_USE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithInvalidValueErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.RPC,
- ErrorTag.INVALID_VALUE ),
- Status.BAD_REQUEST, ErrorType.RPC,
- ErrorTag.INVALID_VALUE, "mock error", null, null );
+ ErrorTag.INVALID_VALUE ),
+ Status.BAD_REQUEST, ErrorType.RPC,
+ ErrorTag.INVALID_VALUE, "mock error", null, null );
}
public void testToJsonResponseWithTooBigErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG ),
- Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG, "mock error", null, null );
+ ErrorTag.TOO_BIG ),
+ Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
+ ErrorTag.TOO_BIG, "mock error", null, null );
}
public void testToJsonResponseWithMissingAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.MISSING_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithBadAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.BAD_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.UNKNOWN_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithBadElementErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT ),
- Status.BAD_REQUEST,
- ErrorType.PROTOCOL, ErrorTag.BAD_ELEMENT, "mock error", null, null );
+ ErrorTag.BAD_ELEMENT ),
+ Status.BAD_REQUEST,
+ ErrorType.PROTOCOL, ErrorTag.BAD_ELEMENT, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownElementErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
+ ErrorTag.UNKNOWN_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownNamespaceErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
+ ErrorTag.UNKNOWN_NAMESPACE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithMalformedMessageErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
+ ErrorTag.MALFORMED_MESSAGE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithAccessDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED ),
- Status.FORBIDDEN, ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED, "mock error", null, null );
+ ErrorTag.ACCESS_DENIED ),
+ Status.FORBIDDEN, ErrorType.PROTOCOL,
+ ErrorTag.ACCESS_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithLockDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED, "mock error", null, null );
+ ErrorTag.LOCK_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.LOCK_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithResourceDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED, "mock error", null, null );
+ ErrorTag.RESOURCE_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.RESOURCE_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithRollbackFailedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
+ ErrorTag.ROLLBACK_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithDataExistsErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS, "mock error", null, null );
+ ErrorTag.DATA_EXISTS ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_EXISTS, "mock error", null, null );
}
@Test
public void testToJsonResponseWithDataMissingErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING, "mock error", null, null );
+ ErrorTag.DATA_MISSING ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_MISSING, "mock error", null, null );
}
@Test
public void testToJsonResponseWithOperationNotSupportedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED ),
- Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
+ ErrorTag.OPERATION_NOT_SUPPORTED ),
+ Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithOperationFailedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorTag.OPERATION_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_FAILED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithPartialOperationErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
+ ErrorTag.PARTIAL_OPERATION ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
}
@Test
public void testToJsonResponseWithErrorAppTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag" ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag" ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
}
@Test
assertEquals( "\"error\" Json array element length", 2, arrayElement.size() );
verifyJsonErrorNode( arrayElement.get( 0 ), ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
- "mock error1", null, null );
+ "mock error1", null, null );
verifyJsonErrorNode( arrayElement.get( 1 ), ErrorType.RPC, ErrorTag.ROLLBACK_FAILED,
- "mock error2", null, null );
+ "mock error2", null, null );
}
@Test
String errorInfo = "<address>1.2.3.4</address> <session-id>123</session-id>";
testJsonResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag", errorInfo ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
- new ComplexErrorInfoVerifier( ImmutableMap.of(
- "session-id", "123", "address", "1.2.3.4" ) ) );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag", errorInfo ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
+ new ComplexErrorInfoVerifier( ImmutableMap.of(
+ "session-id", "123", "address", "1.2.3.4" ) ) );
}
@Test
Exception cause = new Exception( "mock exception cause" );
testJsonResponse( new RestconfDocumentedException( "mock error", cause ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
- ErrorTag.OPERATION_FAILED, "mock error", null,
- new SimpleErrorInfoVerifier( cause.getMessage() ) );
+ Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
+ ErrorTag.OPERATION_FAILED, "mock error", null,
+ new SimpleErrorInfoVerifier( cause.getMessage() ) );
}
- void testXMLResponse( RestconfDocumentedException ex, Status expStatus, ErrorType expErrorType,
- ErrorTag expErrorTag, String expErrorMessage,
- String expErrorAppTag, ErrorInfoVerifier errorInfoVerifier ) throws Exception
+ void testXMLResponse( final RestconfDocumentedException ex, final Status expStatus, final ErrorType expErrorType,
+ final ErrorTag expErrorTag, final String expErrorMessage,
+ final String expErrorAppTag, final ErrorInfoVerifier errorInfoVerifier ) throws Exception
{
stageMockEx( ex );
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_XML, expStatus );
verifyXMLResponseBody( stream, expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
@Test
public void testToXMLResponseWithMessageOnly() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error" ), Status.INTERNAL_SERVER_ERROR,
- ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
// To test verification code
-// String xml =
-// "<errors xmlns=\"urn:ietf:params:xml:ns:yang:ietf-restconf\">"+
-// " <error>" +
-// " <error-type>application</error-type>"+
-// " <error-tag>operation-failed</error-tag>"+
-// " <error-message>An error occurred</error-message>"+
-// " <error-info>" +
-// " <session-id>123</session-id>" +
-// " <address>1.2.3.4</address>" +
-// " </error-info>" +
-// " </error>" +
-// "</errors>";
-//
-// verifyXMLResponseBody( new java.io.StringBufferInputStream(xml), ErrorType.APPLICATION,
-// ErrorTag.OPERATION_FAILED, "An error occurred", null,
-// com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
+ // String xml =
+ // "<errors xmlns=\"urn:ietf:params:xml:ns:yang:ietf-restconf\">"+
+ // " <error>" +
+ // " <error-type>application</error-type>"+
+ // " <error-tag>operation-failed</error-tag>"+
+ // " <error-message>An error occurred</error-message>"+
+ // " <error-info>" +
+ // " <session-id>123</session-id>" +
+ // " <address>1.2.3.4</address>" +
+ // " </error-info>" +
+ // " </error>" +
+ // "</errors>";
+ //
+ // verifyXMLResponseBody( new java.io.StringBufferInputStream(xml), ErrorType.APPLICATION,
+ // ErrorTag.OPERATION_FAILED, "An error occurred", null,
+ // com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
}
@Test
public void testToXMLResponseWithInUseErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.IN_USE ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.IN_USE, "mock error", null, null );
+ ErrorTag.IN_USE ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.IN_USE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithInvalidValueErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.RPC,
- ErrorTag.INVALID_VALUE ),
- Status.BAD_REQUEST, ErrorType.RPC,
- ErrorTag.INVALID_VALUE, "mock error", null, null );
+ ErrorTag.INVALID_VALUE ),
+ Status.BAD_REQUEST, ErrorType.RPC,
+ ErrorTag.INVALID_VALUE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithTooBigErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG ),
- Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG, "mock error", null, null );
+ ErrorTag.TOO_BIG ),
+ Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
+ ErrorTag.TOO_BIG, "mock error", null, null );
}
@Test
public void testToXMLResponseWithMissingAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.MISSING_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithBadAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.BAD_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.UNKNOWN_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithBadElementErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT, "mock error", null, null );
+ ErrorTag.BAD_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ELEMENT, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownElementErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
+ ErrorTag.UNKNOWN_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownNamespaceErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
+ ErrorTag.UNKNOWN_NAMESPACE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithMalformedMessageErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
+ ErrorTag.MALFORMED_MESSAGE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithAccessDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED ),
- Status.FORBIDDEN, ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED, "mock error", null, null );
+ ErrorTag.ACCESS_DENIED ),
+ Status.FORBIDDEN, ErrorType.PROTOCOL,
+ ErrorTag.ACCESS_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithLockDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED, "mock error", null, null );
+ ErrorTag.LOCK_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.LOCK_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithResourceDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED, "mock error", null, null );
+ ErrorTag.RESOURCE_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.RESOURCE_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithRollbackFailedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
+ ErrorTag.ROLLBACK_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithDataExistsErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS, "mock error", null, null );
+ ErrorTag.DATA_EXISTS ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_EXISTS, "mock error", null, null );
}
@Test
public void testToXMLResponseWithDataMissingErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING, "mock error", null, null );
+ ErrorTag.DATA_MISSING ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_MISSING, "mock error", null, null );
}
@Test
public void testToXMLResponseWithOperationNotSupportedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED ),
- Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
+ ErrorTag.OPERATION_NOT_SUPPORTED ),
+ Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithOperationFailedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorTag.OPERATION_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_FAILED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithPartialOperationErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
+ ErrorTag.PARTIAL_OPERATION ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
}
@Test
public void testToXMLResponseWithErrorAppTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag" ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag" ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
}
@Test
String errorInfo = "<address>1.2.3.4</address> <session-id>123</session-id>";
testXMLResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag", errorInfo ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
- new ComplexErrorInfoVerifier( ImmutableMap.of(
- "session-id", "123", "address", "1.2.3.4" ) ) );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag", errorInfo ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
+ new ComplexErrorInfoVerifier( ImmutableMap.of(
+ "session-id", "123", "address", "1.2.3.4" ) ) );
}
@Test
Exception cause = new Exception( "mock exception cause" );
testXMLResponse( new RestconfDocumentedException( "mock error", cause ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
- ErrorTag.OPERATION_FAILED, "mock error", null,
- new SimpleErrorInfoVerifier( cause.getMessage() ) );
+ Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
+ ErrorTag.OPERATION_FAILED, "mock error", null,
+ new SimpleErrorInfoVerifier( cause.getMessage() ) );
}
@Test
NodeList children = getXMLErrorList( doc, 2 );
verifyXMLErrorNode( children.item( 0 ), ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
- "mock error1", null, null );
+ "mock error1", null, null );
verifyXMLErrorNode( children.item( 1 ), ErrorType.RPC, ErrorTag.ROLLBACK_FAILED,
- "mock error2", null, null );
+ "mock error2", null, null );
}
@Test
stageMockEx( new RestconfDocumentedException( "mock error" ) );
Response resp = target("/operational/foo")
- .request().header( "Accept", MediaType.APPLICATION_JSON ).get();
+ .request().header( "Accept", MediaType.APPLICATION_JSON ).get();
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_JSON,
- Status.INTERNAL_SERVER_ERROR );
+ Status.INTERNAL_SERVER_ERROR );
verifyJsonResponseBody( stream, ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error",
- null, null );
+ null, null );
}
@Test
// The StructuredDataToJsonProvider should throw a RestconfDocumentedException with no data
when( mockRestConf.readOperationalData( any( String.class ) ) )
- .thenReturn( new StructuredData( null, null, null ) );
+ .thenReturn( new StructuredData( null, null, null ) );
Response resp = target("/operational/foo").request( MediaType.APPLICATION_JSON ).get();
verifyResponse( resp, MediaType.TEXT_PLAIN, Status.NOT_FOUND );
}
- InputStream verifyResponse( Response resp, String expMediaType, Status expStatus ) {
+ InputStream verifyResponse( final Response resp, final String expMediaType, final Status expStatus ) {
assertEquals( "getMediaType", MediaType.valueOf( expMediaType ), resp.getMediaType() );
assertEquals( "getStatus", expStatus.getStatusCode(), resp.getStatus() );
return stream;
}
- void verifyJsonResponseBody( InputStream stream, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void verifyJsonResponseBody( final InputStream stream, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
JsonArray arrayElement = parseJsonErrorArrayElement( stream );
assertEquals( "\"error\" Json array element length", 1, arrayElement.size() );
verifyJsonErrorNode( arrayElement.get( 0 ), expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
- private JsonArray parseJsonErrorArrayElement( InputStream stream ) throws IOException {
+ private JsonArray parseJsonErrorArrayElement( final InputStream stream ) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ByteStreams.copy( stream, bos );
try {
rootElement = parser.parse(
- new InputStreamReader( new ByteArrayInputStream( bos.toByteArray() ) ) );
+ new InputStreamReader( new ByteArrayInputStream( bos.toByteArray() ) ) );
}
catch( Exception e ) {
throw new IllegalArgumentException( "Invalid JSON response:\n" + bos.toString(), e );
JsonElement errorListElement = errorListEntrySet.iterator().next().getValue();
assertEquals( "\"errors\" child Json element name", "error",
- errorListEntrySet.iterator().next().getKey() );
+ errorListEntrySet.iterator().next().getKey() );
assertTrue( "\"error\" Json element is not an Array", errorListElement.isJsonArray() );
return errorListElement.getAsJsonArray();
}
- void verifyJsonErrorNode( JsonElement errorEntryElement, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) {
+ void verifyJsonErrorNode( final JsonElement errorEntryElement, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) {
JsonElement errorInfoElement = null;
Map<String, String> actualErrorInfo = null;
}
else {
assertTrue( "\"error\" leaf Json element " + leafName +
- " is not a Primitive", leafElement.isJsonPrimitive() );
+ " is not a Primitive", leafElement.isJsonPrimitive() );
leafMap.put( leafName, leafElement.getAsString() );
}
}
}
- void verifyOptionalJsonLeaf( String actualValue, String expValue, String tagName ) {
+ void verifyOptionalJsonLeaf( final String actualValue, final String expValue, final String tagName ) {
if( expValue != null ) {
assertEquals( tagName, expValue, actualValue );
}
}
}
- void verifyXMLResponseBody( InputStream stream, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier )
- throws Exception {
+ void verifyXMLResponseBody( final InputStream stream, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier )
+ throws Exception {
Document doc = parseXMLDocument( stream );
NodeList children = getXMLErrorList( doc, 1 );
verifyXMLErrorNode( children.item( 0 ), expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
- private Document parseXMLDocument( InputStream stream ) throws IOException {
+ private Document parseXMLDocument( final InputStream stream ) throws IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
factory.setCoalescing(true);
return doc;
}
- void verifyXMLErrorNode( Node errorNode, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void verifyXMLErrorNode( final Node errorNode, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
String errorType = (String)ERROR_TYPE.evaluate( errorNode, XPathConstants.STRING );
assertEquals( "error-type", expErrorType.getErrorTypeTag(), errorType );
}
}
- void verifyOptionalXMLLeaf( Node fromNode, XPathExpression xpath, String expValue,
- String tagName ) throws Exception {
+ void verifyOptionalXMLLeaf( final Node fromNode, final XPathExpression xpath, final String expValue,
+ final String tagName ) throws Exception {
if( expValue != null ) {
String actual = (String)xpath.evaluate( fromNode, XPathConstants.STRING );
assertEquals( tagName, expValue, actual );
}
else {
assertNull( "Found unexpected \"error\" leaf entry for: " + tagName,
- xpath.evaluate( fromNode, XPathConstants.NODE ) );
+ xpath.evaluate( fromNode, XPathConstants.NODE ) );
}
}
- NodeList getXMLErrorList( Node fromNode, int count ) throws Exception {
+ NodeList getXMLErrorList( final Node fromNode, final int count ) throws Exception {
NodeList errorList = (NodeList)ERROR_LIST.evaluate( fromNode, XPathConstants.NODESET );
assertNotNull( "Root errors node is empty", errorList );
assertEquals( "Root errors node child count", count, errorList.getLength() );
*/
package org.opendaylight.controller.sal.restconf.impl.test.structures;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import org.junit.Test;
*/
package org.opendaylight.controller.sal.restconf.impl.test.structures;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Set;
public class Lst extends YangElement {
- private Set<LstItem> lstItems;
+ private final Set<LstItem> lstItems;
- public Lst(String name) {
+ public Lst(final String name) {
super(name);
lstItems = new HashSet<>();
}
- public Lst addLstItem(LstItem lstItem) {
+ public Lst addLstItem(final LstItem lstItem) {
lstItem.setLstName(name);
while (this.lstItems.contains(lstItem)) {
lstItem.incNumOfEqualItems();
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
result = prime * result + ((lstItems == null) ? 0 : lstItems.hashCode());
return result;
}
-
}
*/
package org.opendaylight.controller.sal.rest.doc;
+import java.util.Collection;
+import java.util.Collections;
+
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.impl.ApiDocGenerator;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Collection;
-import java.util.Collections;
-
-
-public class DocProvider implements BundleActivator,
- ServiceTrackerCustomizer<Broker, Broker>,
- Provider,
- AutoCloseable {
-
- private Logger _logger = LoggerFactory.getLogger(DocProvider.class);
-
- private ServiceTracker<Broker, Broker> brokerServiceTracker;
- private BundleContext bundleContext;
- private Broker.ProviderSession session;
-
- @Override
- public void close() throws Exception {
- stop(bundleContext);
- }
-
- @Override
- public void onSessionInitiated(Broker.ProviderSession providerSession) {
- SchemaService schemaService = providerSession.getService(SchemaService.class);
- ApiDocGenerator.getInstance().setSchemaService(schemaService);
-
- _logger.debug("Restconf API Explorer started");
-
- }
-
- @Override
- public Collection<ProviderFunctionality> getProviderFunctionality() {
- return Collections.emptySet();
- }
-
- @Override
- public void start(BundleContext context) throws Exception {
- bundleContext = context;
- brokerServiceTracker = new ServiceTracker(context, Broker.class, this);
- brokerServiceTracker.open();
- }
-
- @Override
- public void stop(BundleContext context) throws Exception {
- if (brokerServiceTracker != null)
- brokerServiceTracker.close();
-
- if (session != null)
- session.close();
- }
-
- @Override
- public Broker addingService(ServiceReference<Broker> reference) {
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- return broker;
- }
-
- @Override
- public void modifiedService(ServiceReference<Broker> reference, Broker service) {
- if (session != null)
- session.close();
-
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- }
-
- @Override
- public void removedService(ServiceReference<Broker> reference, Broker service) {
- bundleContext.ungetService(reference);
- }
+public class DocProvider implements BundleActivator, ServiceTrackerCustomizer<Broker, Broker>, Provider, AutoCloseable {
+
+ private static final Logger _logger = LoggerFactory.getLogger(DocProvider.class);
+
+ private ServiceTracker<Broker, Broker> brokerServiceTracker;
+ private BundleContext bundleContext;
+ private Broker.ProviderSession session;
+
+ @Override
+ public void close() throws Exception {
+ stop(bundleContext);
+ }
+
+ @Override
+ public void onSessionInitiated(final Broker.ProviderSession providerSession) {
+ SchemaService schemaService = providerSession.getService(SchemaService.class);
+ ApiDocGenerator.getInstance().setSchemaService(schemaService);
+
+ _logger.debug("Restconf API Explorer started");
+ }
+
+ @Override
+ public Collection<ProviderFunctionality> getProviderFunctionality() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public void start(final BundleContext context) throws Exception {
+ bundleContext = context;
+ brokerServiceTracker = new ServiceTracker<>(context, Broker.class, this);
+ brokerServiceTracker.open();
+ }
+
+ @Override
+ public void stop(final BundleContext context) throws Exception {
+ if (brokerServiceTracker != null) {
+ brokerServiceTracker.close();
+ }
+
+ if (session != null) {
+ session.close();
+ }
+ }
+
+ @Override
+ public Broker addingService(final ServiceReference<Broker> reference) {
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ return broker;
+ }
+
+ @Override
+ public void modifiedService(final ServiceReference<Broker> reference, final Broker service) {
+ if (session != null) {
+ session.close();
+ }
+
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ }
+
+ @Override
+ public void removedService(final ServiceReference<Broker> reference, final Broker service) {
+ bundleContext.ungetService(reference);
+ }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
-import com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.net.URI;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.ws.rs.core.UriInfo;
+
import org.json.JSONException;
import org.json.JSONObject;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder;
-import org.opendaylight.controller.sal.rest.doc.swagger.*;
+import org.opendaylight.controller.sal.rest.doc.swagger.Api;
+import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
+import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
+import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
+import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
+import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-import java.net.URI;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
+import com.google.common.base.Preconditions;
/**
* This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation.
*/
public class ApiDocGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
-
- private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
- private ObjectMapper mapper = new ObjectMapper();
- private final ModelGenerator jsonConverter = new ModelGenerator();
-
- private SchemaService schemaService;
-
- private final String API_VERSION = "1.0.0";
- private final String SWAGGER_VERSION = "1.2";
- private final String RESTCONF_CONTEXT_ROOT = "restconf";
- private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
-
- //For now its {@link HashMap}. It will be changed to thread-safe Map when schema change listener is implemented.
- private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
-
- private ApiDocGenerator(){
- mapper.registerModule(new JsonOrgModule());
- mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
- }
-
- /**
- * Returns singleton instance
- * @return
- */
- public static ApiDocGenerator getInstance() {
- return INSTANCE;
- }
-
- /**
- *
- * @param schemaService
- */
- public void setSchemaService(SchemaService schemaService) {
- this.schemaService = schemaService;
- }
- /**
- *
- * @param uriInfo
- * @return list of modules converted to swagger compliant resource list.
- */
- public ResourceList getResourceListing(UriInfo uriInfo) {
-
- Preconditions.checkState(schemaService != null);
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
-
- Set<Module> modules = schemaContext.getModules();
-
- ResourceList resourceList = new ResourceList();
- resourceList.setApiVersion(API_VERSION);
- resourceList.setSwaggerVersion(SWAGGER_VERSION);
-
- List<Resource> resources = new ArrayList<>(modules.size());
- _logger.info("Modules found [{}]", modules.size());
-
- for (Module module : modules) {
- Resource resource = new Resource();
- String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
-
- _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
- ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
-
- if (doc != null) {
- URI uri = uriInfo.getRequestUriBuilder().
- path(generateCacheKey(module.getName(), revisionString)).
- build();
-
- resource.setPath(uri.toASCIIString());
- resources.add(resource);
- } else {
- _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
- }
- }
+ private static final Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
- resourceList.setApis(resources);
+ private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
+ private final ObjectMapper mapper = new ObjectMapper();
+ private final ModelGenerator jsonConverter = new ModelGenerator();
- return resourceList;
- }
+ private SchemaService schemaService;
- public ApiDeclaration getApiDeclaration(String module, String revision, UriInfo uriInfo) {
+ private static final String API_VERSION = "1.0.0";
+ private static final String SWAGGER_VERSION = "1.2";
+ private static final String RESTCONF_CONTEXT_ROOT = "restconf";
+ private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
- //Lookup cache
- String cacheKey = generateCacheKey(module, revision);
+ //For now its {@link HashMap}. It will be changed to thread-safe Map when schema change listener is implemented.
+ private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
- if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
- _logger.debug("Serving from cache for {}", cacheKey);
- return MODULE_DOC_CACHE.get(cacheKey);
+ private ApiDocGenerator(){
+ mapper.registerModule(new JsonOrgModule());
+ mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
}
- Date rev = null;
- try {
- rev = SIMPLE_DATE_FORMAT.parse(revision);
- } catch (ParseException e) {
- throw new IllegalArgumentException(e);
+ /**
+ * Returns singleton instance
+ * @return
+ */
+ public static ApiDocGenerator getInstance() {
+ return INSTANCE;
}
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
+ /**
+ *
+ * @param schemaService
+ */
+ public void setSchemaService(final SchemaService schemaService) {
+ this.schemaService = schemaService;
+ }
+ /**
+ *
+ * @param uriInfo
+ * @return list of modules converted to swagger compliant resource list.
+ */
+ public ResourceList getResourceListing(final UriInfo uriInfo) {
+
+ Preconditions.checkState(schemaService != null);
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Set<Module> modules = schemaContext.getModules();
+
+ ResourceList resourceList = new ResourceList();
+ resourceList.setApiVersion(API_VERSION);
+ resourceList.setSwaggerVersion(SWAGGER_VERSION);
+
+ List<Resource> resources = new ArrayList<>(modules.size());
+ _logger.info("Modules found [{}]", modules.size());
+
+ for (Module module : modules) {
+ Resource resource = new Resource();
+ String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
+
+ _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
+ ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
+
+ if (doc != null) {
+ URI uri = uriInfo.getRequestUriBuilder().
+ path(generateCacheKey(module.getName(), revisionString)).
+ build();
+
+ resource.setPath(uri.toASCIIString());
+ resources.add(resource);
+ } else {
+ _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
+ }
+ }
+
+ resourceList.setApis(resources);
+
+ return resourceList;
+ }
+
+ public ApiDeclaration getApiDeclaration(final String module, final String revision, final UriInfo uriInfo) {
+
+ //Lookup cache
+ String cacheKey = generateCacheKey(module, revision);
- Module m = schemaContext.findModuleByName(module, rev);
- Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+ if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
+ _logger.debug("Serving from cache for {}", cacheKey);
+ return MODULE_DOC_CACHE.get(cacheKey);
+ }
+
+ Date rev = null;
+ try {
+ rev = SIMPLE_DATE_FORMAT.parse(revision);
+ } catch (ParseException e) {
+ throw new IllegalArgumentException(e);
+ }
- String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Module m = schemaContext.findModuleByName(module, rev);
+ Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+
+ String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
.append("://")
.append(uriInfo.getBaseUri().getHost())
.append(":")
.append(RESTCONF_CONTEXT_ROOT)
.toString();
- ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
- MODULE_DOC_CACHE.put(cacheKey, doc);
- return doc;
- }
+ ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
+ MODULE_DOC_CACHE.put(cacheKey, doc);
+ return doc;
+ }
- public ApiDeclaration getSwaggerDocSpec(Module m, String basePath) {
- ApiDeclaration doc = new ApiDeclaration();
- doc.setApiVersion(API_VERSION);
- doc.setSwaggerVersion(SWAGGER_VERSION);
- doc.setBasePath(basePath);
- doc.setProduces(Arrays.asList("application/json", "application/xml"));
+ public ApiDeclaration getSwaggerDocSpec(final Module m, final String basePath) {
+ ApiDeclaration doc = new ApiDeclaration();
+ doc.setApiVersion(API_VERSION);
+ doc.setSwaggerVersion(SWAGGER_VERSION);
+ doc.setBasePath(basePath);
+ doc.setProduces(Arrays.asList("application/json", "application/xml"));
- List<Api> apis = new ArrayList<Api>();
+ List<Api> apis = new ArrayList<Api>();
- Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
- _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
- for (DataSchemaNode node : dataSchemaNodes) {
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
+ _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
+ for (DataSchemaNode node : dataSchemaNodes) {
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
+ _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
- List<Parameter> pathParams = null;
- if (node.isConfiguration()) {
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/config/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, true);
+ List<Parameter> pathParams = null;
+ if (node.isConfiguration()) {
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/config/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, true);
+ }
+
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/operational/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, false);
+ }
}
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/operational/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, false);
- }
- }
+ Set<RpcDefinition> rpcs = m.getRpcs();
+ for (RpcDefinition rpcDefinition : rpcs) {
+ String resourcePath = "/operations/" + m.getName() + ":";
+ addRpcs(rpcDefinition, apis, resourcePath);
- Set<RpcDefinition> rpcs = m.getRpcs();
- for (RpcDefinition rpcDefinition : rpcs) {
- String resourcePath = "/operations/" + m.getName() + ":";
- addRpcs(rpcDefinition, apis, resourcePath);
+ }
+ _logger.debug("Number of APIs found [{}]", apis.size());
+ doc.setApis(apis);
+ JSONObject models = null;
+
+ try {
+ models = jsonConverter.convertToJsonSchema(m);
+ doc.setModels(models);
+ _logger.debug(mapper.writeValueAsString(doc));
+ } catch (IOException | JSONException e) {
+ e.printStackTrace();
+ }
+ return doc;
}
- _logger.debug("Number of APIs found [{}]", apis.size());
- doc.setApis(apis);
- JSONObject models = null;
-
- try {
- models = jsonConverter.convertToJsonSchema(m);
- doc.setModels(models);
- _logger.debug(mapper.writeValueAsString(doc));
- } catch (IOException | JSONException e) {
- e.printStackTrace();
+
+ private String generateCacheKey(final Module m) {
+ return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
}
- return doc;
- }
-
- private String generateCacheKey(Module m) {
- return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
- }
-
- private String generateCacheKey(String module, String revision) {
- return module + "," + revision;
- }
-
- private void addApis(DataSchemaNode node,
- List<Api> apis,
- String parentPath,
- List<Parameter> parentPathParams,
- boolean addConfigApi) {
-
- Api api = new Api();
- List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
-
- String resourcePath = parentPath + createPath(node, pathParams) + "/";
- _logger.debug("Adding path: [{}]", resourcePath);
- api.setPath(resourcePath);
- api.setOperations(operations(node, pathParams, addConfigApi));
- apis.add(api);
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- DataNodeContainer schemaNode = (DataNodeContainer) node;
- Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
-
- for (DataSchemaNode childNode : dataSchemaNodes) {
- addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
- }
+ private String generateCacheKey(final String module, final String revision) {
+ return module + "," + revision;
}
- }
+ private void addApis(final DataSchemaNode node,
+ final List<Api> apis,
+ final String parentPath,
+ final List<Parameter> parentPathParams,
+ final boolean addConfigApi) {
+
+ Api api = new Api();
+ List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
+
+ String resourcePath = parentPath + createPath(node, pathParams) + "/";
+ _logger.debug("Adding path: [{}]", resourcePath);
+ api.setPath(resourcePath);
+ api.setOperations(operations(node, pathParams, addConfigApi));
+ apis.add(api);
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ DataNodeContainer schemaNode = (DataNodeContainer) node;
+ Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
+
+ for (DataSchemaNode childNode : dataSchemaNodes) {
+ addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
+ }
+ }
- private void addRpcs(RpcDefinition rpcDefn, List<Api> apis, String parentPath) {
- Api rpc = new Api();
- String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
- rpc.setPath(resourcePath);
+ }
+
+ private void addRpcs(final RpcDefinition rpcDefn, final List<Api> apis, final String parentPath) {
+ Api rpc = new Api();
+ String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
+ rpc.setPath(resourcePath);
- Operation operationSpec = new Operation();
- operationSpec.setMethod("POST");
- operationSpec.setNotes(rpcDefn.getDescription());
- operationSpec.setNickname(rpcDefn.getQName().getLocalName());
- rpc.setOperations(Arrays.asList(operationSpec));
+ Operation operationSpec = new Operation();
+ operationSpec.setMethod("POST");
+ operationSpec.setNotes(rpcDefn.getDescription());
+ operationSpec.setNickname(rpcDefn.getQName().getLocalName());
+ rpc.setOperations(Arrays.asList(operationSpec));
- apis.add(rpc);
- }
+ apis.add(rpc);
+ }
- /**
- * @param node
- * @param pathParams
- * @return
- */
- private List<Operation> operations(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig) {
- List<Operation> operations = new ArrayList<>();
+ /**
+ * @param node the YANG schema node for which CRUD operations are generated
+ * @param pathParams path parameters accumulated from ancestor list nodes
+ * @return GET always; plus POST, PUT and DELETE when isConfig is true
+ */
+ private List<Operation> operations(final DataSchemaNode node, final List<Parameter> pathParams, final boolean isConfig) {
+ List<Operation> operations = new ArrayList<>();
- OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
- operations.add(getBuilder.pathParams(pathParams).build());
+ OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
+ operations.add(getBuilder.pathParams(pathParams).build());
- if (isConfig) {
- OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
- operations.add(postBuilder.pathParams(pathParams).build());
+ if (isConfig) {
+ OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
+ operations.add(postBuilder.pathParams(pathParams).build());
- OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
- operations.add(putBuilder.pathParams(pathParams).build());
+ OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
+ operations.add(putBuilder.pathParams(pathParams).build());
- OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
- operations.add(deleteBuilder.pathParams(pathParams).build());
+ OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
+ operations.add(deleteBuilder.pathParams(pathParams).build());
+ }
+ return operations;
}
- return operations;
- }
-
- private String createPath(final DataSchemaNode schemaNode, List<Parameter> pathParams) {
- ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
- StringBuilder path = new StringBuilder();
- QName _qName = schemaNode.getQName();
- String localName = _qName.getLocalName();
- path.append(localName);
-
- if ((schemaNode instanceof ListSchemaNode)) {
- final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
- for (final QName listKey : listKeys) {
- {
- DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
- pathListParams.add(((LeafSchemaNode) _dataChildByName));
-
- String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
- path.append(pathParamIdentifier);
-
- Parameter pathParam = new Parameter();
- pathParam.setName(listKey.getLocalName());
- pathParam.setDescription(_dataChildByName.getDescription());
- pathParam.setType("string");
- pathParam.setParamType("path");
-
- pathParams.add(pathParam);
+
+ private String createPath(final DataSchemaNode schemaNode, final List<Parameter> pathParams) {
+ ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
+ StringBuilder path = new StringBuilder();
+ QName _qName = schemaNode.getQName();
+ String localName = _qName.getLocalName();
+ path.append(localName);
+
+ if ((schemaNode instanceof ListSchemaNode)) {
+ final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
+ for (final QName listKey : listKeys) {
+ {
+ DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
+ pathListParams.add(((LeafSchemaNode) _dataChildByName));
+
+ String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
+ path.append(pathParamIdentifier);
+
+ Parameter pathParam = new Parameter();
+ pathParam.setName(listKey.getLocalName());
+ pathParam.setDescription(_dataChildByName.getDescription());
+ pathParam.setType("string");
+ pathParam.setParamType("path");
+
+ pathParams.add(pathParam);
+ }
+ }
}
- }
+ return path.toString();
}
- return path.toString();
- }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
-import org.opendaylight.yangtools.yang.model.api.*;
-import org.opendaylight.yangtools.yang.model.api.type.*;
+import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceCaseNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceNode;
+import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition.Bit;
import org.opendaylight.yangtools.yang.model.api.type.EnumTypeDefinition.EnumPair;
-import org.opendaylight.yangtools.yang.model.util.*;
+import org.opendaylight.yangtools.yang.model.api.type.IdentityrefTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.LengthConstraint;
+import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
+import org.opendaylight.yangtools.yang.model.util.BooleanType;
+import org.opendaylight.yangtools.yang.model.util.Decimal64;
+import org.opendaylight.yangtools.yang.model.util.EnumerationType;
+import org.opendaylight.yangtools.yang.model.util.ExtendedType;
+import org.opendaylight.yangtools.yang.model.util.Int16;
+import org.opendaylight.yangtools.yang.model.util.Int32;
+import org.opendaylight.yangtools.yang.model.util.Int64;
+import org.opendaylight.yangtools.yang.model.util.Int8;
+import org.opendaylight.yangtools.yang.model.util.StringType;
+import org.opendaylight.yangtools.yang.model.util.Uint16;
+import org.opendaylight.yangtools.yang.model.util.Uint32;
+import org.opendaylight.yangtools.yang.model.util.Uint64;
+import org.opendaylight.yangtools.yang.model.util.Uint8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.*;
-
/**
* Generates JSON Schema for data defined in Yang
*/
public class ModelGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
-
- private static final String BASE_64 = "base64";
- private static final String BINARY_ENCODING_KEY = "binaryEncoding";
- private static final String MEDIA_KEY = "media";
- private static final String ONE_OF_KEY = "oneOf";
- private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
- private static final String MAX_ITEMS = "maxItems";
- private static final String MIN_ITEMS = "minItems";
- private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
- private static final String SCHEMA_KEY = "$schema";
- private static final String MAX_LENGTH_KEY = "maxLength";
- private static final String MIN_LENGTH_KEY = "minLength";
- private static final String REQUIRED_KEY = "required";
- private static final String REF_KEY = "$ref";
- private static final String ITEMS_KEY = "items";
- private static final String TYPE_KEY = "type";
- private static final String PROPERTIES_KEY = "properties";
- private static final String DESCRIPTION_KEY = "description";
- private static final String OBJECT_TYPE = "object";
- private static final String ARRAY_TYPE = "array";
- private static final String ENUM = "enum";
- private static final String INTEGER = "integer";
- private static final String NUMBER = "number";
- private static final String BOOLEAN = "boolean";
- private static final String STRING = "string";
-
- private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
-
- static {
- Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
- tempMap1.put(StringType.class , STRING);
- tempMap1.put(BooleanType.class , BOOLEAN);
- tempMap1.put(Int8.class , INTEGER);
- tempMap1.put(Int16.class , INTEGER);
- tempMap1.put(Int32.class , INTEGER);
- tempMap1.put(Int64.class , INTEGER);
- tempMap1.put(Uint16.class , INTEGER);
- tempMap1.put(Uint32.class , INTEGER);
- tempMap1.put(Uint64.class , INTEGER);
- tempMap1.put(Uint8.class , INTEGER);
- tempMap1.put(Decimal64.class , NUMBER);
- tempMap1.put(EnumerationType.class , ENUM);
- //TODO: Binary type
-
- YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
- }
-
- public ModelGenerator(){
- }
-
- public JSONObject convertToJsonSchema(Module module) throws IOException, JSONException {
- JSONObject models = new JSONObject();
- processContainers(module, models);
- processRPCs(module, models);
-
- return models;
- }
-
-
-
- private void processContainers(Module module, JSONObject models) throws IOException, JSONException {
-
- String moduleName = module.getName();
- Set<DataSchemaNode> childNodes = module.getChildNodes();
-
- for(DataSchemaNode childNode : childNodes){
- JSONObject moduleJSON=null;
- String filename = childNode.getQName().getLocalName();
+ private static final Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
+
+ private static final String BASE_64 = "base64";
+ private static final String BINARY_ENCODING_KEY = "binaryEncoding";
+ private static final String MEDIA_KEY = "media";
+ private static final String ONE_OF_KEY = "oneOf";
+ private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
+ private static final String MAX_ITEMS = "maxItems";
+ private static final String MIN_ITEMS = "minItems";
+ private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
+ private static final String SCHEMA_KEY = "$schema";
+ private static final String MAX_LENGTH_KEY = "maxLength";
+ private static final String MIN_LENGTH_KEY = "minLength";
+ private static final String REQUIRED_KEY = "required";
+ private static final String REF_KEY = "$ref";
+ private static final String ITEMS_KEY = "items";
+ private static final String TYPE_KEY = "type";
+ private static final String PROPERTIES_KEY = "properties";
+ private static final String DESCRIPTION_KEY = "description";
+ private static final String OBJECT_TYPE = "object";
+ private static final String ARRAY_TYPE = "array";
+ private static final String ENUM = "enum";
+ private static final String INTEGER = "integer";
+ private static final String NUMBER = "number";
+ private static final String BOOLEAN = "boolean";
+ private static final String STRING = "string";
+
+ private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
+
+ static {
+ Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
+ tempMap1.put(StringType.class , STRING);
+ tempMap1.put(BooleanType.class , BOOLEAN);
+ tempMap1.put(Int8.class , INTEGER);
+ tempMap1.put(Int16.class , INTEGER);
+ tempMap1.put(Int32.class , INTEGER);
+ tempMap1.put(Int64.class , INTEGER);
+ tempMap1.put(Uint16.class , INTEGER);
+ tempMap1.put(Uint32.class , INTEGER);
+ tempMap1.put(Uint64.class , INTEGER);
+ tempMap1.put(Uint8.class , INTEGER);
+ tempMap1.put(Decimal64.class , NUMBER);
+ tempMap1.put(EnumerationType.class , ENUM);
+ //TODO: Binary type
+
+ YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
+ }
+
+ public ModelGenerator(){
+ }
+
+ public JSONObject convertToJsonSchema(final Module module) throws IOException, JSONException {
+ JSONObject models = new JSONObject();
+ processContainers(module, models);
+ processRPCs(module, models);
+
+ return models;
+ }
+
+
+
+ private void processContainers(final Module module, final JSONObject models) throws IOException, JSONException {
+
+ String moduleName = module.getName();
+ Set<DataSchemaNode> childNodes = module.getChildNodes();
+
+ for(DataSchemaNode childNode : childNodes){
+ JSONObject moduleJSON=null;
+ String filename = childNode.getQName().getLocalName();
/*
* For every container in the module
*/
- if(childNode instanceof ContainerSchemaNode) {
- moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
- }
-
- if(moduleJSON!=null) {
- _logger.debug("Adding model for [{}]", filename);
- moduleJSON.put("id", filename);
- models.put(filename, moduleJSON);
- }
- }
+ if(childNode instanceof ContainerSchemaNode) {
+ moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
+ }
+
+ if(moduleJSON!=null) {
+ _logger.debug("Adding model for [{}]", filename);
+ moduleJSON.put("id", filename);
+ models.put(filename, moduleJSON);
+ }
+ }
- }
-
-
- /**
- * Process the RPCs for a Module
- * Spits out a file each of the name <rpcName>-input.json
- * and <rpcName>-output.json for each RPC that contains
- * input & output elements
- *
- * @param module
- * @throws JSONException
- * @throws IOException
- */
- private void processRPCs(Module module, JSONObject models) throws JSONException, IOException {
-
- Set<RpcDefinition> rpcs = module.getRpcs();
- String moduleName = module.getName();
- for(RpcDefinition rpc: rpcs) {
-
- ContainerSchemaNode input = rpc.getInput();
- if(input!=null) {
- JSONObject inputJSON = processContainer(input, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-input";
- inputJSON.put("id", filename);
- //writeToFile(filename, inputJSON.toString(2), moduleName);
- models.put(filename, inputJSON);
- }
-
- ContainerSchemaNode output = rpc.getOutput();
- if(output!=null) {
- JSONObject outputJSON = processContainer(output, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-output";
- outputJSON.put("id", filename);
- models.put(filename, outputJSON);
- }
}
- }
-
-
- /**
- * Processes the container node and populates the moduleJSON
- *
- * @param container
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt, JSONObject models) throws JSONException, IOException{
- JSONObject moduleJSON = getSchemaTemplate();
- if(addSchemaStmt) {
- moduleJSON = getSchemaTemplate();
- } else {
- moduleJSON = new JSONObject();
+
+
+ /**
+ * Processes the RPCs of a module.
+ * For each RPC that defines input and/or output elements, adds a
+ * JSON schema model named &lt;rpcName&gt;-input and/or
+ * &lt;rpcName&gt;-output to the supplied models object.
+ *
+ * @param module the YANG module whose RPCs are processed
+ * @throws JSONException if a model cannot be assembled
+ * @throws IOException propagated from container processing
+ */
+ private void processRPCs(final Module module, final JSONObject models) throws JSONException, IOException {
+
+ Set<RpcDefinition> rpcs = module.getRpcs();
+ String moduleName = module.getName();
+ for(RpcDefinition rpc: rpcs) {
+
+ ContainerSchemaNode input = rpc.getInput();
+ if(input!=null) {
+ JSONObject inputJSON = processContainer(input, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-input";
+ inputJSON.put("id", filename);
+ //writeToFile(filename, inputJSON.toString(2), moduleName);
+ models.put(filename, inputJSON);
+ }
+
+ ContainerSchemaNode output = rpc.getOutput();
+ if(output!=null) {
+ JSONObject outputJSON = processContainer(output, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-output";
+ outputJSON.put("id", filename);
+ models.put(filename, outputJSON);
+ }
+ }
}
- moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
-
- String containerDescription = container.getDescription();
- moduleJSON.put(DESCRIPTION_KEY, containerDescription);
-
- Set<DataSchemaNode> containerChildren = ((ContainerSchemaNode)container).getChildNodes();
- JSONObject properties = processChildren(containerChildren, moduleName, models);
- moduleJSON.put(PROPERTIES_KEY, properties);
- return moduleJSON;
- }
-
- /**
- * Processes the nodes
- * @param nodes
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChildren(Set<DataSchemaNode> nodes, String moduleName, JSONObject models) throws JSONException, IOException {
-
- JSONObject properties = new JSONObject();
-
- for(DataSchemaNode node : nodes){
- String name = node.getQName().getLocalName();
- JSONObject property = null;
- if(node instanceof LeafSchemaNode) {
- property = processLeafNode((LeafSchemaNode)node);
- } else if (node instanceof ListSchemaNode) {
- property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
-
- } else if (node instanceof LeafListSchemaNode) {
- property = processLeafListNode((LeafListSchemaNode)node);
-
- } else if (node instanceof ChoiceNode) {
- property = processChoiceNode((ChoiceNode)node, moduleName, models);
-
- } else if (node instanceof AnyXmlSchemaNode) {
- property = processAnyXMLNode((AnyXmlSchemaNode)node);
-
- } else if (node instanceof ContainerSchemaNode) {
- property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
-
- } else {
- throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
- }
-
- property.putOpt(DESCRIPTION_KEY, node.getDescription());
- properties.put(name, property);
+
+
+ /**
+ * Processes the container node and populates the moduleJSON
+ *
+ * @param container
+ * @param moduleName
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processContainer(final ContainerSchemaNode container, final String moduleName, final boolean addSchemaStmt, final JSONObject models) throws JSONException, IOException{
+ JSONObject moduleJSON = getSchemaTemplate();
+ if(addSchemaStmt) {
+ moduleJSON = getSchemaTemplate();
+ } else {
+ moduleJSON = new JSONObject();
+ }
+ moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
+
+ String containerDescription = container.getDescription();
+ moduleJSON.put(DESCRIPTION_KEY, containerDescription);
+
+ Set<DataSchemaNode> containerChildren = container.getChildNodes();
+ JSONObject properties = processChildren(containerChildren, moduleName, models);
+ moduleJSON.put(PROPERTIES_KEY, properties);
+ return moduleJSON;
}
- return properties;
- }
-
- /**
- *
- * @param listNode
- * @throws JSONException
- */
- private JSONObject processLeafListNode(LeafListSchemaNode listNode) throws JSONException {
- JSONObject props = new JSONObject();
- props.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject itemsVal = new JSONObject();
- processTypeDef(listNode.getType(), itemsVal);
- props.put(ITEMS_KEY, itemsVal);
-
- ConstraintDefinition constraints = listNode.getConstraints();
- processConstraints(constraints, props);
-
- return props;
- }
-
- /**
- *
- * @param choiceNode
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChoiceNode(ChoiceNode choiceNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<ChoiceCaseNode> cases = choiceNode.getCases();
-
- JSONArray choiceProps = new JSONArray();
- for(ChoiceCaseNode choiceCase: cases) {
- String choiceName = choiceCase.getQName().getLocalName();
- JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
- JSONObject choiceObj = new JSONObject();
- choiceObj.put(choiceName, choiceProp);
- choiceObj.put(TYPE_KEY, OBJECT_TYPE);
- choiceProps.put(choiceObj);
+
+ /**
+ * Builds a JSON "properties" object covering the given child nodes.
+ * @param nodes the child schema nodes to convert
+ * @param moduleName name of the enclosing YANG module
+ * @return a JSONObject mapping each node's local name to its JSON schema
+ * @throws JSONException if a property cannot be assembled
+ * @throws IOException propagated from nested container processing
+ */
+ private JSONObject processChildren(final Set<DataSchemaNode> nodes, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ JSONObject properties = new JSONObject();
+
+ for(DataSchemaNode node : nodes){
+ String name = node.getQName().getLocalName();
+ JSONObject property = null;
+ if(node instanceof LeafSchemaNode) {
+ property = processLeafNode((LeafSchemaNode)node);
+ } else if (node instanceof ListSchemaNode) {
+ property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
+
+ } else if (node instanceof LeafListSchemaNode) {
+ property = processLeafListNode((LeafListSchemaNode)node);
+
+ } else if (node instanceof ChoiceNode) {
+ property = processChoiceNode((ChoiceNode)node, moduleName, models);
+
+ } else if (node instanceof AnyXmlSchemaNode) {
+ property = processAnyXMLNode((AnyXmlSchemaNode)node);
+
+ } else if (node instanceof ContainerSchemaNode) {
+ property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
+
+ } else {
+ throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
+ }
+
+ property.putOpt(DESCRIPTION_KEY, node.getDescription());
+ properties.put(name, property);
+ }
+ return properties;
}
- JSONObject oneOfProps = new JSONObject();
- oneOfProps.put(ONE_OF_KEY, choiceProps);
- oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+ /**
+ * Converts a leaf-list node into a JSON array schema with item type and constraints.
+ * @param listNode the leaf-list schema node to convert
+ * @throws JSONException if the schema cannot be assembled
+ */
+ private JSONObject processLeafListNode(final LeafListSchemaNode listNode) throws JSONException {
+ JSONObject props = new JSONObject();
+ props.put(TYPE_KEY, ARRAY_TYPE);
- return oneOfProps;
- }
+ JSONObject itemsVal = new JSONObject();
+ processTypeDef(listNode.getType(), itemsVal);
+ props.put(ITEMS_KEY, itemsVal);
+ ConstraintDefinition constraints = listNode.getConstraints();
+ processConstraints(constraints, props);
- /**
- *
- * @param constraints
- * @param props
- * @throws JSONException
- */
- private void processConstraints(ConstraintDefinition constraints, JSONObject props) throws JSONException {
- boolean isMandatory = constraints.isMandatory();
- props.put(REQUIRED_KEY, isMandatory);
+ return props;
+ }
- Integer minElements = constraints.getMinElements();
- Integer maxElements = constraints.getMaxElements();
- if(minElements !=null) {
- props.put(MIN_ITEMS, minElements);
+ /**
+ * Converts a choice node into a JSON "oneOf" schema over its cases.
+ * @param choiceNode the choice schema node to convert
+ * @param moduleName name of the enclosing YANG module
+ * @throws JSONException if the schema cannot be assembled
+ * @throws IOException propagated from case-child processing
+ */
+ private JSONObject processChoiceNode(final ChoiceNode choiceNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<ChoiceCaseNode> cases = choiceNode.getCases();
+
+ JSONArray choiceProps = new JSONArray();
+ for(ChoiceCaseNode choiceCase: cases) {
+ String choiceName = choiceCase.getQName().getLocalName();
+ JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
+ JSONObject choiceObj = new JSONObject();
+ choiceObj.put(choiceName, choiceProp);
+ choiceObj.put(TYPE_KEY, OBJECT_TYPE);
+ choiceProps.put(choiceObj);
+ }
+
+ JSONObject oneOfProps = new JSONObject();
+ oneOfProps.put(ONE_OF_KEY, choiceProps);
+ oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+
+ return oneOfProps;
}
- if(maxElements !=null) {
- props.put(MAX_ITEMS, maxElements);
+
+
+ /**
+ * Copies mandatory / min-elements / max-elements constraints into the JSON schema.
+ * @param constraints the YANG constraint definition to translate
+ * @param props the JSON schema object receiving the constraint keys
+ * @throws JSONException if a key cannot be written
+ */
+ private void processConstraints(final ConstraintDefinition constraints, final JSONObject props) throws JSONException {
+ boolean isMandatory = constraints.isMandatory();
+ props.put(REQUIRED_KEY, isMandatory);
+
+ Integer minElements = constraints.getMinElements();
+ Integer maxElements = constraints.getMaxElements();
+ if(minElements !=null) {
+ props.put(MIN_ITEMS, minElements);
+ }
+ if(maxElements !=null) {
+ props.put(MAX_ITEMS, maxElements);
+ }
}
- }
-
- /**
- * Parses a ListSchema node.
- *
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
- * must be in a separate JSON schema file. Hence, we have to write
- * some properties to a new file, while continuing to process the rest.
- *
- * @param listNode
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<DataSchemaNode> listChildren = listNode.getChildNodes();
- String fileName = listNode.getQName().getLocalName();
-
- JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
- JSONObject childSchema = getSchemaTemplate();
- childSchema.put(TYPE_KEY, OBJECT_TYPE);
- childSchema.put(PROPERTIES_KEY, childSchemaProperties);
+
+ /**
+ * Parses a ListSchema node.
+ *
+ * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
+ * must be in a separate JSON schema file. Hence, we have to write
+ * some properties to a new file, while continuing to process the rest.
+ *
+ * @param listNode
+ * @param moduleName
+ * @return
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processListSchemaNode(final ListSchemaNode listNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<DataSchemaNode> listChildren = listNode.getChildNodes();
+ String fileName = listNode.getQName().getLocalName();
+
+ JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
+ JSONObject childSchema = getSchemaTemplate();
+ childSchema.put(TYPE_KEY, OBJECT_TYPE);
+ childSchema.put(PROPERTIES_KEY, childSchemaProperties);
/*
* Due to a limitation of the RAML--->JAX-RS tool, sub-properties
* must be in a separate JSON schema file. Hence, we have to write
* some properties to a new file, while continuing to process the rest.
*/
- //writeToFile(fileName, childSchema.toString(2), moduleName);
- childSchema.put("id", fileName);
- models.put(fileName, childSchema);
-
-
- JSONObject listNodeProperties = new JSONObject();
- listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject items = new JSONObject();
- items.put(REF_KEY,fileName );
- listNodeProperties.put(ITEMS_KEY, items);
-
- return listNodeProperties;
-
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processLeafNode(LeafSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
- processTypeDef(leafNode.getType(), property);
-
- return property;
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processAnyXMLNode(AnyXmlSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
-
- return property;
- }
-
- /**
- * @param property
- * @throws JSONException
- */
- private void processTypeDef(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
-
- if(leafTypeDef instanceof ExtendedType){
- processExtendedType(leafTypeDef, property);
- } else if (leafTypeDef instanceof EnumerationType) {
- processEnumType((EnumerationType)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof BitsTypeDefinition) {
- processBitsType((BitsTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof UnionTypeDefinition) {
- processUnionType((UnionTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
- property.putOpt(TYPE_KEY, "object");
- } else if (leafTypeDef instanceof BinaryTypeDefinition) {
- processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
- } else {
- //System.out.println("In else: " + leafTypeDef.getClass());
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
- if(jsonType==null) {
- jsonType = "object";
- }
- property.putOpt(TYPE_KEY, jsonType);
+ //writeToFile(fileName, childSchema.toString(2), moduleName);
+ childSchema.put("id", fileName);
+ models.put(fileName, childSchema);
+
+
+ JSONObject listNodeProperties = new JSONObject();
+ listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
+
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY,fileName );
+ listNodeProperties.put(ITEMS_KEY, items);
+
+ return listNodeProperties;
+
}
- }
-
- /**
- *
- * @param leafTypeDef
- * @param property
- * @throws JSONException
- */
- private void processExtendedType(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
- Object leafBaseType = leafTypeDef.getBaseType();
- if(leafBaseType instanceof ExtendedType){
- //recursively process an extended type until we hit a base type
- processExtendedType((TypeDefinition<?>)leafBaseType, property);
- } else {
- List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
- for(LengthConstraint lengthConstraint: lengthConstraints) {
- Number min = lengthConstraint.getMin();
- Number max = lengthConstraint.getMax();
- property.putOpt(MIN_LENGTH_KEY, min);
- property.putOpt(MAX_LENGTH_KEY, max);
- }
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
- property.putOpt(TYPE_KEY,jsonType );
+
+ /**
+ * Builds a JSON-schema property for a leaf node from its description, constraints and type.
+ * @param leafNode the YANG leaf schema node to process
+ * @return a JSON property object describing the leaf
+ * @throws JSONException if the property object cannot be built
+ */
+ private JSONObject processLeafNode(final LeafSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
+
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+
+ processConstraints(leafNode.getConstraints(), property);
+ processTypeDef(leafNode.getType(), property);
+
+ return property;
}
- }
-
- /*
- *
- */
- private void processBinaryType(BinaryTypeDefinition binaryType, JSONObject property) throws JSONException {
- property.put(TYPE_KEY, STRING);
- JSONObject media = new JSONObject();
- media.put(BINARY_ENCODING_KEY, BASE_64);
- property.put(MEDIA_KEY, media);
- }
-
- /**
- *
- * @param enumLeafType
- * @param property
- * @throws JSONException
- */
- private void processEnumType(EnumerationType enumLeafType, JSONObject property) throws JSONException {
- List<EnumPair> enumPairs = enumLeafType.getValues();
- List<String> enumNames = new ArrayList<String>();
- for(EnumPair enumPair: enumPairs) {
- enumNames.add(enumPair.getName());
+ /**
+ * Builds a JSON-schema property for an anyxml node from its description and constraints.
+ * @param leafNode the YANG anyxml schema node to process
+ * @return a JSON property object describing the node
+ * @throws JSONException if the property object cannot be built
+ */
+ private JSONObject processAnyXMLNode(final AnyXmlSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
+
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+
+ processConstraints(leafNode.getConstraints(), property);
+
+ return property;
}
- property.putOpt(ENUM, new JSONArray(enumNames));
- }
-
- /**
- *
- * @param bitsType
- * @param property
- * @throws JSONException
- */
- private void processBitsType(BitsTypeDefinition bitsType, JSONObject property) throws JSONException{
- property.put(TYPE_KEY, ARRAY_TYPE);
- property.put(MIN_ITEMS, 0);
- property.put(UNIQUE_ITEMS_KEY, true);
- JSONArray enumValues = new JSONArray();
-
- List<Bit> bits = bitsType.getBits();
- for(Bit bit: bits) {
- enumValues.put(bit.getName());
+
+ /**
+ * @param property the JSON property object to populate with type information
+ * @throws JSONException if the property cannot be updated
+ */
+ private void processTypeDef(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+
+ if(leafTypeDef instanceof ExtendedType){
+ processExtendedType(leafTypeDef, property);
+ } else if (leafTypeDef instanceof EnumerationType) {
+ processEnumType((EnumerationType)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof BitsTypeDefinition) {
+ processBitsType((BitsTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof UnionTypeDefinition) {
+ processUnionType((UnionTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
+ property.putOpt(TYPE_KEY, "object");
+ } else if (leafTypeDef instanceof BinaryTypeDefinition) {
+ processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
+ } else {
+ //System.out.println("In else: " + leafTypeDef.getClass());
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
+ if(jsonType==null) {
+ jsonType = "object";
+ }
+ property.putOpt(TYPE_KEY, jsonType);
+ }
}
- JSONObject itemsValue = new JSONObject();
- itemsValue.put(ENUM, enumValues);
- property.put(ITEMS_KEY, itemsValue);
- }
-
-
- /**
- *
- * @param unionType
- * @param property
- * @throws JSONException
- */
- private void processUnionType(UnionTypeDefinition unionType, JSONObject property) throws JSONException{
-
- List<TypeDefinition<?>> unionTypes = unionType.getTypes();
- JSONArray unionArray = new JSONArray();
- for(TypeDefinition<?> typeDef: unionTypes) {
- unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+
+ /**
+ * Recursively resolves an extended type down to its base type, copying any length constraints.
+ * @param leafTypeDef the (possibly nested) extended type definition
+ * @param property the JSON property object to populate
+ * @throws JSONException if the property cannot be updated
+ */
+ private void processExtendedType(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+ Object leafBaseType = leafTypeDef.getBaseType();
+ if(leafBaseType instanceof ExtendedType){
+ //recursively process an extended type until we hit a base type
+ processExtendedType((TypeDefinition<?>)leafBaseType, property);
+ } else {
+ List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
+ for(LengthConstraint lengthConstraint: lengthConstraints) {
+ Number min = lengthConstraint.getMin();
+ Number max = lengthConstraint.getMax();
+ property.putOpt(MIN_LENGTH_KEY, min);
+ property.putOpt(MAX_LENGTH_KEY, max);
+ }
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
+ property.putOpt(TYPE_KEY,jsonType );
+ }
+
+ }
+
+ /*
+ * Maps a binary YANG type to a base64-encoded JSON string property.
+ */
+ private void processBinaryType(final BinaryTypeDefinition binaryType, final JSONObject property) throws JSONException {
+ property.put(TYPE_KEY, STRING);
+ JSONObject media = new JSONObject();
+ media.put(BINARY_ENCODING_KEY, BASE_64);
+ property.put(MEDIA_KEY, media);
+ }
+
+ /**
+ * Adds the enumeration's value names to the property as a JSON "enum" array.
+ * @param enumLeafType the YANG enumeration type definition
+ * @param property the JSON property object to populate
+ * @throws JSONException if the property cannot be updated
+ */
+ private void processEnumType(final EnumerationType enumLeafType, final JSONObject property) throws JSONException {
+ List<EnumPair> enumPairs = enumLeafType.getValues();
+ List<String> enumNames = new ArrayList<String>();
+ for(EnumPair enumPair: enumPairs) {
+ enumNames.add(enumPair.getName());
+ }
+ property.putOpt(ENUM, new JSONArray(enumNames));
+ }
+
+ /**
+ * Models a bits type as a unique-items JSON array whose entries are the bit names.
+ * @param bitsType the YANG bits type definition
+ * @param property the JSON property object to populate
+ * @throws JSONException if the property cannot be updated
+ */
+ private void processBitsType(final BitsTypeDefinition bitsType, final JSONObject property) throws JSONException{
+ property.put(TYPE_KEY, ARRAY_TYPE);
+ property.put(MIN_ITEMS, 0);
+ property.put(UNIQUE_ITEMS_KEY, true);
+ JSONArray enumValues = new JSONArray();
+
+ List<Bit> bits = bitsType.getBits();
+ for(Bit bit: bits) {
+ enumValues.put(bit.getName());
+ }
+ JSONObject itemsValue = new JSONObject();
+ itemsValue.put(ENUM, enumValues);
+ property.put(ITEMS_KEY, itemsValue);
+ }
+
+
+ /**
+ * Models a union type as a JSON array of the member types' JSON equivalents.
+ * @param unionType the YANG union type definition
+ * @param property the JSON property object to populate
+ * @throws JSONException if the property cannot be updated
+ */
+ private void processUnionType(final UnionTypeDefinition unionType, final JSONObject property) throws JSONException{
+
+ List<TypeDefinition<?>> unionTypes = unionType.getTypes();
+ JSONArray unionArray = new JSONArray();
+ for(TypeDefinition<?> typeDef: unionTypes) {
+ unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+ }
+ property.put(TYPE_KEY, unionArray);
+ }
+
+
+ /**
+ * Helper method to generate a pre-filled
+ * JSON schema object.
+ * @return a new JSON object with the schema URL pre-set
+ * @throws JSONException if the template object cannot be created
+ */
+ private JSONObject getSchemaTemplate() throws JSONException {
+ JSONObject schemaJSON = new JSONObject();
+ schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
+
+ return schemaJSON;
}
- property.put(TYPE_KEY, unionArray);
- }
-
-
- /**
- * Helper method to generate a pre-filled
- * JSON schema object.
- * @return
- * @throws JSONException
- */
- private JSONObject getSchemaTemplate() throws JSONException {
- JSONObject schemaJSON = new JSONObject();
- schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
-
- return schemaJSON;
- }
}
/**
-* Generated file
+ * Generated file
-* Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
-* Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
-* Generated at: Wed Feb 05 11:31:30 CET 2014
-*
-* Do not modify this file unless it is present under src/main directory
-*/
+ * Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
+ * Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
+ * Generated at: Wed Feb 05 11:31:30 CET 2014
+ *
+ * Do not modify this file unless it is present under src/main directory
+ */
package org.opendaylight.controller.config.yang.config.kitchen_service.impl;
-import org.opendaylight.controller.config.yang.config.kitchen_service.impl.AbstractKitchenServiceModule;
import org.opendaylight.controller.sample.kitchen.api.EggsType;
import org.opendaylight.controller.sample.kitchen.api.KitchenService;
import org.opendaylight.controller.sample.kitchen.impl.KitchenServiceImpl;
import org.slf4j.LoggerFactory;
/**
-*
-*/
+ *
+ */
public final class KitchenServiceModule extends AbstractKitchenServiceModule {
private static final Logger log = LoggerFactory.getLogger(KitchenServiceModule.class);
- public KitchenServiceModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ public KitchenServiceModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
super(identifier, dependencyResolver);
}
- public KitchenServiceModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- KitchenServiceModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public KitchenServiceModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final KitchenServiceModule oldModule, final java.lang.AutoCloseable oldInstance) {
super(identifier, dependencyResolver, oldModule, oldInstance);
}
}
@Override
- public boolean makeBreakfast( EggsType eggs, Class<? extends ToastType> toast, int toastDoneness ) {
+ public boolean makeBreakfast( final EggsType eggs, final Class<? extends ToastType> toast, final int toastDoneness ) {
return kitchenService.makeBreakfast( eggs, toast, toastDoneness );
}
}
/**
-* Generated file
+ * Generated file
-* Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
-* Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
-* Generated at: Wed Feb 05 11:31:30 CET 2014
-*
-* Do not modify this file unless it is present under src/main directory
-*/
+ * Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
+ * Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
+ * Generated at: Wed Feb 05 11:31:30 CET 2014
+ *
+ * Do not modify this file unless it is present under src/main directory
+ */
package org.opendaylight.controller.config.yang.config.kitchen_service.impl;
-import org.opendaylight.controller.config.yang.config.kitchen_service.impl.AbstractKitchenServiceModuleFactory;
-
/**
-*
-*/
-public class KitchenServiceModuleFactory extends AbstractKitchenServiceModuleFactory
-{
-
+ *
+ */
+public class KitchenServiceModuleFactory extends AbstractKitchenServiceModuleFactory {
}
<module>opendaylight/commons/opendaylight</module>
<module>opendaylight/commons/parent</module>
<module>opendaylight/commons/logback_settings</module>
+ <module>opendaylight/commons/filter-valve</module>
<!-- Karaf Distribution -->
<module>features/base</module>