<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>base-features</artifactId>
- <packaging>kar</packaging>
+ <packaging>pom</packaging>
<name>${project.artifactId}</name>
<description>Base Features POM</description>
<properties>
</resource>
</resources>
<plugins>
- <plugin>
- <groupId>org.apache.karaf.tooling</groupId>
- <artifactId>karaf-maven-plugin</artifactId>
- <version>${karaf.version}</version>
- <extensions>true</extensions>
- <executions>
- <execution>
- <id>features-create-kar</id>
- <goals>
- <goal>features-create-kar</goal>
- </goals>
- <configuration>
- <featuresFile>${project.build.directory}/classes/${features.file}</featuresFile>
- </configuration>
- </execution>
- </executions>
- <!-- There is no useful configuration for the kar mojo. The features-generate-descriptor mojo configuration may be useful -->
- </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<bundle start="true" start-level="35">mvn:orbit/org.apache.juli.extras/7.0.32.v201211081135</bundle>
<bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.api/7.0.32.v201211081135</bundle>
<bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.util/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">wrap:mvn:virgomirror/org.eclipse.jdt.core.compiler.batch/3.8.0.I20120518-2145</bundle>
</feature>
<feature name="base-spring" description="Opendaylight Spring Support" version="${spring.version}">
<bundle>mvn:org.ow2.asm/asm-all/${asm.version}</bundle>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Aggregator POM that filters and attaches the Karaf features descriptor for the controller. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../../opendaylight/commons/opendaylight</relativePath>
+  </parent>
+  <artifactId>controller-features</artifactId>
+  <packaging>pom</packaging>
+  <name>${project.artifactId}</name>
+  <description>Features POM</description>
+  <properties>
+    <features.file>features.xml</features.file>
+  </properties>
+  <build>
+    <resources>
+      <resource>
+        <filtering>true</filtering>
+        <directory>src/main/resources</directory>
+      </resource>
+    </resources>
+    <plugins>
+      <!-- Expand ${...} placeholders in src/main/resources (notably in ${features.file})
+           during generate-resources so bundle versions are resolved from properties. -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>filter</id>
+            <goals>
+              <goal>resources</goal>
+            </goals>
+            <phase>generate-resources</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- Attach the filtered features.xml as a secondary artifact (type=xml,
+           classifier=features) so Karaf can provision it via mvn:...//xml/features. -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>attach-artifacts</id>
+            <goals>
+              <goal>attach-artifact</goal>
+            </goals>
+            <phase>package</phase>
+            <configuration>
+              <artifacts>
+                <artifact>
+                  <file>${project.build.directory}/classes/${features.file}</file>
+                  <type>xml</type>
+                  <classifier>features</classifier>
+                </artifact>
+              </artifacts>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Karaf feature definitions for the OpenDaylight controller. Bundle versions are
+     ${...} properties substituted by maven-resources-plugin filtering at build time. -->
+<features name="controller-${project.version}" xmlns="http://karaf.apache.org/xmlns/features/v1.2.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://karaf.apache.org/xmlns/features/v1.2.0 http://karaf.apache.org/xmlns/features/v1.2.0">
+
+  <!-- NOTE(review): hosttracker bundles here are also listed inside odl-managers;
+       presumably intentional so the feature is installable standalone — confirm. -->
+  <feature name="odl-hosttracker" description="Controller Service: Host Tracker">
+    <feature>odl-clustering</feature>
+    <feature>odl-managers</feature>
+    <feature>odl-sal</feature>
+    <bundle>mvn:org.opendaylight.controller/hosttracker/${hosttracker.api.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/hosttracker.implementation/${hosttracker.implementation.version}</bundle>
+  </feature>
+  <feature name="odl-sal" description="Service Abstraction Layer"
+    version="${sal.version}">
+    <feature>base-felix-dm</feature>
+    <bundle start="true" start-level="35">mvn:org.apache.commons/commons-lang3/${commons.lang.version}</bundle>
+    <bundle>mvn:org.osgi/org.osgi.compendium/${osgi.compendium.version}</bundle>
+    <bundle>mvn:org.apache.felix/org.apache.felix.dependencymanager/${felix.dependencymanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/sal/${sal.version}</bundle>
+    <!-- The SAL Implementation doesn't follow API versioning, should be revisited in the bundle -->
+    <bundle>mvn:org.opendaylight.controller/sal.implementation/${sal.implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/sal.networkconfiguration/${sal.networkconfiguration.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/sal.networkconfiguration.implementation/${sal.networkconfiguration.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/sal.connection/${sal.connection.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/sal.connection.implementation/${sal.connection.version}</bundle>
+  </feature>
+  <feature name="odl-clustering" description="Controller Service: Clustering">
+    <feature>transaction</feature>
+    <feature>base-felix-dm</feature>
+    <feature>base-eclipselink-persistence</feature>
+    <feature>odl-sal</feature>
+    <bundle>mvn:org.opendaylight.controller/clustering.services/${clustering.services.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/clustering.services-implementation/${clustering.services_implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/clustering.stub/${clustering.stub.version}</bundle>
+  </feature>
+  <feature name="odl-legacy-configuration">
+    <feature>odl-sal</feature>
+    <bundle>mvn:org.opendaylight.controller/configuration/${configuration.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/configuration.implementation/${configuration.implementation.version}</bundle>
+  </feature>
+  <feature name="odl-configuration" description="Controller Service: Configuration">
+    <!-- org.opendaylight.controller.config.yangjmxgenerator is missing -->
+    <bundle>mvn:org.opendaylight.controller/config-api/${config.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-manager/${config.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-netconf-connector/${netconf.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-persister-api/${config.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-persister-directory-xml-adapter/${config.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-persister-file-xml-adapter/${config.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/config-persister-impl/${netconf.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/yang-jmx-generator/${yang-jmx-generator.version}</bundle>
+  </feature>
+  <feature name="odl-managers" description="Big boss">
+    <feature>odl-legacy-configuration</feature>
+    <feature>base-spring-security</feature>
+    <feature>base-felix-dm</feature>
+    <feature>odl-clustering</feature>
+    <bundle>mvn:org.opendaylight.controller.thirdparty/net.sf.jung2/2.0.1</bundle>
+    <bundle>mvn:org.opendaylight.controller/appauth/${appauth.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/hosttracker/${hosttracker.api.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/hosttracker.implementation/${hosttracker.implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/switchmanager/${switchmanager.api.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/switchmanager.implementation/${switchmanager.implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/statisticsmanager/${statisticsmanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/statisticsmanager.implementation/${statisticsmanager.implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/forwardingrulesmanager/${forwardingrulesmanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/forwardingrulesmanager.implementation/${forwardingrulesmanager.implementation.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/usermanager/${usermanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/usermanager.implementation/${usermanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/containermanager/${containermanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/containermanager.implementation/${containermanager.version}</bundle>
+    <bundle>mvn:org.opendaylight.controller/topologymanager/${topologymanager.version}</bundle>
+    <!-- NOTE(review): the four bundle URLs below omit the version segment
+         (mvn:group/artifact with no version); confirm they resolve as intended
+         at provisioning time rather than picking an arbitrary LATEST. -->
+    <bundle>mvn:org.opendaylight.controller/forwarding.staticrouting</bundle>
+    <bundle>mvn:org.opendaylight.controller/routing.dijkstra_implementation</bundle>
+    <bundle>mvn:org.opendaylight.controller/connectionmanager</bundle>
+    <bundle>mvn:org.opendaylight.controller/connectionmanager.implementation</bundle>
+  </feature>
+</features>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../opendaylight</relativePath>
+  </parent>
+  <artifactId>filter-valve</artifactId>
+  <packaging>bundle</packaging>
+
+  <!-- Dependency versions are managed by the parent pom. -->
+  <dependencies>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>equinoxSDK381</groupId>
+      <artifactId>javax.servlet</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>orbit</groupId>
+      <artifactId>org.apache.catalina</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <configuration>
+          <instructions>
+            <!-- Built as an OSGi fragment of the Gemini Web Tomcat bundle so the
+                 valve class is visible to Tomcat's classloader when referenced
+                 from tomcat-server.xml. -->
+            <Fragment-Host>org.eclipse.gemini.web.tomcat</Fragment-Host>
+            <Import-Package>javax.servlet,
+              org.apache.catalina,
+              org.apache.catalina.connector,
+              org.apache.catalina.valves,
+              org.slf4j,
+              javax.xml.bind,
+              javax.xml.bind.annotation,
+              org.apache.commons.io,
+              com.google.common.base,
+              com.google.common.collect</Import-Package>
+          </instructions>
+        </configuration>
+      </plugin>
+      <!-- NOTE(review): yang-maven-plugin is declared with no executions or
+           configuration here — presumably inherited from the parent; confirm
+           this bundle actually needs it. -->
+      <plugin>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>yang-maven-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Objects;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.apache.catalina.valves.ValveBase;
+import org.apache.commons.io.FileUtils;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Host;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Parser;
+import org.opendaylight.controller.filtervalve.cors.model.FilterProcessor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Valve that allows adding filters per context. Each context can have its own filter definitions.
+ * Main purpose is to allow externalizing security filters from application bundles to a single
+ * file per OSGi distribution.
+ */
+public class FilterValve extends ValveBase {
+    private static final Logger logger = LoggerFactory.getLogger(FilterValve.class);
+    // Built once by setConfigurationFile(); null means the valve was never configured.
+    private FilterProcessor filterProcessor;
+
+    // Tomcat entry point for each request (overrides ValveBase#invoke).
+    // NOTE(review): consider adding @Override for compile-time checking.
+    public void invoke(final Request request, final Response response) throws IOException, ServletException {
+        if (filterProcessor == null) {
+            // setConfigurationFile was never called - the configurationFile
+            // attribute is missing from the valve definition in tomcat-server.xml.
+            throw new IllegalStateException("Initialization error");
+        }
+
+        // Bridge to the next valve: configured filters terminate their chain here.
+        // Filters may not substitute the request/response objects, because the next
+        // valve must receive the original Catalina Request/Response instances.
+        FilterChain nextValveFilterChain = new FilterChain() {
+            @Override
+            public void doFilter(ServletRequest req, ServletResponse resp) throws IOException, ServletException {
+                boolean reqEquals = Objects.equals(request, req);
+                boolean respEquals = Objects.equals(response, resp);
+                if (reqEquals == false || respEquals == false) {
+                    logger.error("Illegal change was detected by valve - request {} or " +
+                                    "response {} was replaced by a filter. This is not supported by this valve",
+                            reqEquals, respEquals);
+                    throw new IllegalStateException("Request or response was replaced in a filter");
+                }
+                getNext().invoke(request, response);
+            }
+        };
+        filterProcessor.process(request, response, nextValveFilterChain);
+    }
+
+    /**
+     * Called by Tomcat when configurationFile attribute is set.
+     * @param fileName path to xml file containing valve configuration
+     * @throws Exception
+     */
+    @SuppressWarnings("UnusedDeclaration")
+    public void setConfigurationFile(String fileName) throws Exception {
+        File configurationFile = new File(fileName);
+        if (configurationFile.exists() == false || configurationFile.canRead() == false) {
+            throw new IllegalArgumentException(
+                    "Cannot read 'configurationFile' of this valve defined in tomcat-server.xml: " + fileName);
+        }
+        String xmlContent;
+        try {
+            // Reads using the platform default charset (no charset argument).
+            xmlContent = FileUtils.readFileToString(configurationFile);
+        } catch (IOException e) {
+            logger.error("Cannot read {} of this valve defined in tomcat-server.xml", fileName, e);
+            throw new IllegalStateException("Cannot read " + fileName, e);
+        }
+        Host host;
+        try {
+            host = Parser.parse(xmlContent, fileName);
+        } catch (Exception e) {
+            logger.error("Cannot parse {} of this valve defined in tomcat-server.xml", fileName, e);
+            throw new IllegalStateException("Error while parsing " + fileName, e);
+        }
+        filterProcessor = new FilterProcessor(host);
+    }
+
+    /**
+     * @see org.apache.catalina.valves.ValveBase#getInfo()
+     */
+    // NOTE(review): getClass() stringifies as "class x.y.FilterValve", so this returns
+    // "class x.y.FilterValve/1.0"; getClass().getName() was likely intended - confirm.
+    public String getInfo() {
+        return getClass() + "/1.0";
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+import static java.lang.String.format;
+
+import com.google.common.base.Optional;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import org.opendaylight.controller.filtervalve.cors.model.UrlMatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JAXB-bound &lt;Context/&gt; element: filter definitions and filter-mappings for one
+ * web context path. Mutable until {@link #initialize} is called, immutable afterwards.
+ */
+@XmlRootElement
+public class Context {
+    private static final Logger logger = LoggerFactory.getLogger(Context.class);
+
+    private String path;
+    private List<Filter> filters = new ArrayList<>();
+    private List<FilterMapping> filterMappings = new ArrayList<>();
+    private boolean initialized;
+    // Maps url-patterns to their filters; built during initialize().
+    private UrlMatcher<Filter> urlMatcher;
+
+
+    /**
+     * One-time setup after JAXB unmarshalling: initialize each filter (merging init
+     * params from a matching filter-template, if any), resolve filter-mappings to
+     * their filters, build the url matcher, and freeze the lists.
+     *
+     * @param fileName          configuration file name, used only in error messages
+     * @param namesToTemplates  filter templates declared on the enclosing Host, by name
+     */
+    public synchronized void initialize(String fileName, Map<String, Filter> namesToTemplates) {
+        checkState(initialized == false, "Already initialized");
+        Map<String, Filter> namesToFilters = new HashMap<>();
+        for (Filter filter : filters) {
+            try {
+                filter.initialize(fileName, Optional.fromNullable(namesToTemplates.get(filter.getFilterName())));
+            } catch (Exception e) {
+                throw new IllegalStateException(format("Error while processing filter %s of context %s, defined in %s",
+                        filter.getFilterName(), path, fileName), e);
+            }
+            namesToFilters.put(filter.getFilterName(), filter);
+        }
+        filters = Collections.unmodifiableList(new ArrayList<>(filters));
+        // LinkedHashMap preserves filter-mapping declaration order for the matcher.
+        LinkedHashMap<String, Filter> patternMap = new LinkedHashMap<>();
+        for (FilterMapping filterMapping : filterMappings) {
+            filterMapping.initialize();
+            Filter found = namesToFilters.get(filterMapping.getFilterName());
+            if (found != null) {
+                patternMap.put(filterMapping.getUrlPattern(), found);
+            } else {
+                logger.error("Cannot find matching filter for filter-mapping {} of context {}, defined in {}",
+                        filterMapping.getFilterName(), path, fileName);
+                throw new IllegalStateException(format(
+                        "Cannot find filter for filter-mapping %s of context %s, defined in %s",
+                        filterMapping.getFilterName(), path, fileName));
+            }
+        }
+        filterMappings = Collections.unmodifiableList(new ArrayList<>(filterMappings));
+        urlMatcher = new UrlMatcher<>(patternMap);
+        initialized = true;
+    }
+
+    /** Returns the filters whose url-pattern matches pathInfo. Requires initialize() first. */
+    public List<Filter> findMatchingFilters(String pathInfo) {
+        checkState(initialized, "Not initialized");
+        return urlMatcher.findMatchingFilters(pathInfo);
+    }
+
+    @XmlAttribute(name = "path")
+    public String getPath() {
+        return path;
+    }
+
+    public void setPath(String path) {
+        checkArgument(initialized == false, "Already initialized");
+        this.path = path;
+    }
+
+    @XmlElement(name = "filter")
+    public List<Filter> getFilters() {
+        return filters;
+    }
+
+    public void setFilters(List<Filter> filters) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filters = filters;
+    }
+
+    @XmlElement(name = "filter-mapping")
+    public List<FilterMapping> getFilterMappings() {
+        return filterMappings;
+    }
+
+    public void setFilterMappings(List<FilterMapping> filterMappings) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filterMappings = filterMappings;
+    }
+
+    @Override
+    public String toString() {
+        return "Context{" +
+                "path='" + path + '\'' +
+                '}';
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * JAXB-bound &lt;filter/&gt; (or &lt;filter-template/&gt;) element. Doubles as the
+ * {@link FilterConfig} passed to the instantiated javax.servlet.Filter, serving the
+ * configured init-params to it.
+ */
+@XmlRootElement
+public class Filter implements FilterConfig {
+    private static final Logger logger = LoggerFactory.getLogger(Filter.class);
+
+    private String filterName;
+    private String filterClass;
+    private List<InitParam> initParams = new ArrayList<>();
+    // The real servlet filter; created and init()-ed in initialize().
+    private javax.servlet.Filter actualFilter;
+    private boolean initialized, isTemplate;
+
+
+    /**
+     * Called in filter-template nodes defined in <Host/> node - do not actually initialize the filter.
+     * In this case filter is only used to hold values of init params to be merged with
+     * filter defined in <Context/>
+     */
+    public synchronized void initializeTemplate(){
+        checkState(initialized == false, "Already initialized");
+        for (InitParam initParam : initParams) {
+            // Note: method name spelling ("inititialize") follows InitParam's declaration.
+            initParam.inititialize();
+        }
+        isTemplate = true;
+        initialized = true;
+    }
+
+
+    /**
+     * Fully initialize this filter: merge init-params and filter-class from the
+     * optional template, instantiate filterClass reflectively, and call its
+     * init(FilterConfig) with this object as the config.
+     *
+     * @param fileName       configuration file name, used only in error messages
+     * @param maybeTemplate  template with the same filter-name, if the Host declared one
+     */
+    public synchronized void initialize(String fileName, Optional<Filter> maybeTemplate) {
+        checkState(initialized == false, "Already initialized");
+        logger.trace("Initializing filter {} : {}", filterName, filterClass);
+        for (InitParam initParam : initParams) {
+            initParam.inititialize();
+        }
+        if (maybeTemplate.isPresent()) {
+            // merge non conflicting init params
+            Filter template = maybeTemplate.get();
+            checkArgument(template.isTemplate);
+            Map<String, InitParam> templateParams = template.getInitParamsMap();
+            Map<String, InitParam> currentParams = getInitParamsMap();
+            // add values of template that are not present in current
+            MapDifference<String, InitParam> difference = Maps.difference(templateParams, currentParams);
+            for (Entry<String, InitParam> templateUnique : difference.entriesOnlyOnLeft().entrySet()) {
+                initParams.add(templateUnique.getValue());
+            }
+            // merge filterClass
+            if (filterClass == null) {
+                filterClass = template.filterClass;
+            } else if (Objects.equals(filterClass, template.filterClass) == false) {
+                logger.error("Conflict detected in filter-class of {} defined in {}, template class {}, child class {}" ,
+                        filterName, fileName, template.filterClass, filterClass);
+                throw new IllegalStateException("Conflict detected in template/filter filter-class definitions," +
+                        " filter name: " + filterName + " in file " + fileName);
+            }
+        }
+        initParams = Collections.unmodifiableList(new ArrayList<>(initParams));
+        Class<?> clazz;
+        try {
+            clazz = Class.forName(filterClass);
+        } catch (Exception e) {
+            // NOTE(review): this branch is a class-loading failure, but the message
+            // says "Cannot instantiate" - same wording as the newInstance catch below.
+            throw new IllegalStateException("Cannot instantiate class defined in filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        try {
+            actualFilter = (javax.servlet.Filter) clazz.newInstance();
+        } catch (Exception e) {
+            throw new IllegalStateException("Cannot instantiate class defined in filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        logger.trace("Initializing {} with following init-params:{}", filterName, getInitParams());
+        try {
+            actualFilter.init(this);
+        } catch (Exception e) {
+            throw new IllegalStateException("Cannot initialize filter " + filterName
+                    + " in file " + fileName, e);
+        }
+        initialized = true;
+    }
+
+    @Override
+    public ServletContext getServletContext() {
+        throw new UnsupportedOperationException("Getting ServletContext is currently not supported");
+    }
+
+    /** FilterConfig lookup: linear scan of initParams by param-name; null if absent. */
+    @Override
+    public String getInitParameter(String name) {
+        for (InitParam initParam : initParams) {
+            if (Objects.equals(name, initParam.getParamName())) {
+                return initParam.getParamValue();
+            }
+        }
+        return null;
+    }
+
+    /** FilterConfig view of the param names, backed by a one-shot iterator. */
+    @Override
+    public Enumeration<String> getInitParameterNames() {
+        final Iterator<InitParam> iterator = initParams.iterator();
+        return new Enumeration<String>() {
+            @Override
+            public boolean hasMoreElements() {
+                return iterator.hasNext();
+            }
+
+            @Override
+            public String nextElement() {
+                return iterator.next().getParamName();
+            }
+        };
+    }
+
+    public javax.servlet.Filter getActualFilter() {
+        checkState(initialized, "Not initialized");
+        return actualFilter;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+
+    @XmlElement(name = "filter-name")
+    public String getFilterName() {
+        return filterName;
+    }
+
+    public void setFilterName(String filterName) {
+        this.filterName = filterName;
+    }
+
+    @XmlElement(name = "filter-class")
+    public String getFilterClass() {
+        return filterClass;
+    }
+
+    public void setFilterClass(String filterClass) {
+        this.filterClass = filterClass;
+    }
+
+    @XmlElement(name = "init-param")
+    public List<InitParam> getInitParams() {
+        return initParams;
+    }
+
+    public void setInitParams(List<InitParam> initParams) {
+        this.initParams = initParams;
+    }
+
+
+    @Override
+    public String toString() {
+        return "Filter{" +
+                "filterName='" + filterName + '\'' +
+                '}';
+    }
+
+    /** Init-params indexed by param-name; requires every param to be initialized. */
+    public Map<String, InitParam> getInitParamsMap() {
+        Map<String, InitParam> result = new HashMap<>();
+        for (InitParam initParam : initParams) {
+            checkState(initParam.isInitialized());
+            result.put(initParam.getParamName(), initParam);
+        }
+        return result;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * JAXB-bound &lt;filter-mapping/&gt; element: associates a filter-name with a
+ * url-pattern. Mutable until {@link #initialize} is called, immutable afterwards.
+ */
+@XmlRootElement
+public class FilterMapping {
+    private String filterName;
+    private String urlPattern;
+    private boolean initialized;
+
+    @XmlElement(name = "filter-name")
+    public String getFilterName() {
+        return filterName;
+    }
+
+    public void setFilterName(String filterName) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filterName = filterName;
+    }
+
+    @XmlElement(name = "url-pattern")
+    public String getUrlPattern() {
+        return urlPattern;
+    }
+
+    public void setUrlPattern(String urlPattern) {
+        checkArgument(initialized == false, "Already initialized");
+        this.urlPattern = urlPattern;
+    }
+
+    /** Freezes this mapping; subsequent setter calls fail. */
+    public synchronized void initialize() {
+        checkArgument(initialized == false, "Already initialized");
+        initialized = true;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.base.Optional;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+
+/**
+ * Root element, arbitrarily named Host to match tomcat-server.xml, but does not allow specifying which host
+ * name to be matched.
+ */
+@XmlRootElement(name = "Host")
+public class Host {
+    private List<Context> contexts = new ArrayList<>();
+    private List<Filter> filterTemplates = new ArrayList<>();
+    private boolean initialized;
+    // Contexts indexed by path; built and frozen in initialize().
+    private Map<String, Context> contextMap;
+
+
+    /**
+     * One-time setup after JAXB unmarshalling: initialize filter templates, then
+     * initialize each context (passing it the templates by name), reject duplicate
+     * context paths, and freeze the collections.
+     *
+     * @param fileName configuration file name, used only in error messages
+     */
+    public synchronized void initialize(String fileName) {
+        checkState(initialized == false, "Already initialized");
+        Map<String, Filter> namesToTemplates = new HashMap<>();
+        for (Filter template : filterTemplates) {
+            template.initializeTemplate();
+            namesToTemplates.put(template.getFilterName(), template);
+        }
+        contextMap = new HashMap<>();
+        for (Context context : getContexts()) {
+            // Guava Preconditions use %s placeholders ({} is SLF4J syntax and would
+            // appear literally in the message, dropping the path and file name).
+            checkState(contextMap.containsKey(context.getPath()) == false,
+                    "Context %s already defined in %s", context.getPath(), fileName);
+            context.initialize(fileName, namesToTemplates);
+            contextMap.put(context.getPath(), context);
+        }
+        contextMap = Collections.unmodifiableMap(new HashMap<>(contextMap));
+        contexts = Collections.unmodifiableList(new ArrayList<>(contexts));
+        initialized = true;
+    }
+
+    /** Looks up the context configured for contextPath; absent if none matches. */
+    public Optional<Context> findContext(String contextPath) {
+        checkState(initialized, "Not initialized");
+        Context context = contextMap.get(contextPath);
+        return Optional.fromNullable(context);
+    }
+
+    @XmlElement(name = "Context")
+    public List<Context> getContexts() {
+        return contexts;
+    }
+
+    public void setContexts(List<Context> contexts) {
+        checkArgument(initialized == false, "Already initialized");
+        this.contexts = contexts;
+    }
+
+    @XmlElement(name = "filter-template")
+    public List<Filter> getFilterTemplates() {
+        return filterTemplates;
+    }
+
+    public void setFilterTemplates(List<Filter> filterTemplates) {
+        checkArgument(initialized == false, "Already initialized");
+        this.filterTemplates = filterTemplates;
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * JAXB-bound &lt;init-param/&gt; element: a single name/value pair passed to a filter.
+ */
+@XmlRootElement
+public class InitParam {
+    private String paramName;
+    private String paramValue;
+    private boolean initialized;
+
+    // NOTE(review): method name has a typo ("inititialize", extra "ti"); callers in
+    // Filter use this spelling, so renaming requires a coordinated change there too.
+    /** Freezes this param; calling twice fails. */
+    public synchronized void inititialize() {
+        checkState(initialized == false, "Already initialized");
+        initialized = true;
+    }
+
+    @XmlElement(name = "param-name")
+    public String getParamName() {
+        return paramName;
+    }
+
+    public void setParamName(String paramName) {
+        this.paramName = paramName;
+    }
+
+    @XmlElement(name = "param-value")
+    public String getParamValue() {
+        return paramValue;
+    }
+
+    public void setParamValue(String paramValue) {
+        this.paramValue = paramValue;
+    }
+
+    public boolean isInitialized() {
+        return initialized;
+    }
+
+    @Override
+    public String toString() {
+        return "{" + paramName + '=' + paramValue + "}";
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.StringReader;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+
+/** Unmarshals the valve configuration XML into an initialized {@link Host} tree. */
+public class Parser {
+
+    /**
+     * Parses and initializes the configuration.
+     *
+     * @param xmlFileContent raw XML text of the configuration file
+     * @param fileName       file name, passed through for error messages only
+     * @throws JAXBException on malformed XML
+     */
+    // NOTE(review): JAXBContext.newInstance is created on every call; if parse() is
+    // invoked more than once, caching the context would be cheaper - confirm usage.
+    public static Host parse(String xmlFileContent, String fileName) throws JAXBException {
+        JAXBContext context = JAXBContext.newInstance(Host.class);
+        javax.xml.bind.Unmarshaller um = context.createUnmarshaller();
+        Host host = (Host) um.unmarshal(new StringReader(xmlFileContent));
+        // Host.initialize freezes the tree and validates filter/mapping references.
+        host.initialize(fileName);
+        return host;
+    }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import com.google.common.base.Optional;
+import java.io.IOException;
+import java.util.List;
+import java.util.ListIterator;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import org.apache.catalina.connector.Request;
+import org.apache.catalina.connector.Response;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Context;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Filter;
+import org.opendaylight.controller.filtervalve.cors.jaxb.Host;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Applies the filters configured for a {@link Host} to an incoming Tomcat
+ * request before handing control to the next valve's filter chain.
+ */
+public class FilterProcessor {
+ private static final Logger logger = LoggerFactory.getLogger(FilterProcessor.class);
+
+ // Parsed host configuration; source of per-context filter definitions.
+ private final Host host;
+
+ public FilterProcessor(Host host) {
+ this.host = host;
+ }
+
+ /**
+ * Runs every filter configured for the request's context/path, then the next
+ * valve's chain. If no context matches, delegates straight to the next valve.
+ *
+ * @param request current Tomcat request
+ * @param response current Tomcat response
+ * @param nextValveFilterChain chain that invokes the next valve
+ */
+ public void process(Request request, Response response, FilterChain nextValveFilterChain)
+ throws IOException, ServletException {
+
+ String contextPath = request.getContext().getPath();
+ String pathInfo = request.getPathInfo();
+
+ Optional<Context> maybeContext = host.findContext(contextPath);
+ logger.trace("Processing context {} path {}, found {}", contextPath, pathInfo, maybeContext);
+ if (maybeContext.isPresent()) {
+ // process filters
+ Context context = maybeContext.get();
+ List<Filter> matchingFilters = context.findMatchingFilters(pathInfo);
+ // Build the chain back-to-front: iterate the matching filters in reverse,
+ // wrapping each one around the chain built so far, so that at the end
+ // `fromLast` invokes the filters in their original list order and
+ // terminates in nextValveFilterChain.
+ FilterChain fromLast = nextValveFilterChain;
+ ListIterator<Filter> it = matchingFilters.listIterator(matchingFilters.size());
+ // Hoisted so the per-filter trace guard is evaluated once per request.
+ final boolean trace = logger.isTraceEnabled();
+ while (it.hasPrevious()) {
+ final Filter currentFilter = it.previous();
+ final FilterChain copy = fromLast;
+ fromLast = new FilterChain() {
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response) throws IOException, ServletException {
+ if (trace) {
+ logger.trace("Applying {}", currentFilter);
+ }
+ javax.servlet.Filter actualFilter = currentFilter.getActualFilter();
+ actualFilter.doFilter(request, response, copy);
+ }
+ };
+ }
+ // call first filter
+ fromLast.doFilter(request, response);
+ } else {
+ // move to next valve
+ nextValveFilterChain.doFilter(request, response);
+ }
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.collect.Maps.immutableEntry;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Match incoming URL with user defined patterns according to servlet specification.
+ * In the Web application deployment descriptor, the following syntax is used to define mappings:
+ * <ul>
+ * <li>A string beginning with a '/' character and ending with a '/*' suffix is used for path mapping.</li>
+ * <li>A string beginning with a '*.' prefix is used as an extension mapping.</li>
+ * <li>All other strings are used for exact matches only.</li>
+ * </ul>
+ */
+public class UrlMatcher<FILTER> {
+ private static final Logger logger = LoggerFactory.getLogger(UrlMatcher.class);
+ // order index for each FILTER is kept as Entry.value
+ private final Map<String, Entry<FILTER, Integer>> prefixMap = new HashMap<>(); // contains patterns ending with '/*', '*' is stripped from each key
+ private final Map<String, Entry<FILTER, Integer>> suffixMap = new HashMap<>(); // contains patterns starting with '*.' prefix, '*' is stripped from each key
+ private final Map<String, Entry<FILTER, Integer>> exactMatchMap = new HashMap<>(); // contains exact matches only
+
+ /**
+ * Partitions the given patterns into prefix, extension and exact-match maps,
+ * remembering each entry's insertion index so match results can preserve
+ * declaration order.
+ *
+ * @param patternMap order preserving map containing path info pattern as key
+ */
+ public UrlMatcher(LinkedHashMap<String, FILTER> patternMap) {
+ int idx = 0;
+ for (Entry<String, FILTER> entry : patternMap.entrySet()) {
+ idx++;
+ String pattern = checkNotNull(entry.getKey());
+ FILTER value = entry.getValue();
+ Entry<FILTER, Integer> valueWithIdx = immutableEntry(value, idx);
+ if (pattern.startsWith("/") && pattern.endsWith("/*")) {
+ // Strip only the trailing '*', keeping the '/': "/foo/*" -> "/foo/".
+ // NOTE(review): as a result "/foo/*" matches "/foo/bar" but not "/foo"
+ // itself, which deviates from strict servlet-spec path mapping — confirm
+ // whether that is intended.
+ pattern = pattern.substring(0, pattern.length() - 1);
+ prefixMap.put(pattern, valueWithIdx);
+ } else if (pattern.startsWith("*.")) {
+ // Strip only the leading '*': "*.jsp" -> ".jsp", checked via endsWith.
+ pattern = pattern.substring(1);
+ suffixMap.put(pattern, valueWithIdx);
+ } else {
+ exactMatchMap.put(pattern, valueWithIdx);
+ }
+ }
+ }
+
+ /**
+ * Find filters matching path
+ *
+ * @param pathInfo as returned by request.getPathInfo()
+ * @return list of matching filters, ordered by their original insertion index
+ */
+ public List<FILTER> findMatchingFilters(String pathInfo) {
+ checkNotNull(pathInfo);
+ // Keyed by insertion index so values come out in declaration order.
+ TreeMap<Integer, FILTER> sortedMap = new TreeMap<>();
+ // add matching prefixes
+ for (Entry<String, Entry<FILTER, Integer>> prefixEntry : prefixMap.entrySet()) {
+ if (pathInfo.startsWith(prefixEntry.getKey())) {
+ put(sortedMap, prefixEntry.getValue());
+ }
+ }
+ // add matching suffixes
+ for (Entry<String, Entry<FILTER, Integer>> suffixEntry : suffixMap.entrySet()) {
+ if (pathInfo.endsWith(suffixEntry.getKey())) {
+ put(sortedMap, suffixEntry.getValue());
+ }
+ }
+ // add exact match
+ Entry<FILTER, Integer> exactMatch = exactMatchMap.get(pathInfo);
+ if (exactMatch != null) {
+ put(sortedMap, exactMatch);
+ }
+ ArrayList<FILTER> filters = new ArrayList<>(sortedMap.values());
+ logger.trace("Matching filters for path {} are {}", pathInfo, filters);
+ return filters;
+ }
+
+ // Inserts (index -> filter), flipping the stored (filter, index) entry.
+ private void put(TreeMap<Integer, FILTER> sortedMap, Entry<FILTER, Integer> entry) {
+ sortedMap.put(entry.getValue(), entry.getKey());
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.IOException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+/**
+ * Filter implementation whose every method throws
+ * {@link UnsupportedOperationException} — it is never meant to be executed,
+ * only referenced by class name (e.g. in configuration-conflict tests).
+ */
+public class DummyFilter implements javax.servlet.Filter {
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void destroy() {
+ throw new UnsupportedOperationException();
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import java.io.IOException;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+/**
+ * Test filter that records the {@link FilterConfig} passed to {@code init} so
+ * tests can inspect the merged init parameters. {@code doFilter} and
+ * {@code destroy} are intentionally unsupported.
+ */
+public class MockedFilter implements javax.servlet.Filter {
+ // Captured on init(); exposed via getFilterConfig() for assertions.
+ private FilterConfig filterConfig;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ this.filterConfig = filterConfig;
+ }
+
+ /** @return the config captured by init(), or null if init() was never called */
+ public FilterConfig getFilterConfig() {
+ return filterConfig;
+ }
+
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void destroy() {
+ throw new UnsupportedOperationException();
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.jaxb;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.matchers.JUnitMatchers.containsString;
+
+import com.google.common.base.Optional;
+import java.io.File;
+import javax.servlet.FilterConfig;
+import org.apache.commons.io.FileUtils;
+import org.junit.Test;
+
+/**
+ * Tests for {@link Parser}: successful parsing with init-param merging, and
+ * failure modes for missing filters and conflicting filter-class definitions.
+ */
+public class ParserTest {
+
+ // Happy path: template and context filter definitions are merged, with
+ // context-level init-params overriding template values.
+ @Test
+ public void testParsing() throws Exception {
+ File xmlFile = new File(getClass().getResource("/sample-cors-config.xml").getFile());
+ assertThat(xmlFile.canRead(), is(true));
+ String xmlFileContent = FileUtils.readFileToString(xmlFile);
+ Host host = Parser.parse(xmlFileContent, "fileName");
+ assertEquals(1, host.getContexts().size());
+ // check that MockedFilter has init params merged/replaced
+ Optional<Context> context = host.findContext("/restconf");
+ assertTrue(context.isPresent());
+ assertEquals(1, context.get().getFilters().size());
+ MockedFilter filter = (MockedFilter) context.get().getFilters().get(0).getActualFilter();
+ FilterConfig filterConfig = filter.getFilterConfig();
+ // "*" comes from the template; "11" overrides the template's "10".
+ assertEquals("*", filterConfig.getInitParameter("cors.allowed.origins"));
+ assertEquals("11", filterConfig.getInitParameter("cors.preflight.maxage"));
+ }
+
+
+ // A filter-mapping that references an undefined filter must fail parsing.
+ @Test
+ public void testParsing_NoFilterDefined() throws Exception {
+ File xmlFile = new File(getClass().getResource("/no-filter-defined.xml").getFile());
+ assertThat(xmlFile.canRead(), is(true));
+ String xmlFileContent = FileUtils.readFileToString(xmlFile);
+ try {
+ Parser.parse(xmlFileContent, "fileName");
+ fail();
+ }catch(Exception e){
+ assertThat(e.getMessage(), containsString("Cannot find filter for filter-mapping CorsFilter"));
+ }
+ }
+
+ // A context filter redeclaring a different filter-class than its template
+ // must be rejected with a conflict error.
+ @Test
+ public void testConflictingClass() throws Exception {
+ File xmlFile = new File(getClass().getResource("/conflicting-class.xml").getFile());
+ assertThat(xmlFile.canRead(), is(true));
+ String xmlFileContent = FileUtils.readFileToString(xmlFile);
+ try {
+ Parser.parse(xmlFileContent, "fileName");
+ fail();
+ } catch (RuntimeException e) {
+ assertThat(e.getMessage(), containsString("Error while processing filter CorsFilter of context /restconf"));
+ assertThat(e.getCause().getMessage(), containsString("Conflict detected in template/filter filter-class definitions, filter name: CorsFilter"));
+ }
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.filtervalve.cors.model;
+
+import static java.util.Arrays.asList;
+import static org.junit.Assert.assertEquals;
+
+import java.util.LinkedHashMap;
+import org.junit.Test;
+
+/**
+ * Tests for {@link UrlMatcher}: exercises exact, prefix ('/*') and extension
+ * ('*.jsp') mappings, and that results preserve pattern declaration order.
+ */
+public class UrlMatcherTest {
+ UrlMatcher<String> urlMatcher;
+
+ @Test
+ public void test() throws Exception {
+ final String defaultFilter = "default";
+ final String exactMatchFilter = "someFilter";
+ final String jspFilter = "jspFilter";
+ final String exactMatch = "/somePath";
+ final String prefixFilter = "prefixFilter";
+ // LinkedHashMap: insertion order defines expected filter ordering below.
+ LinkedHashMap<String, String> patternMap = new LinkedHashMap<String, String>() {
+ {
+ put(exactMatch, exactMatchFilter);
+ put("/*", defaultFilter);
+ put("*.jsp", jspFilter);
+ put("/foo/*", prefixFilter);
+ }
+ };
+ urlMatcher = new UrlMatcher<>(patternMap);
+ assertMatches("/abc", defaultFilter);
+ assertMatches(exactMatch, exactMatchFilter, defaultFilter);
+ assertMatches("/some.jsp", defaultFilter, jspFilter);
+ assertMatches("/foo/bar", defaultFilter, prefixFilter);
+ assertMatches("/foo/bar.jsp", defaultFilter, jspFilter, prefixFilter);
+ }
+
+ // Asserts the matcher returns exactly the given filters, in declaration order.
+ public void assertMatches(String testedPath, String... filters) {
+ assertEquals(asList(filters), urlMatcher.findMatchingFilters(testedPath));
+ }
+
+}
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- conflict -->
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.DummyFilter</filter-class>
+ </filter>
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <!-- Filters are allowed here, only serving as a template -->
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.methods</param-name>
+ <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.headers</param-name>
+ <param-value>Content-Type,X-Requested-With,accept,authorization,
+ origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers
+ </param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.exposed.headers</param-name>
+ <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.support.credentials</param-name>
+ <param-value>true</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <!-- Filters are also allowed here. -->
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- init params can be added/overridden if template is used -->
+ </filter>
+ <!-- only local references are allowed -->
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+
+ <Context path="/controller/nb/v2/connectionmanager">
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.opendaylight.controller.filtervalve.cors.jaxb.MockedFilter</filter-class>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- override value -->
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>11</param-value>
+ </init-param>
+ </filter>
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+</Host>
</prerequisites>
<properties>
+ <akka.version>2.3.2</akka.version>
<aopalliance.version>1.0.0</aopalliance.version>
<appauth.version>0.4.2-SNAPSHOT</appauth.version>
+ <archetype-app-northbound>0.0.1-SNAPSHOT</archetype-app-northbound>
<aries.util.version>1.1.0</aries.util.version>
<!-- Controller Modules Versions -->
<arphandler.version>0.5.2-SNAPSHOT</arphandler.version>
<asm.version>4.1</asm.version>
<!-- Plugin Versions -->
<bouncycastle.version>1.50</bouncycastle.version>
<bundle.plugin.version>2.4.0</bundle.plugin.version>
+ <bundlescanner.api.version>0.4.2-SNAPSHOT</bundlescanner.api.version>
+ <bundlescanner.implementation.version>0.4.2-SNAPSHOT</bundlescanner.implementation.version>
<bundlescanner.version>0.4.2-SNAPSHOT</bundlescanner.version>
<checkstyle.version>2.10</checkstyle.version>
<clustering.services.version>0.5.1-SNAPSHOT</clustering.services.version>
<clustering.stub.version>0.4.2-SNAPSHOT</clustering.stub.version>
<clustering.test.version>0.4.2-SNAPSHOT</clustering.test.version>
<commmons.northbound.version>0.4.2-SNAPSHOT</commmons.northbound.version>
+ <commons.checkstyle.version>0.0.3-SNAPSHOT</commons.checkstyle.version>
<!-- Third Party Versions -->
<commons.codec.version>1.7</commons.codec.version>
<commons.fileupload.version>1.2.2</commons.fileupload.version>
<commons.httpclient.version>0.1.2-SNAPSHOT</commons.httpclient.version>
<commons.io.version>2.4</commons.io.version>
<commons.lang.version>3.1</commons.lang.version>
+ <commons.logback_settings.version>0.0.2-SNAPSHOT</commons.logback_settings.version>
<commons.net.version>3.0.1</commons.net.version>
+ <commons.opendaylight.commons.httpclient>0.1.2-SNAPSHOT</commons.opendaylight.commons.httpclient>
+ <commons.opendaylight.concepts.version>0.5.2-SNAPSHOT</commons.opendaylight.concepts.version>
+ <commons.opendaylight.version>1.4.2-SNAPSHOT</commons.opendaylight.version>
+ <commons.parent.version>1.0.2-SNAPSHOT</commons.parent.version>
<compiler.version>2.3.2</compiler.version>
<concepts.version>0.5.2-SNAPSHOT</concepts.version>
<config.version>0.2.5-SNAPSHOT</config.version>
<connectionmanager.version>0.1.2-SNAPSHOT</connectionmanager.version>
<containermanager.it.version>0.5.2-SNAPSHOT</containermanager.it.version>
<containermanager.northbound.version>0.4.2-SNAPSHOT</containermanager.northbound.version>
+ <containermanager.shell.version>0.5.2-SNAPSHOT</containermanager.shell.version>
<containermanager.version>0.5.2-SNAPSHOT</containermanager.version>
<controllermanager.northbound.version>0.0.2-SNAPSHOT</controllermanager.northbound.version>
<corsfilter.version>7.0.42</corsfilter.version>
<ctrie.version>0.2.0</ctrie.version>
<devices.web.version>0.4.2-SNAPSHOT</devices.web.version>
+ <dummy-console.version>1.1.0-SNAPSHOT</dummy-console.version>
<eclipse.persistence.version>2.5.0</eclipse.persistence.version>
<!-- enforcer version -->
<enforcer.version>1.3.1</enforcer.version>
<felix.dependencymanager.version>3.1.0</felix.dependencymanager.version>
<felix.fileinstall.version>3.1.6</felix.fileinstall.version>
<felix.webconsole.version>4.2.0</felix.webconsole.version>
+ <filtervalve.version>1.4.2-SNAPSHOT</filtervalve.version>
<flowprogrammer.northbound.version>0.4.2-SNAPSHOT</flowprogrammer.northbound.version>
<flows.web.version>0.4.2-SNAPSHOT</flows.web.version>
<forwarding.staticrouting>0.5.2-SNAPSHOT</forwarding.staticrouting>
<hosttracker.implementation.version>0.5.2-SNAPSHOT</hosttracker.implementation.version>
<hosttracker.northbound.version>0.4.2-SNAPSHOT</hosttracker.northbound.version>
<hosttracker_new.api.version>0.4.2-SNAPSHOT</hosttracker_new.api.version>
+ <hosttracker_new.implementation.version>0.4.2-SNAPSHOT</hosttracker_new.implementation.version>
+ <httpservice-bridge.northbound.version>0.0.2-SNAPSHOT</httpservice-bridge.northbound.version>
<ietf-inet-types.version>2010.09.24.4-SNAPSHOT</ietf-inet-types.version>
<ietf-restconf.version>2013.10.19.1-SNAPSHOT</ietf-restconf.version>
<ietf-topology.version>2013.10.21.2-SNAPSHOT</ietf-topology.version>
<jersey2.version>2.8</jersey2.version>
<jettison.version>1.3.3</jettison.version>
<jmxGeneratorPath>src/main/yang-gen-config</jmxGeneratorPath>
+ <jolokia-bridge.version>0.0.2-SNAPSHOT</jolokia-bridge.version>
<jolokia.version>1.1.4</jolokia.version>
<jsr305.api.version>2.0.1</jsr305.api.version>
<jsr311.api.version>1.1.1</jsr311.api.version>
<jsr311.v2.api.version>2.0</jsr311.v2.api.version>
<junit.version>4.8.1</junit.version>
+ <karaf.branding.version>1.0.0-SNAPSHOT</karaf.branding.version>
+ <karaf.shell.version>3.0.0</karaf.shell.version>
<karaf.version>3.0.1</karaf.version>
<logback.version>1.0.9</logback.version>
<logging.bridge.version>0.4.2-SNAPSHOT</logging.bridge.version>
<networkconfig.neutron.version>0.4.2-SNAPSHOT</networkconfig.neutron.version>
<!-- ODL repository / plugin repository -->
<nexusproxy>http://nexus.opendaylight.org/content</nexusproxy>
+ <northbound.commons.version>0.4.2-SNAPSHOT</northbound.commons.version>
+ <northbound.hosttracker.version>1.4.2-SNAPSHOT</northbound.hosttracker.version>
+ <northbound.jolokia.version>1.4.2-SNAPSHOT</northbound.jolokia.version>
<opendaylight-l2-types.version>2013.08.27.4-SNAPSHOT</opendaylight-l2-types.version>
<org.json.version>20080701</org.json.version>
<osgi-brandfragment.web.version>0.0.2-SNAPSHOT</osgi-brandfragment.web.version>
<samples.loadbalancer.northbound.version>0.4.2-SNAPSHOT</samples.loadbalancer.northbound.version>
<samples.simpleforwarding.version>0.4.2-SNAPSHOT</samples.simpleforwarding.version>
<sanitytest.version>0.4.2-SNAPSHOT</sanitytest.version>
+ <scala.version>2.11</scala.version>
<security.version>0.4.2-SNAPSHOT</security.version>
<sitedeploy>dav:http://nexus.opendaylight.org/content/sites/site</sitedeploy>
<siteplugin>3.2</siteplugin>
<xtend.dstdir>src/main/xtend-gen</xtend.dstdir>
<xtend.version>2.4.3</xtend.version>
<yang-ext.version>2013.09.07.4-SNAPSHOT</yang-ext.version>
+ <yang-jmx-generator.version>1.0.0-SNAPSHOT</yang-jmx-generator.version>
<yangtools.version>0.6.2-SNAPSHOT</yangtools.version>
</properties>
<artifactId>jersey-core</artifactId>
<version>${jersey.version}</version>
</dependency>
+
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
<version>${jersey.version}</version>
</dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-actor_${scala.version}</artifactId>
+ <version>${akka.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-cluster_${scala.version}</artifactId>
+ <version>${akka.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-persistence-experimental_${scala.version}</artifactId>
+ <version>${akka.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-remote_${scala.version}</artifactId>
+ <version>${akka.version}</version>
+ </dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<artifactId>containermanager.northbound</artifactId>
<version>${containermanager.northbound.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>containermanager.shell</artifactId>
+ <version>${containermanager.shell.version}</version>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>controllermanager.northbound</artifactId>
<artifactId>devices.web</artifactId>
<version>${devices.web.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>filter-valve</artifactId>
+ <version>${filtervalve.version}</version>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>flowprogrammer.northbound</artifactId>
<artifactId>jolokia-bridge</artifactId>
<version>0.0.2-SNAPSHOT</version>
</dependency>
+ <!-- Karaf Dependencies -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <version>${karaf.branding.version}</version>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>logback-config</artifactId>
<artifactId>osgi-brandfragment.web</artifactId>
<version>${osgi-brandfragment.web.version}</version>
</dependency>
-
+ <!-- Southbound bundles -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>protocol-framework</artifactId>
<version>${protocol-framework.version}</version>
</dependency>
-
- <!-- Southbound bundles -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>protocol_plugins.openflow</artifactId>
<artifactId>chameleon-mbeans</artifactId>
<version>1.0.0</version>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>${scala.version}.1</version>
+ </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectory>${project.basedir}</sourceDirectory>
<includes>**\/*.java,**\/*.xml,**\/*.ini,**\/*.sh,**\/*.bat</includes>
- <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/${jmxGeneratorPath}\/,**\/${salGeneratorPath}\/</excludes>
+ <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/${jmxGeneratorPath}\/,**\/${salGeneratorPath}\/,**\/xtend-gen\/</excludes>
</configuration>
<dependencies>
<dependency>
import com.google.common.collect.Collections2;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
// TODO: the XPath should be parsed by code generator IMO
private static final String MAGIC_STRING = "MAGIC_STRING";
private static final String MODULE_CONDITION_XPATH_TEMPLATE = "^/MAGIC_STRING:modules/MAGIC_STRING:module/MAGIC_STRING:type\\s*=\\s*['\"](.+)['\"]$";
- private static final SchemaPath expectedConfigurationAugmentationSchemaPath = new SchemaPath(
- Arrays.asList(createConfigQName("modules"),
- createConfigQName("module"),
- createConfigQName("configuration")), true);
- private static final SchemaPath expectedStateAugmentationSchemaPath = new SchemaPath(
- Arrays.asList(createConfigQName("modules"),
- createConfigQName("module"), createConfigQName("state")),
- true);
+ private static final SchemaPath expectedConfigurationAugmentationSchemaPath = SchemaPath.create(true,
+ createConfigQName("modules"), createConfigQName("module"), createConfigQName("configuration"));
+ private static final SchemaPath expectedStateAugmentationSchemaPath = SchemaPath.create(true,
+ createConfigQName("modules"), createConfigQName("module"), createConfigQName("state"));
private static final Pattern PREFIX_COLON_LOCAL_NAME = Pattern
.compile("^(.+):(.+)$");
Map<ConnectionConstants, String> params) {
if (connectionService == null)
return null;
- Node node = connectionService.connect(connectionIdentifier, params);
+ Node node = connectionService.connect(type, connectionIdentifier, params);
AbstractScheme scheme = schemes.get(activeScheme);
if (scheme != null && node != null)
scheme.addNode(node);
--- /dev/null
+package org.opendaylight.controller.containermanager;
+
+import java.util.List;
+
+/**
+ * CLI facade for the container manager: each command returns its output as a
+ * list of printable lines. Implemented by ContainerManager; the short method
+ * names (psc, pfc, ...) presumably mirror legacy console command names —
+ * confirm against the shell command registrations.
+ */
+public interface IContainerManagerShell {
+ /** Print container configs, one line per container plus a total count. */
+ public List<String> psc();
+ /** Print each container's flow configs. */
+ public List<String> pfc();
+ /** Print container data: node ports and container flow specs. */
+ public List<String> psd();
+ /** Print node-connector-to-containers associations. */
+ public List<String> psp();
+ /** Print node-to-containers associations. */
+ public List<String> psm();
+ public List<String> addContainer(String arg1, String arg2);
+ public List<String> createContainer(String arg1, String arg2);
+ public List<String> removeContainerShell(String arg1);
+ public List<String> addContainerEntry(String arg1, String arg2, String arg3);
+ public List<String> removeContainerEntry(String arg1, String arg2, String arg3);
+ public List<String> addContainerFlow(String arg1, String arg2, String arg3);
+ public List<String> removeContainerFlow(String arg1, String arg2);
+ public List<String> containermgrGetRoles();
+ public List<String> containermgrGetAuthorizedGroups(String arg1);
+ public List<String> containermgrGetAuthorizedResources(String arg1);
+ public List<String> containermgrGetResourcesForGroup(String arg1);
+ public List<String> containermgrGetUserLevel(String arg1);
+ public List<String> containermgrGetUserResources(String arg1);
+ public List<String> saveConfig();
+}
\ No newline at end of file
package org.opendaylight.controller.containermanager.internal;
import org.eclipse.osgi.framework.console.CommandProvider;
+
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Set;
import java.util.Hashtable;
+
import org.opendaylight.controller.containermanager.IContainerManager;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
import org.apache.felix.dm.Component;
import org.opendaylight.controller.clustering.services.ICacheUpdateAware;
import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
CommandProvider.class.getName(),
IContainerInternal.class.getName(),
IContainerAuthorization.class.getName(),
- ICacheUpdateAware.class.getName()}, props);
+ ICacheUpdateAware.class.getName(),
+ IContainerManagerShell.class.getName()}, props);
c.add(createServiceDependency()
.setService(IClusterGlobalServices.class)
import org.opendaylight.controller.containermanager.ContainerFlowConfig;
import org.opendaylight.controller.containermanager.IContainerAuthorization;
import org.opendaylight.controller.containermanager.IContainerManager;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
import org.opendaylight.controller.containermanager.NodeConnectorsChangeEvent;
import org.opendaylight.controller.sal.authorization.AppRoleLevel;
import org.opendaylight.controller.sal.authorization.Privilege;
public class ContainerManager extends Authorization<String> implements IContainerManager, IObjectReader,
CommandProvider, ICacheUpdateAware<String, Object>, IContainerInternal, IContainerAuthorization,
- IConfigurationAware {
+ IConfigurationAware, IContainerManagerShell {
private static final Logger logger = LoggerFactory.getLogger(ContainerManager.class);
private static String CONTAINERS_FILE_NAME = "containers.conf";
private static final String allContainersGroup = "allContainers";
public boolean inContainerMode() {
return this.containerConfigs.size() > 0;
}
+
+ public List<String> psc() {
+ List<String> result = new ArrayList<String>();
+ for (Map.Entry<String, ContainerConfig> entry : containerConfigs.entrySet()) {
+ ContainerConfig sc = entry.getValue();
+ result.add(String.format("%s: %s", sc.getContainerName(), sc.toString()));
+ }
+ result.add("Total number of containers: " + containerConfigs.entrySet().size());
+ return result;
+ }
+
+ public List<String> pfc() {
+ List<String> result = new ArrayList<String>();
+ for (Map.Entry<String, ContainerConfig> entry : containerConfigs.entrySet()) {
+ ContainerConfig sc = entry.getValue();
+ result.add(String.format("%s: %s", sc.getContainerName(), sc.getContainerFlowConfigs()));
+ }
+ return result;
+ }
+
+ public List<String> psd() {
+ List<String> result = new ArrayList<String>();
+ for (String containerName : containerData.keySet()) {
+ ContainerData sd = containerData.get(containerName);
+ for (Node sid : sd.getSwPorts().keySet()) {
+ Set<NodeConnector> s = sd.getSwPorts().get(sid);
+ result.add("\t" + sid + " : " + s);
+ }
+
+ for (ContainerFlow s : sd.getContainerFlowSpecs()) {
+ result.add("\t" + s.toString());
+ }
+ }
+ return result;
+ }
+
+ public List<String> psp() {
+ List<String> result = new ArrayList<String>();
+ for (NodeConnector sp : nodeConnectorToContainers.keySet()) {
+ result.add(nodeConnectorToContainers.get(sp).toString());
+ }
+ return result;
+ }
+
+ public List<String> psm() {
+ List<String> result = new ArrayList<String>();
+ for (Node sp : nodeToContainers.keySet()) {
+ result.add(nodeToContainers.get(sp).toString());
+ }
+ return result;
+ }
+
+ public List<String> addContainer(String arg1, String arg2) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String staticVlan = arg2;
+ ContainerConfig containerConfig = new ContainerConfig(containerName, staticVlan, null, null);
+ result.add((this.addRemoveContainer(containerConfig, false)).toString());
+ return result;
+ }
+
+ public List<String> createContainer(String arg1, String arg2) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String staticVlan = arg2;
+ if (staticVlan == null) {
+ result.add("Static Vlan not specified");
+ return result;
+ }
+ List<String> ports = new ArrayList<String>();
+ for (long l = 1L; l < 10L; l++) {
+ ports.add(NodeConnectorCreator.createOFNodeConnector((short) 1, NodeCreator.createOFNode(l)).toString());
+ }
+ List<ContainerFlowConfig> cFlowList = new ArrayList<ContainerFlowConfig>();
+ cFlowList.add(this.createSampleContainerFlowConfig("tcp", true));
+ ContainerConfig containerConfig = new ContainerConfig(containerName, staticVlan, ports, cFlowList);
+ result.add((this.addRemoveContainer(containerConfig, false)).toString());
+ return result;
+ }
+
+ public List<String> removeContainerShell(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ ContainerConfig containerConfig = new ContainerConfig(containerName, "", null, null);
+ result.add((this.addRemoveContainer(containerConfig, true)).toString());
+ return result;
+ }
+
+ public List<String> addContainerEntry(String arg1, String arg2, String arg3) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String nodeId = arg2;
+ if (nodeId == null) {
+ result.add("Node Id not specified");
+ return result;
+ }
+ String portId = arg3;
+ if (portId == null) {
+ result.add("Port not specified");
+ return result;
+ }
+ Node node = NodeCreator.createOFNode(Long.valueOf(nodeId));
+ Short port = Short.valueOf(portId);
+ NodeConnector nc = NodeConnectorCreator.createOFNodeConnector(port, node);
+ List<String> portList = new ArrayList<String>(1);
+ portList.add(nc.toString());
+ result.add((this.addRemoveContainerEntries(containerName, portList, false)).toString());
+ return result;
+ }
+
+ public List<String> removeContainerEntry(String arg1, String arg2, String arg3) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String nodeId = arg2;
+ if (nodeId == null) {
+ result.add("Node Id not specified");
+ return result;
+ }
+ String portId = arg3;
+ if (portId == null) {
+ result.add("Port not specified");
+ return result;
+ }
+ Node node = NodeCreator.createOFNode(Long.valueOf(nodeId));
+ Short port = Short.valueOf(portId);
+ NodeConnector nc = NodeConnectorCreator.createOFNodeConnector(port, node);
+ List<String> portList = new ArrayList<String>(1);
+ portList.add(nc.toString());
+ result.add((this.addRemoveContainerEntries(containerName, portList, true)).toString());
+ return result;
+ }
+ public List<String> addContainerFlow(String arg1, String arg2, String arg3) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String cflowName = arg2;
+ if (cflowName == null) {
+ result.add("cflowName not specified");
+ return result;
+ }
+ String unidirectional = arg3;
+ boolean boolUnidirectional = Boolean.parseBoolean(unidirectional);
+ List<ContainerFlowConfig> list = new ArrayList<ContainerFlowConfig>();
+ list.add(createSampleContainerFlowConfig(cflowName, boolUnidirectional));
+ result.add((this.addRemoveContainerFlow(containerName, list, false)).toString());
+ return result;
+ }
+
+ public List<String> removeContainerFlow(String arg1, String arg2) {
+ List<String> result = new ArrayList<String>();
+ String containerName = arg1;
+ if (containerName == null) {
+ result.add("Container Name not specified");
+ return result;
+ }
+ String cflowName = arg2;
+ if (cflowName == null) {
+ result.add("cflowName not specified");
+ return result;
+ }
+ Set<String> set = new HashSet<String>(1);
+ set.add(cflowName);
+ result.add((this.removeContainerFlows(containerName, set)).toString());
+ return result;
+ }
+
+ public List<String> containermgrGetRoles() {
+ List<String> result = new ArrayList<String>();
+ result.add("Configured roles for Container Mgr:");
+ List<String> list = this.getRoles();
+ for (String role : list) {
+ result.add(role + "\t" + roles.get(role));
+ }
+ return result;
+ }
+
+ public List<String> containermgrGetAuthorizedGroups(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String roleName = arg1;
+ if (roleName == null || roleName.trim().isEmpty()) {
+ result.add("Invalid argument");
+ result.add("containermgrGetAuthorizedGroups <role_name>");
+ return result;
+ }
+ result.add("Resource Groups associated to role " + roleName + ":");
+ List<ResourceGroup> list = this.getAuthorizedGroups(roleName);
+ for (ResourceGroup group : list) {
+ result.add(group.toString());
+ }
+ return result;
+ }
+ public List<String> containermgrGetAuthorizedResources(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String roleName = arg1;
+ if (roleName == null || roleName.trim().isEmpty()) {
+ result.add("Invalid argument");
+ result.add("containermgrGetAuthorizedResources <role_name>");
+ return result;
+ }
+ result.add("Resource associated to role " + roleName + ":");
+ List<Resource> list = this.getAuthorizedResources(roleName);
+ for (Resource resource : list) {
+ result.add(resource.toString());
+ }
+ return result;
+ }
+ public List<String> containermgrGetResourcesForGroup(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String groupName = arg1;
+ if (groupName == null || groupName.trim().isEmpty()) {
+ result.add("Invalid argument");
+ result.add("containermgrGetResourcesForGroup <group_name>");
+ return result;
+ }
+ result.add("Group " + groupName + " contains the following resources:");
+ List<Object> resources = this.getResources(groupName);
+ for (Object resource : resources) {
+ result.add(resource.toString());
+ }
+ return result;
+ }
+ public List<String> containermgrGetUserLevel(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String userName = arg1;
+ if (userName == null || userName.trim().isEmpty()) {
+ result.add("Invalid argument");
+ result.add("containermgrGetUserLevel <user_name>");
+ return result;
+ }
+ result.add("User " + userName + " has level: " + this.getUserLevel(userName));
+ return result;
+ }
+ public List<String> containermgrGetUserResources(String arg1) {
+ List<String> result = new ArrayList<String>();
+ String userName = arg1;
+ if (userName == null || userName.trim().isEmpty()) {
+ result.add("Invalid argument");
+ result.add("containermgrGetUserResources <user_name>");
+ return result;
+ }
+ result.add("User " + userName + " owns the following resources: ");
+ Set<Resource> resources = this.getAllResourcesforUser(userName);
+ for (Resource resource : resources) {
+ result.add(resource.toString());
+ }
+ return result;
+ }
+ public List<String> saveConfig() {
+ List<String> result = new ArrayList<String>();
+ Status status = new Status(StatusCode.NOSERVICE, "Configuration service not reachable");
+
+ IConfigurationService configService = (IConfigurationService) ServiceHelper.getGlobalInstance(
+ IConfigurationService.class, this);
+ if (configService != null) {
+ status = configService.saveConfigurations();
+ }
+ result.add(status.toString());
+ return result;
+ }
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>commons.opendaylight</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <relativePath>../../commons/opendaylight</relativePath>
+ </parent>
+ <artifactId>containermanager.shell</artifactId>
+ <version>${containermanager.shell.version}</version>
+ <packaging>bundle</packaging>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.karaf.shell</groupId>
+ <artifactId>org.apache.karaf.shell.console</artifactId>
+ <version>${karaf.shell.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>containermanager</artifactId>
+ <version>${containermanager.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <version>${bundle.plugin.version}</version>
+ <configuration>
+ <instructions>
+ <Import-Package>org.apache.felix.service.command,
+ org.apache.karaf.shell.commands,
+ org.apache.karaf.shell.console,
+ *</Import-Package>
+ </instructions>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "addcontainer", description="Add Container")
+public class AddContainer extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="staticVlan", description="staticVlan", required=true, multiValued=false)
+ String staticVlan = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.addContainer(containerName, staticVlan)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "addContainerEntry", description="add container entry")
+public class AddContainerEntry extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="nodeId", description="node ID", required=true, multiValued=false)
+ String nodeId = null;
+
+ @Argument(index=2, name="portId", description="portId", required=true, multiValued=false)
+ String portId = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.addContainerEntry(containerName, nodeId, portId)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "addContainerFlow", description="adds container flow")
+public class AddContainerFlow extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="cflowName", description="c Flow name", required=true, multiValued=false)
+ String cflowName = null;
+
+ @Argument(index=2, name="unidirectional", description="unidirectional", required=true, multiValued=false)
+ String unidirectional = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.addContainerFlow(containerName, cflowName, unidirectional)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetAuthorizedGroups", description="Get authorized groups")
+public class ContainermgrGetAuthorizedGroups extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="roleName", description="role name", required=true, multiValued=false)
+ String roleName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetAuthorizedGroups(roleName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetAuthorizedResources", description="Get authorized resources")
+public class ContainermgrGetAuthorizedResources extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="roleName", description="role name", required=true, multiValued=false)
+ String roleName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetAuthorizedResources(roleName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetResourcesForGroup", description="Get resources for group")
+public class ContainermgrGetResourcesForGroup extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="groupName", description="group name", required=true, multiValued=false)
+ String groupName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetResourcesForGroup(groupName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetRoles", description="Get container mgr roles")
+public class ContainermgrGetRoles extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetRoles()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetUserLevel", description="Get user level")
+public class ContainermgrGetUserLevel extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="userName", description="user name", required=true, multiValued=false)
+ String userName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetUserLevel(userName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "containermgrGetUserResources", description="Get user resources")
+public class ContainermgrGetUserResources extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="userName", description="user name", required=true, multiValued=false)
+ String userName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.containermgrGetUserResources(userName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "createcontainer", description="create container")
+public class CreateContainer extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="staticVlan", description="staticVlan", required=true, multiValued=false)
+ String staticVlan = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.createContainer(containerName, staticVlan)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "pfc", description="Display pfc")
+public class Pfc extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.pfc()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "psc", description="Display psc")
+public class Psc extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.psc()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "psd", description="Display psd")
+public class Psd extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.psd()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "psm", description="Display psm")
+public class Psm extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.psm()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "psp", description="Display psp")
+public class Psp extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.psp()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "removecontainer", description="remove container")
+public class RemoveContainer extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.removeContainerShell(containerName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "removeContainerEntry", description="remove container entry")
+public class RemoveContainerEntry extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="nodeId", description="node ID", required=true, multiValued=false)
+ String nodeId = null;
+
+ @Argument(index=2, name="portId", description="portId", required=true, multiValued=false)
+ String portId = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.removeContainerEntry(containerName, nodeId, portId)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.felix.gogo.commands.Argument;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "removeContainerFlow", description="removes container flow")
+public class RemoveContainerFlow extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Argument(index=0, name="containerName", description="container name", required=true, multiValued=false)
+ String containerName = null;
+
+ @Argument(index=1, name="cflowName", description="c Flow name", required=true, multiValued=false)
+ String cflowName = null;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.removeContainerFlow(containerName, cflowName)) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import org.apache.felix.gogo.commands.Command;
+import org.apache.karaf.shell.console.OsgiCommandSupport;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+@Command(scope = "containermanager", name = "saveConfig", description="Save config")
+public class SaveConfig extends OsgiCommandSupport{
+ private IContainerManagerShell containerManager;
+
+ @Override
+ protected Object doExecute() throws Exception {
+ for(String p : containerManager.saveConfig()) {
+ System.out.println(p);
+ }
+ return null;
+ }
+
+ public void setContainerManager(IContainerManagerShell containerManager){
+ this.containerManager = containerManager;
+ }
+}
\ No newline at end of file
--- /dev/null
+<blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0">
+
+ <!-- Registers the containermanager Karaf console commands. A single service
+ reference to IContainerManagerShell is shared by every command action;
+ each <action> class below is a shell command that delegates to it. -->
+ <reference id="containerManagerRef" interface="org.opendaylight.controller.containermanager.IContainerManagerShell"/>
+ <command-bundle xmlns="http://karaf.apache.org/xmlns/shell/v1.1.0">
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.AddContainer">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.AddContainerEntry">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.AddContainerFlow">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetAuthorizedGroups">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetAuthorizedResources">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetResourcesForGroup">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetRoles">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetUserLevel">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.ContainermgrGetUserResources">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.CreateContainer">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.Pfc">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.Psc">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.Psd">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.Psm">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.Psp">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.RemoveContainer">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.RemoveContainerEntry">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.RemoveContainerFlow">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ <command>
+ <action class="org.opendaylight.controller.containermanager.shell.SaveConfig">
+ <property name="containerManager" ref="containerManagerRef"/>
+ </action>
+ </command>
+
+ </command-bundle>
+
+
+</blueprint>
--- /dev/null
+package org.opendaylight.controller.containermanager.shell;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.lang.reflect.Field;
+
+import org.junit.Assert;
+import org.junit.Test;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import org.opendaylight.controller.containermanager.IContainerManagerShell;
+
+
+/**
+ * Unit tests for the containermanager Karaf shell command classes.
+ *
+ * Each test mocks {@link IContainerManagerShell}, injects the command's
+ * argument fields via reflection (the fields are private @Argument members
+ * with no setters), redirects System.out into a buffer, invokes
+ * doExecute(), and asserts on the printed output.
+ *
+ * NOTE(review): System.out is replaced via System.setOut() but never
+ * restored afterwards; later tests in the same JVM see the redirected
+ * stream — verify this has no cross-test impact.
+ * NOTE(review): the assertions hard-code "\n"; this assumes doExecute()
+ * terminates each line with a bare '\n' rather than the platform line
+ * separator — TODO confirm against the command implementations.
+ */
+public class ContainerManagerShellTest {
+ // Mocked shell service injected into each command under test.
+ private IContainerManagerShell containerManager;
+
+ @Test
+ public void testAddContainer() throws Exception {
+ String containerName = "test", staticVlan = "1234";
+ AddContainer addConTest = new AddContainer();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ List<String> result2 = new ArrayList<String>(Arrays.asList("Container Name not specified"));
+ when(containerManager.addContainer(containerName, staticVlan)).thenReturn(result);
+ when(containerManager.addContainer(null, null)).thenReturn(result2);
+
+ // Arguments are private fields populated by Karaf at runtime; set them reflectively here.
+ Field cNField = addConTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ Field sVField = addConTest.getClass().getDeclaredField("staticVlan");
+ sVField.setAccessible(true);
+
+ cNField.set(addConTest, "test");
+ sVField.set(addConTest, "1234");
+
+ addConTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ addConTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ baos.reset();
+
+ // Second pass: null arguments must produce the error message path.
+ cNField.set(addConTest, null);
+ sVField.set(addConTest, null);
+ addConTest.doExecute();
+ Assert.assertEquals("Container Name not specified\n", baos.toString());
+ }
+
+ @Test
+ public void testAddContainerEntry() throws Exception {
+ String containerName = "test", nodeId = "1234", portId = "5678";
+ AddContainerEntry addConEntTest = new AddContainerEntry();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.addContainerEntry(containerName, nodeId, portId)).thenReturn(result);
+
+ Field cNField = addConEntTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ Field nIField = addConEntTest.getClass().getDeclaredField("nodeId");
+ nIField.setAccessible(true);
+ Field pIField = addConEntTest.getClass().getDeclaredField("portId");
+ pIField.setAccessible(true);
+
+ cNField.set(addConEntTest, "test");
+ nIField.set(addConEntTest, "1234");
+ pIField.set(addConEntTest, "5678");
+
+ addConEntTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ addConEntTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testAddContainerFlow() throws Exception {
+ String containerName = "test", cflowName = "1234", unidirectional = "5678";
+ AddContainerFlow addConFlowTest = new AddContainerFlow();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.addContainerFlow(containerName, cflowName, unidirectional)).thenReturn(result);
+
+ Field cNField = addConFlowTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ Field cfField = addConFlowTest.getClass().getDeclaredField("cflowName");
+ cfField.setAccessible(true);
+ Field unField = addConFlowTest.getClass().getDeclaredField("unidirectional");
+ unField.setAccessible(true);
+
+ cNField.set(addConFlowTest, "test");
+ cfField.set(addConFlowTest, "1234");
+ unField.set(addConFlowTest, "5678");
+
+ addConFlowTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ addConFlowTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetAuthorizedGroups() throws Exception {
+ String roleName = "test";
+ ContainermgrGetAuthorizedGroups contmgrGTest = new ContainermgrGetAuthorizedGroups();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetAuthorizedGroups(roleName)).thenReturn(result);
+
+ Field rNField = contmgrGTest.getClass().getDeclaredField("roleName");
+ rNField.setAccessible(true);
+
+ rNField.set(contmgrGTest, "test");
+
+ contmgrGTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrGTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetAuthorizedResources() throws Exception {
+ String roleName = "test";
+ ContainermgrGetAuthorizedResources contmgrRTest = new ContainermgrGetAuthorizedResources();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetAuthorizedResources(roleName)).thenReturn(result);
+
+ Field rNField = contmgrRTest.getClass().getDeclaredField("roleName");
+ rNField.setAccessible(true);
+
+ rNField.set(contmgrRTest, "test");
+
+ contmgrRTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrRTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetResourcesForGroup() throws Exception {
+ String groupName = "test";
+ ContainermgrGetResourcesForGroup contmgrRTest = new ContainermgrGetResourcesForGroup();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetResourcesForGroup(groupName)).thenReturn(result);
+
+ Field gNField = contmgrRTest.getClass().getDeclaredField("groupName");
+ gNField.setAccessible(true);
+
+ gNField.set(contmgrRTest, groupName);
+
+ contmgrRTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrRTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetRoles() throws Exception {
+ ContainermgrGetRoles contmgrRTest = new ContainermgrGetRoles();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetRoles()).thenReturn(result);
+
+ contmgrRTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrRTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetUserLevel() throws Exception {
+ String userName = "test";
+ ContainermgrGetUserLevel contmgrUTest = new ContainermgrGetUserLevel();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetUserLevel(userName)).thenReturn(result);
+
+ Field gNField = contmgrUTest.getClass().getDeclaredField("userName");
+ gNField.setAccessible(true);
+
+ gNField.set(contmgrUTest, userName);
+
+ contmgrUTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrUTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testContainermgrGetUserResources() throws Exception {
+ String userName = "test";
+ ContainermgrGetUserResources contmgrUTest = new ContainermgrGetUserResources();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.containermgrGetUserResources(userName)).thenReturn(result);
+
+ Field gNField = contmgrUTest.getClass().getDeclaredField("userName");
+ gNField.setAccessible(true);
+
+ gNField.set(contmgrUTest, userName);
+
+ contmgrUTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ contmgrUTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testPfc() throws Exception {
+ Pfc pfc = new Pfc();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.pfc()).thenReturn(result);
+
+ pfc.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ pfc.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testPsc() throws Exception {
+ Psc psc = new Psc();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.psc()).thenReturn(result);
+
+ psc.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ psc.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testPsd() throws Exception {
+ Psd psd = new Psd();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.psd()).thenReturn(result);
+
+ psd.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ psd.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testPsm() throws Exception {
+ Psm psm = new Psm();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.psm()).thenReturn(result);
+
+ psm.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ psm.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testPsp() throws Exception {
+ Psp psp = new Psp();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.psp()).thenReturn(result);
+
+ psp.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ psp.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testRemoveContainer() throws Exception {
+ String containerName = "test";
+ RemoveContainer remConTest = new RemoveContainer();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.removeContainerShell(containerName)).thenReturn(result);
+
+ Field cNField = remConTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ cNField.set(remConTest, "test");
+
+ remConTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ remConTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testRemoveContainerEntry() throws Exception {
+ String containerName = "test", nodeId = "1234", portId = "5678";
+ RemoveContainerEntry remConEntTest = new RemoveContainerEntry();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.removeContainerEntry(containerName, nodeId, portId)).thenReturn(result);
+
+ Field cNField = remConEntTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ Field nIField = remConEntTest.getClass().getDeclaredField("nodeId");
+ nIField.setAccessible(true);
+ Field pIField = remConEntTest.getClass().getDeclaredField("portId");
+ pIField.setAccessible(true);
+
+ cNField.set(remConEntTest, "test");
+ nIField.set(remConEntTest, "1234");
+ pIField.set(remConEntTest, "5678");
+
+ remConEntTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ remConEntTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testRemoveContainerFlow() throws Exception {
+ String containerName = "test", cflowName = "1234";
+ RemoveContainerFlow remConFlowTest = new RemoveContainerFlow();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.removeContainerFlow(containerName, cflowName)).thenReturn(result);
+
+ Field cNField = remConFlowTest.getClass().getDeclaredField("containerName");
+ cNField.setAccessible(true);
+ Field cfField = remConFlowTest.getClass().getDeclaredField("cflowName");
+ cfField.setAccessible(true);
+
+ cNField.set(remConFlowTest, "test");
+ cfField.set(remConFlowTest, "1234");
+
+ remConFlowTest.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ remConFlowTest.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+
+ @Test
+ public void testSaveConfig() throws Exception {
+ SaveConfig saveConfig = new SaveConfig();
+ containerManager = mock(IContainerManagerShell.class);
+ List<String> result = new ArrayList<String>(Arrays.asList("status"));
+ when(containerManager.saveConfig()).thenReturn(result);
+
+ saveConfig.setContainerManager(containerManager);
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ System.setOut(new PrintStream(baos));
+ saveConfig.doExecute();
+ Assert.assertEquals("status\n", baos.toString());
+ }
+}
\ No newline at end of file
<version>${karaf.version}</version>
<type>kar</type>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <scope>compile</scope>
+ </dependency>
<!-- scope is runtime so the feature repo is listed in the features
service config file, and features may be installed using the
karaf-maven-plugin configuration -->
<type>xml</type>
<scope>runtime</scope>
</dependency>
- <dependency>
- <!-- scope is compile so all features (there is only one) are installed
+ <!-- scope is compile so all features (there is only one) are installed
into startup.properties and the feature repo itself is not installed -->
+ <dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>base-features</artifactId>
<version>${project.version}</version>
- <type>kar</type>
+ <type>pom</type>
<scope>runtime</scope>
</dependency>
<dependency>
</configuration>
</plugin>
<plugin>
- <artifactId>maven-resources-plugin</artifactId>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
<version>2.6</version>
<executions>
<execution>
- <id>copy-resources</id>
+ <id>copy</id>
<goals>
- <goal>copy-resources</goal>
+ <goal>copy</goal>
</goals>
<!-- here the phase you need -->
- <phase>process-resources</phase>
+ <phase>generate-resources</phase>
<configuration>
- <outputDirectory>${basedir}/target/assembly</outputDirectory>
- <overwrite>true</overwrite>
- <resources>
- <resource>
- <directory>${basedir}/src/main/resources</directory>
- </resource>
- </resources>
+ <artifactItems>
+ <artifactItem>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <version>${karaf.branding.version}</version>
+ <outputDirectory>target/assembly/lib</outputDirectory>
+ <!-- must use the same property as <version>; ${branding.version} is not
+ defined and would leave an unresolved placeholder in the jar name -->
+ <destFileName>karaf.branding-${karaf.branding.version}.jar</destFileName>
+ </artifactItem>
+ </artifactItems>
</configuration>
</execution>
</executions>
# Extra packages to import from the boot class loader
-org.osgi.framework.system.packages.extra=sun.reflect,sun.reflect.misc,sun.misc,sun.nio.ch
+org.osgi.framework.system.packages.extra=org.apache.karaf.branding,sun.reflect,sun.reflect.misc,sun.misc,sun.nio.ch
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
# Embedded Tomcat configuration File
org.eclipse.gemini.web.tomcat.config.path=configuration/tomcat-server.xml
org.apache.tomcat.util.buf.UDecoder.ALLOW_ENCODED_SLASH=true
-# Use Equinox as default OSGi Framework Implementation
-karaf.framework=equinox
# Netconf startup configuration
netconf.tcp.address=127.0.0.1
java.util.logging.config.file=configuration/tomcat-logging.properties
#Hosttracker hostsdb key scheme setting
-hosttracker.keyscheme=IP
\ No newline at end of file
+hosttracker.keyscheme=IP
mvn\:org.ops4j.pax.url/pax-url-maven-commons/1.6.0 = 5
mvn\:org.ops4j.pax.url/pax-url-aether/1.6.0 = 5
mvn\:org.ops4j.pax.url/pax-url-wrap/1.6.0 = 5
-mvn\:javax.annotation/javax.annotation-api/1.2 = 5
+#mvn\:javax.annotation/javax.annotation-api/1.2 = 5
mvn\:org.ops4j.pax.logging/pax-logging-api/1.7.2 = 8
mvn\:org.ops4j.pax.logging/pax-logging-service/1.7.2 = 8
mvn\:org.apache.karaf.service/org.apache.karaf.service.guard/3.0.1 = 10
--- /dev/null
+#
+# The properties defined in this file will be made available through system
+# properties at the very beginning of the Karaf's boot process.
+#
+
+# Use Equinox as default OSGi Framework Implementation
+karaf.framework=equinox
+
+# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
+# Extend the framework to avoid the resources to be presented with
+# a URL of type bundleresource: but to be presented as file:
+osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
+
+
+# Log level when the pax-logging service is not available
+# This level will only be used while the pax-logging service bundle
+# is not fully available.
+# To change log levels, please refer to the org.ops4j.pax.logging.cfg file
+# instead.
+org.ops4j.pax.logging.DefaultServiceLog.level = ERROR
+
+#
+# Name of this Karaf instance.
+#
+karaf.name = root
+
+#
+# Default repository where bundles will be loaded from before using
+# other Maven repositories. For the full Maven configuration, see
+# the org.ops4j.pax.url.mvn.cfg file.
+#
+karaf.default.repository = system
+
+#
+# Location of a shell script that will be run when starting a shell
+# session. This script can be used to create aliases and define
+# additional commands.
+#
+karaf.shell.init.script = ${karaf.etc}/shell.init.script
+
+#
+# Sets the maximum size of the shell command history. If not set,
+# defaults to 500 entries. Setting to 0 will disable history.
+#
+# karaf.shell.history.maxSize = 0
+
+#
+# Deletes the entire karaf.data directory at every start
+#
+karaf.clean.all = false
+
+#
+# Deletes the karaf.data/cache directory at every start
+#
+karaf.clean.cache = false
+
+#
+# Roles to use when logging into a local Karaf console.
+#
+# The syntax is the following:
+# [classname:]principal
+# where classname is the class name of the principal object
+# (defaults to org.apache.karaf.jaas.modules.RolePrincipal)
+# and principal is the name of the principal of that class
+# (defaults to instance).
+#
+karaf.local.roles = admin,manager,viewer
+
+#
+# Set this empty property to avoid errors when validating xml documents.
+#
+xml.catalog.files =
+
+#
+# Suppress the bell in the console when hitting backspace too many times
+# for example
+#
+jline.nobell = true
+
+#
+# ServiceMix specs options
+#
+org.apache.servicemix.specs.debug = false
+org.apache.servicemix.specs.timeout = 0
+
+#
+# Settings for the OSGi 4.3 Weaving
+# By default, we will not weave any classes. Change this setting to include classes
+# that your application needs to have woven.
+#
+org.apache.aries.proxy.weaving.enabled = none
+# Classes not to weave - Aries default + Xerces which is known to have issues.
+org.apache.aries.proxy.weaving.disabled = org.objectweb.asm.*,org.slf4j.*,org.apache.log4j.*,javax.*,org.apache.xerces.*
+
+#
+# By default, only Karaf shell commands are secured, but additional services can be
+# secured by expanding this filter
+#
+karaf.secured.services = (&(osgi.command.scope=*)(osgi.command.function=*))
+
+#
+# Security properties
+#
+# To enable OSGi security, uncomment the properties below,
+# install the framework-security feature and restart.
+#
+#java.security.policy=${karaf.etc}/all.policy
+#org.osgi.framework.security=osgi
+#org.osgi.framework.trust.repositories=${karaf.etc}/trustStore.ks
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-persister-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>filter-valve</artifactId>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>logback-config</artifactId>
--- /dev/null
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<Host>
+ <!-- Filters are allowed here, only serving as a template -->
+ <filter-template>
+ <filter-name>CorsFilter</filter-name>
+ <filter-class>org.apache.catalina.filters.CorsFilter</filter-class>
+ <init-param>
+ <param-name>cors.allowed.origins</param-name>
+ <param-value>*</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.methods</param-name>
+ <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.allowed.headers</param-name>
+ <param-value>Content-Type,X-Requested-With,accept,authorization,
+ origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers
+ </param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.exposed.headers</param-name>
+ <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.support.credentials</param-name>
+ <param-value>true</param-value>
+ </init-param>
+ <init-param>
+ <param-name>cors.preflight.maxage</param-name>
+ <param-value>10</param-value>
+ </init-param>
+ </filter-template>
+
+ <Context path="/restconf">
+ <filter>
+ <filter-name>CorsFilter</filter-name>
+ <!-- init params can be added/overridden if template is used -->
+ </filter>
+ <!-- references to templates without <filter> declaration are not allowed -->
+ <filter-mapping>
+ <filter-name>CorsFilter</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+ </Context>
+
+</Host>
rotatable="true" fileDateFormat="yyyy-MM"
pattern="%{yyyy-MM-dd HH:mm:ss.SSS z}t - [%a] - %r"/>
+ <Valve className="org.opendaylight.controller.filtervalve.cors.FilterValve"
+ configurationFile="configuration/cors-config.xml"
+ />
</Host>
</Engine>
</Service>
if [[ -z ${JAVA_HOME} ]]; then
# Find the actual location of the Java launcher:
- java_launcher=`which java`
+ java_launcher=`command -v java`
java_launcher=`readlink -f "${java_launcher}"`
# Compute the Java home from the location of the Java launcher:
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <packaging>bundle</packaging>
+ <name>OpenDaylight :: Karaf :: Branding</name>
+
+ <!-- Builds the OSGi bundle that supplies the custom Karaf console branding
+ (welcome banner and prompt). Karaf picks it up via the exported
+ org.apache.karaf.branding package, which the boot classpath also lists
+ in org.osgi.framework.system.packages.extra. -->
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <version>2.4.0</version>
+ <extensions>true</extensions>
+ <configuration>
+ <instructions>
+ <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
+ <Import-Package>*</Import-Package>
+ <!-- resources only: no classes are kept private in the bundle -->
+ <Private-Package>!*</Private-Package>
+ <Export-Package>
+ org.apache.karaf.branding
+ </Export-Package>
+ <Spring-Context>*;public-context:=false</Spring-Context>
+ </instructions>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
--- /dev/null
+welcome = \
+\u001B[33m \r\n\
+\u001B[33m ________ ________ .__ .__ .__ __ \r\n\
+\u001B[33m \\_____ \\ ______ ____ ____ \\______ \\ _____ ___.__.| | |__| ____ | |___/ |_ \r\n\
+\u001B[33m / | \\\\____ \\_/ __ \\ / \\ | | \\\\__ \\< | || | | |/ ___\\| | \\ __\\ \r\n\
+\u001B[33m / | \\ |_> > ___/| | \\| ` \\/ __ \\\\___ || |_| / /_/ > Y \\ | \r\n\
+\u001B[33m \\_______ / __/ \\___ >___| /_______ (____ / ____||____/__\\___ /|___| /__| \r\n\
+\u001B[33m \\/|__| \\/ \\/ \\/ \\/\\/ /_____/ \\/ \r\n\
+\u001B[33m \r\n\
+\r\n\
+Hit '\u001B[1m<tab>\u001B[0m' for a list of available commands\r\n\
+ and '\u001B[1m[cmd] --help\u001B[0m' for help on a specific command.\r\n\
+Hit '\u001B[1m<ctrl-d>\u001B[0m' or type '\u001B[1msystem:shutdown\u001B[0m' or '\u001B[1mlogout\u001B[0m' to shutdown OpenDaylight.\r\n
+prompt = \u001B[36mopendaylight-user\u001B[0m\u001B[1m@\u001B[0m\u001B[34m${APPLICATION}\u001B[0m>
.setType(etherType);
targetEthMatchBuild.setEthernetType(ethType.build());
}
- if((sourceMatch.getField(DL_SRC) != null && sourceMatch.getField(DL_SRC).getValue() != null) ||
- (sourceMatch.getField(DL_DST) != null && sourceMatch.getField(DL_DST).getValue() != null)||
+ if((sourceMatch.getField(DL_SRC) != null && sourceMatch.getField(DL_SRC).getValue() != null) ||
+ (sourceMatch.getField(DL_DST) != null && sourceMatch.getField(DL_DST).getValue() != null)||
dataLinkType != null ) {
- return targetEthMatchBuild.build();
- }
+ return targetEthMatchBuild.build();
+ }
return null;
}
.toAddrString(inetDestAddress);
layer4MatchBuild
.setIpv4Destination(new Ipv4Prefix(inetDstAddressString));
- }
+ }
return layer4MatchBuild.build();
}
}
if(inetDestAddress != null) {
String inetDstAddressString = InetAddresses
- .toAddrString(inetDestAddress);
+ .toAddrString(inetDestAddress);
layer6MatchBuild
.setIpv6Destination(new Ipv6Prefix(inetDstAddressString));
}
return layer6MatchBuild.build();
}
-
+
public static boolean flowEquals(Flow statsFlow, Flow storedFlow) {
if (statsFlow.getClass() != storedFlow.getClass()) {
return false;
*/
package org.opendaylight.controller.sal.compatibility;
-import java.math.BigInteger;
-import java.net.Inet4Address;
-import java.net.Inet6Address;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
+import com.google.common.base.Preconditions;
+import com.google.common.net.InetAddresses;
import org.opendaylight.controller.sal.action.Action;
import org.opendaylight.controller.sal.action.Controller;
import org.opendaylight.controller.sal.action.Drop;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.hw.path.action._case.HwPathActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.loopback.action._case.LoopbackActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.output.action._case.OutputActionBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.pop.vlan.action._case.PopVlanActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.push.vlan.action._case.PushVlanActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.set.dl.dst.action._case.SetDlDstActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.set.dl.src.action._case.SetDlSrcActionBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.types.rev130827.VlanId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.types.rev130827.VlanPcp;
-import com.google.common.base.Preconditions;
-import com.google.common.net.InetAddresses;
+import java.math.BigInteger;
+import java.net.Inet4Address;
+import java.net.Inet6Address;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
public final class MDFlowMapping {
private MDFlowMapping() {
}
private static PopVlanActionCase _toAction(final PopVlan sourceAction) {
- return new PopVlanActionCaseBuilder().build();
+ PopVlanActionBuilder popVlanActionBuilder = new PopVlanActionBuilder();
+ return new PopVlanActionCaseBuilder().setPopVlanAction(popVlanActionBuilder.build()).build();
}
private static PushVlanActionCase _toAction(final PushVlan sourceAction) {
public static List<org.opendaylight.controller.sal.action.Action> actionFrom(List<Action> actions, Node node) {
List<org.opendaylight.controller.sal.action.Action> targetAction = new ArrayList<>();
for (Action action : actions) {
- org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.Action sourceAction = action
+ org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.Action sourceAction = action
.getAction();
if (sourceAction instanceof ControllerActionCase) {
}
return macAddress;
}
-
+
public static byte[] bytesFromDpid(long dpid) {
byte[] mac = new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
{(byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff},
{(byte) 0x76, (byte) 0x4a, (byte) 0xe9, (byte) 0xac, (byte) 0xe6, (byte) 0x5a}
};
-
+
Assert.assertEquals(expectedMacs.length, nodeIds.length);
for (int i = 0; i < expectedMacs.length; i++) {
boolean b) {
int numOfFoundActions = 0;
for (org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.list.Action action : actions) {
- org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.Action innerAction = action
+ org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.Action innerAction = action
.getAction();
if (cl.isInstance(innerAction)) {
numOfFoundActions++;
public void testToSalConversion() throws ConstructionException {
FlowAddedBuilder odNodeFlowBuilder = new FlowAddedBuilder();
odNodeFlowBuilder = prepareOdFlowCommon();
-
+
Node node = new Node(NodeIDType.OPENFLOW,(long)1);
-
+
Flow salFlow = ToSalConversionsUtils.toFlow(prepareOdFlow(odNodeFlowBuilder, MtchType.other), node);
checkSalMatch(salFlow.getMatch(), MtchType.other);
private void checkSalMatch(org.opendaylight.controller.sal.match.Match match, MtchType mt) {
switch (mt) {
case other:
- /*assertNotNull("DL_DST isn't equal.", "3C:A9:F4:00:E0:C8",
+ /*assertNotNull("DL_DST isn't equal.", "3C:A9:F4:00:E0:C8",
new String((byte[]) match.getField(MatchType.DL_DST).getValue()));
assertEquals("DL_SRC isn't equal.", "24:77:03:7C:C5:F1",
new String((byte[]) match.getField(MatchType.DL_SRC).getValue()));
//assertEquals("Wrong value for action SetDlSrc for MAC address.", "24:77:03:7C:C5:F1", new String(
// ((SetDlSrc) action).getDlAddress()));
} else if (action instanceof SetDlType) {
- assertEquals("Wrong value for action SetDlType for.", 513l, ((SetDlType) action).getDlType());
+ assertEquals("Wrong value for action SetDlType for.", 513L, ((SetDlType) action).getDlType());
} else if (action instanceof SetNextHop) {
InetAddress inetAddress = ((SetNextHop) action).getAddress();
checkIpAddresses(inetAddress, "192.168.100.100", "2001:db8:85a3::8a2e:370:7334");
odActions.add(new ActionBuilder().setAction(setVlanPcpActionBuilder.build()).build());
odActions.add(new ActionBuilder().setAction(swPathActionBuilder.build()).build());
-
+
ApplyActionsCase innerInst = new ApplyActionsCaseBuilder().setApplyActions(new ApplyActionsBuilder().setAction(odActions).build()).build();
Instruction applyActions = new InstructionBuilder().setInstruction(innerInst).build();
List<Instruction> instructions = Collections.singletonList(applyActions );
InstructionsBuilder instBuilder = new InstructionsBuilder();
-
+
instBuilder.setInstruction(instructions);
-
+
return instBuilder.build();
}
private void prepareActionSetNwDst(List<Action> odActions) {
// test case for IPv4
-
+
SetNwDstActionBuilder setNwDstActionBuilderIpv4 = new SetNwDstActionBuilder();
setNwDstActionBuilderIpv4.setAddress(prapareIpv4Address("192.168.100.101"));
odActions.add(new ActionBuilder().setAction(new SetNwDstActionCaseBuilder().setSetNwDstAction(setNwDstActionBuilderIpv4.build()).build()).build());
private void prepareActionSetDlType(SetDlTypeActionCaseBuilder wrapper) {
SetDlTypeActionBuilder setDlTypeActionBuilder = new SetDlTypeActionBuilder();
- setDlTypeActionBuilder.setDlType(new EtherType(513l));
+ setDlTypeActionBuilder.setDlType(new EtherType(513L));
wrapper.setSetDlTypeAction(setDlTypeActionBuilder.build());
}
private EthernetType prepEthType() {
EthernetTypeBuilder ethTypeBuild = new EthernetTypeBuilder();
- ethTypeBuild.setType(new EtherType(0xffffl));
+ ethTypeBuild.setType(new EtherType(0xffffL));
return ethTypeBuild.build();
}
<features name="mdsal-${project.version}" xmlns="http://karaf.apache.org/xmlns/features/v1.2.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://karaf.apache.org/xmlns/features/v1.2.0 http://karaf.apache.org/xmlns/features/v1.2.0">
- <feature name='mdsal-all' version='${project.version}'>
+ <feature name='odl-mdsal-all' version='${project.version}'>
<feature version='${project.version}'>odl-mdsal-commons</feature>
<feature version='${project.version}'>odl-mdsal-broker</feature>
<feature version='${project.version}'>odl-mdsal-restconf</feature>
<bundle>wrap:mvn:io.netty/netty-handler/${netty.version}</bundle>
<bundle>wrap:mvn:io.netty/netty-transport/${netty.version}</bundle>
</feature>
-</features>
\ No newline at end of file
+</features>
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
/**
- *
+ *
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public void onDataChanged(DataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
this.transactionId = this.newTransactionIdentifier().toString();
- final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
+ final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
changeEvent.getCreatedConfigurationData().entrySet();
- final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries =
+ final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries =
new HashSet<Entry<InstanceIdentifier<? extends DataObject>, DataObject>>();
- Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updateConfigEntrySet =
+ Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updateConfigEntrySet =
changeEvent.getUpdatedConfigurationData().entrySet();
updatedEntries.addAll(updateConfigEntrySet);
updatedEntries.removeAll(createdEntries);
- final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
+ final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
changeEvent.getRemovedConfigurationData();
for (final Entry<InstanceIdentifier<? extends DataObject>, DataObject> createdEntry : createdEntries) {
}
for (final Entry<InstanceIdentifier<?>, DataObject> updatedEntrie : updatedEntries) {
- Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
+ Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
changeEvent.getOriginalConfigurationData();
InstanceIdentifier<? extends Object> u_key = updatedEntrie.getKey();
}
for (final InstanceIdentifier<?> instanceId : removeEntriesInstanceIdentifiers) {
- Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
+ Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
changeEvent.getOriginalConfigurationData();
final DataObject removeValue = origConfigData.get(instanceId);
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
private final static Logger LOG = LoggerFactory.getLogger(FRMActivator.class);
- private static FlowProvider flowProvider = new FlowProvider();
+ private static FlowProvider flowProvider = new FlowProvider();
private static GroupProvider groupProvider = new GroupProvider();
private static MeterProvider meterProvider = new MeterProvider();
-
+
@Override
public void onSessionInitiated(final ProviderContext session) {
DataProviderService flowSalService = session.<DataProviderService>getSALService(DataProviderService.class);
FRMActivator.meterProvider.setSalMeterService(rpcMeterSalService);
FRMActivator.meterProvider.start();
}
-
+
@Override
protected void stopImpl(final BundleContext context) {
try {
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
import org.slf4j.LoggerFactory;
/**
- *
+ *
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public SalFlowService getSalFlowService() {
return this.salFlowService;
}
-
+
public FlowChangeListener(final SalFlowService manager) {
this.salFlowService = manager;
}
@Override
protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
if ((removeDataObj instanceof Flow)) {
-
+
final Flow flow = ((Flow) removeDataObj);
final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final RemoveFlowInputBuilder builder = new RemoveFlowInputBuilder(flow);
-
+
builder.setFlowRef(new FlowRef(identifier));
builder.setNode(new NodeRef(nodeInstanceId));
builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salFlowService.removeFlow((RemoveFlowInput) builder.build());
@Override
protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
if (original instanceof Flow && update instanceof Flow) {
-
+
final Flow originalFlow = ((Flow) original);
final Flow updatedFlow = ((Flow) update);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node>firstIdentifierOf(Node.class);
final UpdateFlowInputBuilder builder = new UpdateFlowInputBuilder();
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setFlowRef(new FlowRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
-
+
builder.setUpdatedFlow((UpdatedFlow) (new UpdatedFlowBuilder(updatedFlow)).build());
builder.setOriginalFlow((OriginalFlow) (new OriginalFlowBuilder(originalFlow)).build());
-
+
this.salFlowService.updateFlow((UpdateFlowInput) builder.build());
LOG.debug("Transaction {} - Update Flow has updated flow {} with {}", new Object[]{uri, original, update});
}
@Override
protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
if ((addDataObj instanceof Flow)) {
-
+
final Flow flow = ((Flow) addDataObj);
final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setFlowRef(new FlowRef(identifier));
builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salFlowService.addFlow((AddFlowInput) builder.build());
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
InstanceIdentifierBuilder<Table> tableChild = augmentFlowCapNode.<Table> child(Table.class);
InstanceIdentifierBuilder<Flow> flowChild = tableChild.<Flow> child(Flow.class);
final InstanceIdentifier<? extends DataObject> flowDataObjectPath = flowChild.toInstance();
-
+
/* DataChangeListener registration */
this.flowDataChangeListener = new FlowChangeListener(this.salFlowService);
this.flowDataChangeListenerRegistration = this.dataService.registerDataChangeListener(flowDataObjectPath, flowDataChangeListener);
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
import org.slf4j.LoggerFactory;
/**
- *
+ *
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public SalGroupService getSalGroupService() {
return this.salGroupService;
}
-
+
public GroupChangeListener(final SalGroupService manager) {
this.salGroupService = manager;
}
@Override
protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
if ((removeDataObj instanceof Group)) {
-
+
final Group group = ((Group) removeDataObj);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final RemoveGroupInputBuilder builder = new RemoveGroupInputBuilder(group);
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setGroupRef(new GroupRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salGroupService.removeGroup((RemoveGroupInput) builder.build());
@Override
protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
if (original instanceof Group && update instanceof Group) {
-
+
final Group originalGroup = ((Group) original);
final Group updatedGroup = ((Group) update);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final UpdateGroupInputBuilder builder = new UpdateGroupInputBuilder();
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setGroupRef(new GroupRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
-
+
builder.setUpdatedGroup((UpdatedGroup) (new UpdatedGroupBuilder(updatedGroup)).build());
builder.setOriginalGroup((OriginalGroup) (new OriginalGroupBuilder(originalGroup)).build());
-
+
this.salGroupService.updateGroup((UpdateGroupInput) builder.build());
LOG.debug("Transaction {} - Update Group has updated group {} with group {}", new Object[]{uri, original, update});
}
final Group group = ((Group) addDataObj);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final AddGroupInputBuilder builder = new AddGroupInputBuilder(group);
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setGroupRef(new GroupRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salGroupService.addGroup((AddGroupInput) builder.build());
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
this.groupDataChangeListenerRegistration = this.dataService.registerDataChangeListener(groupDataObjectPath, groupDataChangeListener);
LOG.info("Group Config Provider started.");
}
-
+
protected DataModificationTransaction startChange() {
return this.dataService.beginTransaction();
}
-
+
public void close() throws Exception {
if(groupDataChangeListenerRegistration != null){
groupDataChangeListenerRegistration.close();
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
import org.slf4j.LoggerFactory;
/**
- *
+ *
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public SalMeterService getSalMeterService() {
return this.salMeterService;
}
-
+
public MeterChangeListener(final SalMeterService manager) {
this.salMeterService = manager;
}
@Override
protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
if ((removeDataObj instanceof Meter)) {
-
+
final Meter meter = ((Meter) removeDataObj);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final RemoveMeterInputBuilder builder = new RemoveMeterInputBuilder(meter);
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setMeterRef(new MeterRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salMeterService.removeMeter((RemoveMeterInput) builder.build());
@Override
protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
if (original instanceof Meter && update instanceof Meter) {
-
+
final Meter originalMeter = ((Meter) original);
final Meter updatedMeter = ((Meter) update);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final UpdateMeterInputBuilder builder = new UpdateMeterInputBuilder();
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setMeterRef(new MeterRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
-
+
builder.setUpdatedMeter((UpdatedMeter) (new UpdatedMeterBuilder(updatedMeter)).build());
builder.setOriginalMeter((OriginalMeter) (new OriginalMeterBuilder(originalMeter)).build());
-
+
this.salMeterService.updateMeter((UpdateMeterInput) builder.build());
LOG.debug("Transaction {} - Update Meter has updated meter {} with {}", new Object[]{uri, original, update});
}
@Override
protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
if ((addDataObj instanceof Meter)) {
-
+
final Meter meter = ((Meter) addDataObj);
final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
final AddMeterInputBuilder builder = new AddMeterInputBuilder(meter);
-
+
builder.setNode(new NodeRef(nodeInstanceId));
builder.setMeterRef(new MeterRef(identifier));
-
+
Uri uri = new Uri(this.getTransactionId());
builder.setTransactionUri(uri);
this.salMeterService.addMeter((AddMeterInput) builder.build());
/**
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
this.meterDataChangeListenerRegistration = this.dataService.registerDataChangeListener(meterDataObjectPath, meterDataChangeListener);
LOG.info("Meter Config Provider started.");
}
-
+
protected DataModificationTransaction startChange() {
return this.dataService.beginTransaction();
}
Future<RpcResult<TransactionStatus>> commitResult = it.commit();
listenOnTransactionState(it.getIdentifier(), commitResult, "node update", ref.getValue());
}
-
+
/**
* @param txId transaction identificator
* @param future transaction result
private static void listenOnTransactionState(final Object txId, Future<RpcResult<TransactionStatus>> future,
final String action, final InstanceIdentifier<?> nodeConnectorPath) {
Futures.addCallback(JdkFutureAdapters.listenInPoolThread(future),new FutureCallback<RpcResult<TransactionStatus>>() {
-
+
@Override
public void onFailure(Throwable t) {
LOG.error("Action {} [{}] failed for Tx:{}", action, nodeConnectorPath, txId, t);
-
+
}
-
+
@Override
public void onSuccess(RpcResult<TransactionStatus> result) {
if(!result.isSuccessful()) {
}
}
+ container action-types {
+ uses action-list;
+ }
+
grouping action-list {
list action {
key "order";
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
</plugin>
- <plugin>
- <!-- FIXME: BUG-272: remove this configuration override -->
- <!-- replaced with new configuration -->
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.12</version>
- <configuration>
- <failsOnError>false</failsOnError>
- <failOnViolation>false</failOnViolation>
- <configLocation>checkstyle-logging.xml</configLocation>
- <consoleOutput>true</consoleOutput>
- <includeTestSourceDirectory>true</includeTestSourceDirectory>
- <sourceDirectory>${project.basedir}</sourceDirectory>
- <includes>**\/*.java,**\/*.xml,**\/*.ini,**\/*.sh,**\/*.bat,**\/*.yang</includes>
- <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/${jmxGeneratorPath}\/,**\/${salGeneratorPath}\/</excludes>
- </configuration>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>checkstyle-logging</artifactId>
- <version>${yangtools.version}</version>
- </dependency>
- </dependencies>
- <executions>
- <execution>
- <goals>
- <goal>check</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
package org.opendaylight.controller.sal.connector.remoterpc.impl;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableSet;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+import javax.transaction.HeuristicMixedException;
+import javax.transaction.HeuristicRollbackException;
+import javax.transaction.NotSupportedException;
+import javax.transaction.RollbackException;
+
import org.apache.felix.dm.Component;
-import org.opendaylight.controller.clustering.services.*;
+import org.opendaylight.controller.clustering.services.CacheConfigException;
+import org.opendaylight.controller.clustering.services.CacheExistException;
+import org.opendaylight.controller.clustering.services.CacheListenerAddException;
+import org.opendaylight.controller.clustering.services.ICacheUpdateAware;
+import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
+import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.transaction.HeuristicMixedException;
-import javax.transaction.HeuristicRollbackException;
-import javax.transaction.NotSupportedException;
-import javax.transaction.RollbackException;
-import java.util.*;
-import java.util.concurrent.ConcurrentMap;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableSet;
public class RoutingTableImpl<I, R> implements RoutingTable<I, R>, ICacheUpdateAware<I, R> {
- private Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
+ private final Logger log = LoggerFactory.getLogger(RoutingTableImpl.class);
- private IClusterGlobalServices clusterGlobalServices = null;
+ private IClusterGlobalServices clusterGlobalServices = null;
- private ConcurrentMap<I,R> globalRpcCache = null;
- private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; //need routes to ordered by insert-order
+ private ConcurrentMap<I,R> globalRpcCache = null;
+ private ConcurrentMap<I, LinkedHashSet<R>> rpcCache = null; //need routes to ordered by insert-order
- public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
- public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
+ public static final String GLOBALRPC_CACHE = "remoterpc_routingtable.globalrpc_cache";
+ public static final String RPC_CACHE = "remoterpc_routingtable.rpc_cache";
- public RoutingTableImpl() {
- }
-
- @Override
- public R getGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
- return globalRpcCache.get(routeId);
- }
-
- @Override
- public void addGlobalRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
- try {
-
- log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
- clusterGlobalServices.tbegin();
- if (globalRpcCache.putIfAbsent(routeId, route) != null) {
- throw new DuplicateRouteException(" There is already existing route " + routeId);
- }
- clusterGlobalServices.tcommit();
-
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to create route id="
- + routeId + "with route" + route, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to create with value", e);
+ public RoutingTableImpl() {
}
- }
+ @Override
+ public R getGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!");
+ return globalRpcCache.get(routeId);
+ }
- @Override
- public void removeGlobalRoute(I routeId) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
- try {
- log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+ @Override
+ public void addGlobalRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!");
+ try {
+
+ log.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route);
+ clusterGlobalServices.tbegin();
+ if (globalRpcCache.putIfAbsent(routeId, route) != null) {
+ throw new DuplicateRouteException(" There is already existing route " + routeId);
+ }
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to create route id="
+ + routeId + "with route" + route, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to create with value", e);
+ }
- clusterGlobalServices.tbegin();
- globalRpcCache.remove(routeId);
- clusterGlobalServices.tcommit();
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeGlobalRoute(final I routeId) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!");
+ try {
+ log.debug("removeGlobalRoute: removing a new route with id [{}]", routeId);
+
+ clusterGlobalServices.tbegin();
+ globalRpcCache.remove(routeId);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public Set<R> getRoutes(I routeId) {
- Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
- Set<R> routes = rpcCache.get(routeId);
+ @Override
+ public Set<R> getRoutes(final I routeId) {
+ Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!");
+ Set<R> routes = rpcCache.get(routeId);
- if (routes == null) return Collections.emptySet();
+ if (routes == null) {
+ return Collections.emptySet();
+ }
- return ImmutableSet.copyOf(routes);
- }
+ return ImmutableSet.copyOf(routes);
+ }
- public R getLastAddedRoute(I routeId) {
+ @Override
+ public R getLastAddedRoute(final I routeId) {
- Set<R> routes = getRoutes(routeId);
+ Set<R> routes = getRoutes(routeId);
- if (routes.isEmpty()) return null;
+ if (routes.isEmpty()) {
+ return null;
+ }
- R route = null;
- Iterator<R> iter = routes.iterator();
- while (iter.hasNext())
- route = iter.next();
+ R route = null;
+ Iterator<R> iter = routes.iterator();
+ while (iter.hasNext()) {
+ route = iter.next();
+ }
- return route;
- }
+ return route;
+ }
- @Override
- public void addRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
- Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+ @Override
+ public void addRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null");
+ Preconditions.checkNotNull(route, "addRoute: route cannot be null");
+
+ try{
+ clusterGlobalServices.tbegin();
+ log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
+ threadSafeAdd(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
+ }
- try{
- clusterGlobalServices.tbegin();
- log.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route);
- threadSafeAdd(routeId, route);
- clusterGlobalServices.tcommit();
+ @Override
+ public void addRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ addRoute(routeId, route);
+ }
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ @Override
+ public void removeRoute(final I routeId, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
+ Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+
+ LinkedHashSet<R> routes = rpcCache.get(routeId);
+ if (routes == null) {
+ return;
+ }
+
+ try {
+ log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
+
+ clusterGlobalServices.tbegin();
+ threadSafeRemove(routeId, route);
+ clusterGlobalServices.tcommit();
+
+ } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
+ throw new RoutingTableException("Transaction error - while trying to remove route id="
+ + routeId, e);
+ } catch (javax.transaction.SystemException e) {
+ throw new SystemException("System error occurred - while trying to remove with value", e);
+ }
}
- }
- @Override
- public void addRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- addRoute(routeId, route);
+ @Override
+ public void removeRoutes(final Set<I> routeIds, final R route) throws RoutingTableException, SystemException {
+ Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
+ for (I routeId : routeIds){
+ removeRoute(routeId, route);
+ }
}
- }
- @Override
- public void removeRoute(I routeId, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!");
- Preconditions.checkNotNull(route, "removeRoute: route cannot be null!");
+ /**
+ * This method guarantees that no 2 thread over write each other's changes.
+ * Just so that we dont end up in infinite loop, it tries for 100 times then throw
+ */
+ private void threadSafeAdd(final I routeId, final R route) {
+
+ for (int i=0;i<100;i++){
+
+ LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
+ updatedRoutes.add(route);
+ LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
+ if (oldRoutes == null) {
+ return;
+ }
+
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.add(route);
+
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+ }
+ //the method did not already return means it failed to add route in 100 attempts
+ throw new IllegalStateException("Failed to add route [" + routeId + "]");
+ }
- LinkedHashSet<R> routes = rpcCache.get(routeId);
- if (routes == null) return;
+ /**
+ * This method guarantees that no 2 thread over write each other's changes.
+ * Just so that we dont end up in infinite loop, it tries for 10 times then throw
+ */
+ private void threadSafeRemove(final I routeId, final R route) {
+ LinkedHashSet<R> updatedRoutes = null;
+ for (int i=0;i<10;i++){
+ LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
+
+ // a concurrent remove may have deleted routeId after the caller's null check;
+ // guard here so oldRoutes.size() below cannot NPE — nothing left to remove
+ if (oldRoutes == null) {
+ return;
+ }
+
+ // if route to be deleted is the only entry in the set then remove routeId from the cache
+ if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
+ rpcCache.remove(routeId);
+ return;
+ }
+
+ // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
+ updatedRoutes = new LinkedHashSet<>(oldRoutes);
+ updatedRoutes.remove(route);
+ if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) {
+ return;
+ }
+
+ }
+ //the method did not already return means it failed to remove route in 10 attempts
+ throw new IllegalStateException("Failed to remove route [" + routeId + "]");
+ }
- try {
- log.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route);
- clusterGlobalServices.tbegin();
- threadSafeRemove(routeId, route);
- clusterGlobalServices.tcommit();
+ // /**
+ // * @deprecated doesn't do anything will be removed once listeners used
+ // * whiteboard pattern Registers listener for sending any change
+ // * notification
+ // * @param listener
+ // */
+ // @Override
+ // public void registerRouteChangeListener(RouteChangeListener listener) {
+ //
+ // }
+
+ // public void setRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.add(rcl);
+ // }else{
+ // log.warn("setRouteChangeListener called with null listener");
+ // }
+ // }
+ //
+ // public void unSetRouteChangeListener(RouteChangeListener rcl) {
+ // if(rcl != null){
+ // routeChangeListeners.remove(rcl);
+ // }else{
+ // log.warn("unSetRouteChangeListener called with null listener");
+ // }
+ // }
+
+ /**
+ * Returning the set of route change listeners for Unit testing Note: the
+ * package scope is default
+ *
+ * @return List of registered RouteChangeListener<I,R> listeners
+ */
+ // Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
+ // return routeChangeListeners;
+ // }
+ public void setClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ this.clusterGlobalServices = clusterGlobalServices;
+ }
- } catch (NotSupportedException | HeuristicRollbackException | RollbackException | HeuristicMixedException e) {
- throw new RoutingTableException("Transaction error - while trying to remove route id="
- + routeId, e);
- } catch (javax.transaction.SystemException e) {
- throw new SystemException("System error occurred - while trying to remove with value", e);
+ public void unsetClusterGlobalServices(final IClusterGlobalServices clusterGlobalServices) {
+ if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
+ this.clusterGlobalServices = null;
+ }
}
- }
- @Override
- public void removeRoutes(Set<I> routeIds, R route) throws RoutingTableException, SystemException {
- Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null");
- for (I routeId : routeIds){
- removeRoute(routeId, route);
+ /**
+ * Finds OR Creates clustered cache for Global RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ // let us check here if the cache already exists -- if so don't create
+ if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
+
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
+
+ } else {
+ globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
+ log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
+ }
}
- }
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 100 times then throw
- */
- private void threadSafeAdd(I routeId, R route) {
+ /**
+ * Finds OR Creates clustered cache for Routed RPCs
+ *
+ * @throws CacheExistException -- cluster global services exception when cache exist
+ * @throws CacheConfigException -- cluster global services exception during cache config
+ * @throws CacheListenerAddException -- cluster global services exception during adding of listener
+ */
+
+ @SuppressWarnings("unchecked")
+ void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
+ CacheListenerAddException {
+ // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
+ // should be caching?
+
+ if (clusterGlobalServices.existCache(RPC_CACHE)){
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
+ log.debug("Cache exists [{}] ", RPC_CACHE);
+ return;
+ }
+
+ //cache doesnt exist, create one
+ rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
+ log.debug("Cache created [{}] ", RPC_CACHE);
+ }
- for (int i=0;i<100;i++){
- LinkedHashSet<R> updatedRoutes = new LinkedHashSet<>();
- updatedRoutes.add(route);
- LinkedHashSet<R> oldRoutes = rpcCache.putIfAbsent(routeId, updatedRoutes);
- if (oldRoutes == null) return;
+ /**
+ * Function called by the dependency manager when all the required
+ * dependencies are satisfied
+ */
+ void init(final Component c) {
+ try {
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.add(route);
+ findOrCreateGlobalRpcCache();
+ findOrCreateRpcCache();
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
+ // chain the original exception so the real failure is not lost from the stack trace
+ throw new IllegalStateException("could not construct routing table cache", e);
+ }
 }
- //the method did not already return means it failed to add route in 10 attempts
- throw new IllegalStateException("Failed to add route [" + routeId + "]");
- }
-
- /**
- * This method guarantees that no 2 thread over write each other's changes.
- * Just so that we dont end up in infinite loop, it tries for 10 times then throw
- */
- private void threadSafeRemove(I routeId, R route) {
- LinkedHashSet<R> updatedRoutes = null;
- for (int i=0;i<10;i++){
- LinkedHashSet<R> oldRoutes = rpcCache.get(routeId);
-
- // if route to be deleted is the only entry in the set then remove routeId from the cache
- if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){
- rpcCache.remove(routeId);
- return;
- }
-
- // if there are multiple routes for this routeId, remove the route to be deleted only from the set.
- updatedRoutes = new LinkedHashSet<>(oldRoutes);
- updatedRoutes.remove(route);
- if (rpcCache.replace(routeId, oldRoutes, updatedRoutes)) return;
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, R> getGlobalRpcCache() {
+ return this.globalRpcCache;
}
- //the method did not already return means it failed to remove route in 10 attempts
- throw new IllegalStateException("Failed to remove route [" + routeId + "]");
- }
-
-
-// /**
-// * @deprecated doesn't do anything will be removed once listeners used
-// * whiteboard pattern Registers listener for sending any change
-// * notification
-// * @param listener
-// */
-// @Override
-// public void registerRouteChangeListener(RouteChangeListener listener) {
-//
-// }
-
-// public void setRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.add(rcl);
-// }else{
-// log.warn("setRouteChangeListener called with null listener");
-// }
-// }
-//
-// public void unSetRouteChangeListener(RouteChangeListener rcl) {
-// if(rcl != null){
-// routeChangeListeners.remove(rcl);
-// }else{
-// log.warn("unSetRouteChangeListener called with null listener");
-// }
-// }
-
- /**
- * Returning the set of route change listeners for Unit testing Note: the
- * package scope is default
- *
- * @return List of registered RouteChangeListener<I,R> listeners
- */
-// Set<RouteChangeListener> getRegisteredRouteChangeListeners() {
-// return routeChangeListeners;
-// }
- public void setClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- this.clusterGlobalServices = clusterGlobalServices;
- }
-
- public void unsetClusterGlobalServices(IClusterGlobalServices clusterGlobalServices) {
- if ((clusterGlobalServices != null) && (this.clusterGlobalServices.equals(clusterGlobalServices))) {
- this.clusterGlobalServices = null;
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Global RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateGlobalRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- // let us check here if the cache already exists -- if so don't create
- if (!clusterGlobalServices.existCache(GLOBALRPC_CACHE)) {
-
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.createCache(GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", GLOBALRPC_CACHE);
-
- } else {
- globalRpcCache = (ConcurrentMap<I,R>) clusterGlobalServices.getCache(GLOBALRPC_CACHE);
- log.debug("Cache exists [{}] ", GLOBALRPC_CACHE);
- }
- }
-
- /**
- * Finds OR Creates clustered cache for Routed RPCs
- *
- * @throws CacheExistException -- cluster global services exception when cache exist
- * @throws CacheConfigException -- cluster global services exception during cache config
- * @throws CacheListenerAddException -- cluster global services exception during adding of listener
- */
-
- void findOrCreateRpcCache() throws CacheExistException, CacheConfigException,
- CacheListenerAddException {
- // TBD: HOW DO WE DECIDE ON PROPERTIES OF THE CACHE i.e. what duration it
- // should be caching?
-
- if (clusterGlobalServices.existCache(RPC_CACHE)){
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.getCache(RPC_CACHE);
- log.debug("Cache exists [{}] ", RPC_CACHE);
- return;
- }
-
- //cache doesnt exist, create one
- rpcCache = (ConcurrentMap<I,LinkedHashSet<R>>) clusterGlobalServices.createCache(RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL));
- log.debug("Cache created [{}] ", RPC_CACHE);
- }
-
-
- /**
- * Function called by the dependency manager when all the required
- * dependencies are satisfied
- */
- void init(Component c) {
- try {
- findOrCreateGlobalRpcCache();
- findOrCreateRpcCache();
-
- } catch (CacheExistException|CacheConfigException|CacheListenerAddException e) {
- throw new IllegalStateException("could not construct routing table cache");
+ /**
+ * Useful for unit testing <note>It has package
+ * scope</note>
+ */
+ ConcurrentMap<I, LinkedHashSet<R>> getRpcCache() {
+ return this.rpcCache;
}
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getGlobalRpcCache() {
- return this.globalRpcCache;
- }
-
- /**
- * Useful for unit testing <note>It has package
- * scope</note>
- */
- ConcurrentMap getRpcCache() {
- return this.rpcCache;
- }
-
- /**
- * This is used from integration test NP rest API to check out the result of the
- * cache population
- * <Note> For testing purpose only-- use it wisely</Note>
- *
- * @return
- */
- public String dumpGlobalRpcCache() {
- Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, R> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ /**
+ * This is used from integration test NP rest API to check out the result of the
+ * cache population
+ * <Note> For testing purpose only-- use it wisely</Note>
+ *
+ * @return
+ */
+ public String dumpGlobalRpcCache() {
+ Set<Map.Entry<I, R>> cacheEntrySet = this.globalRpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, R> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
-
- public String dumpRpcCache() {
- Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
- StringBuilder sb = new StringBuilder();
- for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
- sb.append("Key:").append(entry.getKey()).append("---->Value:")
- .append((entry.getValue() != null) ? entry.getValue() : "null")
- .append("\n");
+
+ public String dumpRpcCache() {
+ Set<Map.Entry<I, LinkedHashSet<R>>> cacheEntrySet = this.rpcCache.entrySet();
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry<I, LinkedHashSet<R>> entry : cacheEntrySet) {
+ sb.append("Key:").append(entry.getKey()).append("---->Value:")
+ .append((entry.getValue() != null) ? entry.getValue() : "null")
+ .append("\n");
+ }
+ return sb.toString();
}
- return sb.toString();
- }
- /**
- * Invoked when a new entry is available in the cache, the key is only
- * provided, the value will come as an entryUpdate invocation
- *
- * @param key Key for the entry just created
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryCreated(I key, String cacheName, boolean originLocal) {
- // TBD: do we require this.
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ /**
+ * Invoked when a new entry is available in the cache, the key is only
+ * provided, the value will come as an entryUpdate invocation
+ *
+ * @param key Key for the entry just created
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryCreated(final I key, final String cacheName, final boolean originLocal) {
+ // TBD: do we require this.
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryCreated routeId = " + key + " cacheName=" + cacheName);
+ }
}
- }
-
- /**
- * Called anytime a given entry is updated
- *
- * @param key Key for the entry modified
- * @param new_value the new value the key will have
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryUpdated(I key, R new_value, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
- + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given entry is updated
+ *
+ * @param key Key for the entry modified
+ * @param new_value the new value the key will have
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryUpdated(final I key, final R new_value, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + ",value = " + new_value
+ + " ,cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteUpdated(key, new_value);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteUpdated(key, new_value);
-// }
-// }
- }
-
- /**
- * Called anytime a given key is removed from the ConcurrentHashMap we are
- * listening to.
- *
- * @param key Key of the entry removed
- * @param cacheName name of the cache for which update has been received
- * @param originLocal true if the event is generated from this node
- */
- @Override
- public void entryDeleted(I key, String cacheName, boolean originLocal) {
- if (log.isDebugEnabled()) {
- log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + " local = " + originLocal
- + " cacheName=" + cacheName + " originLocal=" + originLocal);
+
+ /**
+ * Called anytime a given key is removed from the ConcurrentHashMap we are
+ * listening to.
+ *
+ * @param key Key of the entry removed
+ * @param cacheName name of the cache for which update has been received
+ * @param originLocal true if the event is generated from this node
+ */
+ @Override
+ public void entryDeleted(final I key, final String cacheName, final boolean originLocal) {
+ if (log.isDebugEnabled()) {
+ log.debug("RoutingTableUpdates: entryUpdated routeId = " + key + " local = " + originLocal
+ + " cacheName=" + cacheName + " originLocal=" + originLocal);
+ }
+ // if (!originLocal) {
+ // for (RouteChangeListener rcl : routeChangeListeners) {
+ // rcl.onRouteDeleted(key);
+ // }
+ // }
}
-// if (!originLocal) {
-// for (RouteChangeListener rcl : routeChangeListeners) {
-// rcl.onRouteDeleted(key);
-// }
-// }
- }
-}
\ No newline at end of file
+}
package org.opendaylight.controller.sal.connector.remoterpc.impl;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
+
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
import junit.framework.Assert;
+
import org.apache.felix.dm.Component;
import org.junit.After;
import org.junit.Before;
import org.opendaylight.controller.clustering.services.IClusterServices;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTable;
-import org.opendaylight.controller.sal.connector.remoterpc.api.RoutingTableException;
-import org.opendaylight.controller.sal.connector.remoterpc.api.SystemException;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.Set;
-import java.util.concurrent.*;
-
-import static org.mockito.Mockito.*;
-
public class RoutingTableImplTest {
- private final URI namespace = URI.create("http://cisco.com/example");
- private final QName QNAME = new QName(namespace, "global");
-
- private IClusterGlobalServices clusterService;
- private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
- ConcurrentMap mockGlobalRpcCache;
- ConcurrentMap mockRpcCache;
-
- @Before
- public void setUp() throws Exception{
- clusterService = mock(IClusterGlobalServices.class);
- routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
- mockGlobalRpcCache = new ConcurrentHashMap<>();
- mockRpcCache = new ConcurrentHashMap<>();
- createRoutingTableCache();
- }
-
- @After
- public void tearDown(){
- reset(clusterService);
- mockGlobalRpcCache = null;
- mockRpcCache = null;
- }
+ private final URI namespace = URI.create("http://cisco.com/example");
+ private final QName QNAME = new QName(namespace, "global");
+
+ private IClusterGlobalServices clusterService;
+ private RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String> routingTable;
+ ConcurrentMap mockGlobalRpcCache;
+ ConcurrentMap mockRpcCache;
+
+ @Before
+ public void setUp() throws Exception{
+ clusterService = mock(IClusterGlobalServices.class);
+ routingTable = new RoutingTableImpl<RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier>, String>();
+ mockGlobalRpcCache = new ConcurrentHashMap<>();
+ mockRpcCache = new ConcurrentHashMap<>();
+ createRoutingTableCache();
+ }
- @Test
- public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
+ @After
+ public void tearDown(){
+ reset(clusterService);
+ mockGlobalRpcCache = null;
+ mockRpcCache = null;
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void addGlobalRoute_ValidArguments_ShouldAdd() throws Exception {
- final String expectedRoute = "172.27.12.1:5000";
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
- Assert.assertEquals(mockGlobalRpcCache, latestCache);
- Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
- }
+ final String expectedRoute = "172.27.12.1:5000";
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test (expected = RoutingTable.DuplicateRouteException.class)
- public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
+ ConcurrentMap latestCache = routingTable.getGlobalRpcCache();
+ Assert.assertEquals(mockGlobalRpcCache, latestCache);
+ Assert.assertEquals(expectedRoute, latestCache.get(routeIdentifier));
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
+ @Test (expected = RoutingTable.DuplicateRouteException.class)
+ public void addGlobalRoute_DuplicateRoute_ShouldThrow() throws Exception{
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.addGlobalRoute(routeIdentifier, new String());
- routingTable.addGlobalRoute(routeIdentifier, new String());
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
- @Test
- public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ routingTable.addGlobalRoute(routeIdentifier, new String());
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- String expectedRoute = "172.27.12.1:5000";
+ @Test
+ public void getGlobalRoute_ExistingRouteId_ShouldReturnRoute() throws Exception {
- routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ String expectedRoute = "172.27.12.1:5000";
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertEquals(expectedRoute, actualRoute);
- }
+ routingTable.addGlobalRoute(routeIdentifier, expectedRoute);
- @Test
- public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertEquals(expectedRoute, actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void getGlobalRoute_NonExistentRouteId_ShouldReturnNull() throws Exception {
- String actualRoute = (String) routingTable.getGlobalRoute(routeIdentifier);
- Assert.assertNull(actualRoute);
- }
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- @Test
- public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
+ String actualRoute = routingTable.getGlobalRoute(routeIdentifier);
+ Assert.assertNull(actualRoute);
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_ExistingRouteId_ShouldRemove() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
- routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
- Assert.assertTrue(cache.size() == 1);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
+ routingTable.addGlobalRoute(routeIdentifier, "172.27.12.1:5000");
+ Assert.assertTrue(cache.size() == 1);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
+ }
- Assert.assertNotNull(mockGlobalRpcCache);
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
+ @Test
+ public void removeGlobalRoute_NonExistentRouteId_ShouldDoNothing() throws Exception {
- ConcurrentMap cache = routingTable.getGlobalRpcCache();
- Assert.assertTrue(cache.size() == 0);
+ Assert.assertNotNull(mockGlobalRpcCache);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = getRouteIdentifier();
- routingTable.removeGlobalRoute(routeIdentifier);
- Assert.assertTrue(cache.size() == 0);
+ ConcurrentMap cache = routingTable.getGlobalRpcCache();
+ Assert.assertTrue(cache.size() == 0);
- }
+ routingTable.removeGlobalRoute(routeIdentifier);
+ Assert.assertTrue(cache.size() == 0);
- @Test
- public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
- Assert.assertTrue(mockRpcCache.size() == 0);
+ }
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ @Test
+ public void addRoute_ForNewRouteId_ShouldAddRoute() throws Exception {
+ Assert.assertTrue(mockRpcCache.size() == 0);
- routingTable.addRoute(routeId, new String());
- Assert.assertTrue(mockRpcCache.size() == 1);
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(1, routes.size());
- }
+ routingTable.addRoute(routeId, new String());
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(1, routes.size());
+ }
- Assert.assertTrue(mockRpcCache.size() == 0);
+ @Test
+ public void addRoute_ForExistingRouteId_ShouldAppendRoute() throws Exception {
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
+ Assert.assertTrue(mockRpcCache.size() == 0);
- String route_1 = "10.0.0.1:5955";
- String route_2 = "10.0.0.2:5955";
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ String route_1 = "10.0.0.1:5955";
+ String route_2 = "10.0.0.2:5955";
- Assert.assertTrue(mockRpcCache.size() == 1);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Set<String> routes = routingTable.getRoutes(routeId);
- Assert.assertEquals(2, routes.size());
- Assert.assertTrue(routes.contains(route_1));
- Assert.assertTrue(routes.contains(route_2));
- }
+ Assert.assertTrue(mockRpcCache.size() == 1);
- @Test
- public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
- ExecutorService threadPool = Executors.newCachedThreadPool();
+ Set<String> routes = routingTable.getRoutes(routeId);
+ Assert.assertEquals(2, routes.size());
+ Assert.assertTrue(routes.contains(route_1));
+ Assert.assertTrue(routes.contains(route_2));
+ }
- int numOfRoutesToAdd = 100;
- String routePrefix_1 = "10.0.0.1:555";
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
- String routePrefix_2 = "10.0.0.1:556";
- threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+ @Test
+ public void addRoute_UsingMultipleThreads_ShouldNotOverwrite(){
+ ExecutorService threadPool = Executors.newCachedThreadPool();
+
+ int numOfRoutesToAdd = 100;
+ String routePrefix_1 = "10.0.0.1:555";
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_1, routeId));
+ String routePrefix_2 = "10.0.0.1:556";
+ threadPool.submit(addRoutes(numOfRoutesToAdd, routePrefix_2, routeId));
+
+ // wait for all tasks to complete; timeout in 10 sec
+ threadPool.shutdown();
+ try {
+ threadPool.awaitTermination(10, TimeUnit.SECONDS); //
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
- // wait for all tasks to complete; timeout in 10 sec
- threadPool.shutdown();
- try {
- threadPool.awaitTermination(10, TimeUnit.SECONDS); //
- } catch (InterruptedException e) {
- e.printStackTrace();
+ Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
}
- Assert.assertEquals(2*numOfRoutesToAdd, routingTable.getRoutes(routeId).size());
- }
-
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRouteId_shouldThrowNpe() throws Exception {
- routingTable.addRoute(null, new String());
- }
+ routingTable.addRoute(null, new String());
+ }
- @Test(expected = NullPointerException.class)
- public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
+ @Test(expected = NullPointerException.class)
+ public void addRoute_NullRoute_shouldThrowNpe() throws Exception{
- routingTable.addRoute(getRouteIdentifier(), null);
- }
+ routingTable.addRoute(getRouteIdentifier(), null);
+ }
- @Test (expected = UnsupportedOperationException.class)
- public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, new String());
+ @Test (expected = UnsupportedOperationException.class)
+ public void getRoutes_Call_ShouldReturnImmutableCopy() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, new String());
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- routes.add(new String()); //can not be modified; should throw
- }
+ routes.add(new String()); //can not be modified; should throw
+ }
- @Test
- public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- routingTable.addRoute(routeId, "10.0.0.1:5555");
- routingTable.addRoute(routeId, "10.0.0.2:5555");
+ @Test
+ public void getRoutes_With2RoutesFor1RouteId_ShouldReturnASetWithSize2() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ routingTable.addRoute(routeId, "10.0.0.1:5555");
+ routingTable.addRoute(routeId, "10.0.0.2:5555");
- Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
+ Set<String> routes = routingTable.getRoutes(routeId); //returns Immutable Set
- Assert.assertEquals(2, routes.size());
- }
+ Assert.assertEquals(2, routes.size());
+ }
- @Test
- public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
- throws Exception {
+ @Test
+ public void getLastAddedRoute_WhenMultipleRoutesExists_ShouldReturnLatestRoute()
+ throws Exception {
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
- }
+ Assert.assertEquals(route_2, routingTable.getLastAddedRoute(routeId));
+ }
- @Test
- public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
- String route_2 = "10.0.0.2:5555";
+ @Test
+ public void removeRoute_WhenMultipleRoutesExist_RemovesGivenRoute() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
+ String route_2 = "10.0.0.2:5555";
- routingTable.addRoute(routeId, route_1);
- routingTable.addRoute(routeId, route_2);
+ routingTable.addRoute(routeId, route_1);
+ routingTable.addRoute(routeId, route_2);
- Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
+ Assert.assertEquals(2, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.removeRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- }
+ }
- @Test
- public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
- Assert.assertNotNull(routingTable);
- RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
- String route_1 = "10.0.0.1:5555";
+ @Test
+ public void removeRoute_WhenOnlyOneRouteExists_RemovesRouteId() throws Exception{
+ Assert.assertNotNull(routingTable);
+ RpcRouter.RouteIdentifier routeId = getRouteIdentifier();
+ String route_1 = "10.0.0.1:5555";
- routingTable.addRoute(routeId, route_1);
- Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
+ routingTable.addRoute(routeId, route_1);
+ Assert.assertEquals(1, routingTable.getRoutes(routeId).size());
- routingTable.removeRoute(routeId, route_1);
- ConcurrentMap cache = routingTable.getRpcCache();
- Assert.assertFalse(cache.containsKey(routeId));
+ routingTable.removeRoute(routeId, route_1);
+ ConcurrentMap cache = routingTable.getRpcCache();
+ Assert.assertFalse(cache.containsKey(routeId));
- }
+ }
- /*
- * Private helper methods
- */
- private void createRoutingTableCache() throws Exception {
+ /*
+ * Private helper methods
+ */
+ private void createRoutingTableCache() throws Exception {
- //here init
- Component c = mock(Component.class);
+ //here init
+ Component c = mock(Component.class);
- when(clusterService.existCache(
- RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.GLOBALRPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockGlobalRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.GLOBALRPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockGlobalRpcCache);
- when(clusterService.existCache(
- RoutingTableImpl.RPC_CACHE)).thenReturn(false);
+ when(clusterService.existCache(
+ RoutingTableImpl.RPC_CACHE)).thenReturn(false);
- when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
- EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
- thenReturn(mockRpcCache);
+ when(clusterService.createCache(RoutingTableImpl.RPC_CACHE,
+ EnumSet.of(IClusterServices.cacheMode.TRANSACTIONAL))).
+ thenReturn(mockRpcCache);
- doNothing().when(clusterService).tbegin();
- doNothing().when(clusterService).tcommit();
+ doNothing().when(clusterService).tbegin();
+ doNothing().when(clusterService).tcommit();
- routingTable.setClusterGlobalServices(this.clusterService);
- routingTable.init(c);
+ routingTable.setClusterGlobalServices(this.clusterService);
+ routingTable.init(c);
- Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
- Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
- }
+ Assert.assertEquals(mockGlobalRpcCache, routingTable.getGlobalRpcCache());
+ Assert.assertEquals(mockRpcCache, routingTable.getRpcCache());
+ }
- private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
- RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
- InstanceIdentifier identifier = mock(InstanceIdentifier.class);
- when(routeIdentifier.getType()).thenReturn(QNAME);
- when(routeIdentifier.getRoute()).thenReturn(identifier);
+ private RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> getRouteIdentifier(){
+ RpcRouter.RouteIdentifier<QName, QName, InstanceIdentifier> routeIdentifier = mock(RpcRouter.RouteIdentifier.class);
+ InstanceIdentifier identifier = mock(InstanceIdentifier.class);
+ when(routeIdentifier.getType()).thenReturn(QNAME);
+ when(routeIdentifier.getRoute()).thenReturn(identifier);
- return routeIdentifier;
- }
+ return routeIdentifier;
+ }
- private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
- return new Runnable() {
- @Override
- public void run() {
- for (int i=0;i<numRoutes;i++){
- String route = routePrefix + i;
- try {
- routingTable.addRoute(routeId, route);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- };
- }
+ private Runnable addRoutes(final int numRoutes, final String routePrefix, final RpcRouter.RouteIdentifier routeId){
+ return new Runnable() {
+ @Override
+ public void run() {
+ for (int i=0;i<numRoutes;i++){
+ String route = routePrefix + i;
+ try {
+ routingTable.addRoute(routeId, route);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ };
+ }
}
private ServiceTracker<BindingAwareBroker, BindingAwareBroker> tracker;
private BindingAwareBroker broker;
private ServiceTrackerCustomizer<BindingAwareBroker, BindingAwareBroker> customizer = new ServiceTrackerCustomizer<BindingAwareBroker, BindingAwareBroker>() {
-
+
@Override
public BindingAwareBroker addingService(ServiceReference<BindingAwareBroker> reference) {
broker = context.getService(reference);
mdActivationPool.execute(new Runnable() {
-
+
@Override
public void run() {
- onBrokerAvailable(broker, context);;
+ onBrokerAvailable(broker, context);
}
});
return broker;
}
-
+
@Override
public void modifiedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
// TODO Auto-generated method stub
-
+
}
@Override
public void removedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
// TODO Auto-generated method stub
-
+
}
};
-
-
+
+
@Override
public final void start(BundleContext context) throws Exception {
this.context = context;
startImpl(context);
tracker = new ServiceTracker<>(context, BindingAwareBroker.class, customizer);
tracker.open();
-
+
}
-
+
@Override
public final void stop(BundleContext context) throws Exception {
tracker.close();
stopImpl(context);
}
-
-
+
+
/**
* Called when this bundle is started (before
* {@link #onSessionInitiated(ProviderContext)} so the Framework can perform
* the bundle-specific activities necessary to start this bundle. This
* method can be used to register services or to allocate any resources that
* this bundle needs.
- *
+ *
* <p>
* This method must complete and return to its caller in a timely manner.
- *
+ *
* @param context
* The execution context of the bundle being started.
* @throws Exception
* started. There should be no active threads that were started by this
* bundle when this bundle returns. A stopped bundle must not call any
* Framework objects.
- *
+ *
* <p>
* This method must complete and return to its caller in a timely manner.
- *
+ *
* @param context The execution context of the bundle being stopped.
* @throws Exception If this method throws an exception, the bundle is still
* marked as stopped, and the Framework will remove the bundle's
protected void stopImpl(BundleContext context) {
// NOOP
}
-
+
protected abstract void onBrokerAvailable(BindingAwareBroker broker, BundleContext context);
-
+
protected void onBrokerRemoved(BindingAwareBroker broker, BundleContext context) {
-
+
}
}
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
import org.opendaylight.controller.sal.binding.api.BindingAwareProvider.ProviderFunctionality;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
* <li>Notification Service - see {@link NotificationService} and
* {@link NotificationProviderService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.binding.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.binding.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
*
* For more information about session-based access see {@link ConsumerContext}
* and {@link ProviderContext}
- *
- *
- *
*/
public interface BindingAwareBroker {
/**
* functionality) for the consumer and provides access to the SAL
* infrastructure services and other functionality provided by
* {@link Provider}s.
- *
- *
- *
*/
public interface ConsumerContext extends RpcConsumerRegistry {
* @return Session specific implementation of service
*/
<T extends BindingAwareService> T getSALService(Class<T> service);
-
-
}
/**
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext;
/**
- *
+ *
* Defines the component of controller and supplies additional metadata. A
* component of the controller or application supplies a concrete implementation
* of this interface.
- *
+ *
* A user-implemented component (application) which facilitates the SAL and SAL
* services to access infrastructure services or providers' functionality.
- *
- *
- *
+ *
+ *
+ *
*/
public interface BindingAwareConsumer {
/**
* Callback signaling initialization of the consumer session to the SAL.
- *
+ *
* The consumer MUST use the session for all communication with SAL or
* retrieving SAL infrastructure services.
- *
+ *
* This method is invoked by
* {@link BindingAwareBroker#registerConsumer(BindingAwareConsumer)}
- *
+ *
* @param session
* Unique session between consumer and SAL.
*/
import org.opendaylight.yangtools.yang.binding.RpcService;
/**
- *
+ *
* Defines the component of controller and supplies additional metadata. A
* component of the controller or application supplies a concrete implementation
* of this interface.
- *
+ *
* <p>
* A user-implemented component (application) which facilitates the SAL and SAL
* services to access infrastructure services and to provide functionality to
* {@link Consumer}s and other providers.
- *
- *
+ *
+ *
*/
public interface BindingAwareProvider {
/**
* Returns a set of provided implementations of YANG modules and their rpcs.
- *
- *
+ *
+ *
* @return Set of provided implementation of YANG modules and their Rpcs
*/
Collection<? extends RpcService> getImplementations();
/**
* Gets a set of implementations of provider functionality to be registered
* into system during the provider registration to the SAL.
- *
+ *
* <p>
* This method is invoked by {@link Broker#registerProvider(Provider)} to
* learn the initial provided functionality
- *
+ *
* @return Set of provider's functionality.
*/
Collection<? extends ProviderFunctionality> getFunctionality();
/**
* Functionality provided by the {@link BindingAwareProvider}
- *
+ *
* <p>
* Marker interface used to mark the interfaces describing specific
* functionality which could be exposed by providers to other components.
- *
- *
- *
+ *
+ *
+ *
*/
public interface ProviderFunctionality {
*/
package org.opendaylight.controller.sal.binding.api;
-import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext;
-
/**
- *
* Session-specific instance of the broker functionality.
- *
+ *
* <p>
* BindingAwareService is marker interface for infrastructure services provided
* by the SAL. These services are session-specific, each
* {@link BindingAwareConsumer} and {@link BindingAwareProvider} usually has own
* instance of the service with it's own context.
- *
+ *
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerContext#getSALService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext#getSALService(Class)} method on session
* assigned to the consumer.
- *
+ *
* <p>
- * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
+ * {@link BindingAwareService} and {@link BindingAwareProvider} may seem
* similar, but provider provides YANG model-based functionality and
* {@link BindingAwareProvider} exposes the necessary supporting functionality
* to implement specific functionality of YANG and to reuse it in the
- * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}
- * s.
- *
- *
- *
+ * development of {@link BindingAwareConsumer}s and {@link BindingAwareProvider}s.
*/
public interface BindingAwareService {
* @param <T> Notification type
*/
public interface NotificationListener<T extends Notification> extends EventListener {
- /**
- * Invoked to deliver the notification. Note that this method may be invoked
- * from a shared thread pool, so implementations SHOULD NOT perform CPU-intensive
- * operations and they definitely MUST NOT invoke any potentially blocking
- * operations.
- *
- * @param notification Notification being delivered.
- */
+ /**
+ * Invoked to deliver the notification. Note that this method may be invoked
+ * from a shared thread pool, so implementations SHOULD NOT perform CPU-intensive
+ * operations and they definitely MUST NOT invoke any potentially blocking
+ * operations.
+ *
+ * @param notification Notification being delivered.
+ */
void onNotification(T notification);
}
/**
* Base interface defining contract for retrieving MD-SAL
* version of RpcServices
- *
+ *
*/
public interface RpcConsumerRegistry extends BindingAwareService {
/**
* Returns a session specific instance (implementation) of requested
* YANG module implementation / service provided by consumer.
- *
+ *
* @return Session specific implementation of service
*/
<T extends RpcService> T getRpcService(Class<T> module);
* @return new blank data modification transaction.
*/
@Override
- DataModificationTransaction beginTransaction();
+ DataModificationTransaction beginTransaction();
/**
* Reads data subtree from configurational store.
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler;
import org.opendaylight.controller.md.sal.common.api.data.DataModification;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.common.RpcResult;
public interface DataModificationTransaction extends DataModification<InstanceIdentifier<? extends DataObject>, DataObject> {
-
/**
* Returns an unique identifier for transaction
*
*/
@Override
- public Object getIdentifier();
+ Object getIdentifier();
/**
* Initiates a two-phase commit of candidate data.
* of this changes.
*
*
- * @see DataCommitHandler for further information how two-phase commit is
+ * @see org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler for further information how two-phase commit is
* processed.
* @param store
* Identifier of the store, where commit should occur.
* encountered errors, if commit was not successful.
*/
@Override
- public Future<RpcResult<TransactionStatus>> commit();
-
-
+ Future<RpcResult<TransactionStatus>> commit();
/**
* Register a listener for transaction
*/
ListenerRegistration<DataTransactionListener> registerListener(DataTransactionListener listener);
-
-
/**
* Listener for transaction state changes
- *
- *
*/
public interface DataTransactionListener extends EventListener {
/**
*/
package org.opendaylight.controller.sal.binding.api.data;
-
import org.opendaylight.controller.md.sal.common.api.data.DataProvisionService;
import org.opendaylight.controller.md.sal.common.api.data.DataReader;
-import org.opendaylight.controller.sal.binding.api.BindingAwareProvider;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
/**
- * DataProviderService is common access point for {@link BindingAwareProvider} providers
+ * DataProviderService is common access point for {@link org.opendaylight.controller.sal.binding.api.BindingAwareProvider} providers
* to access data trees described by the YANG model.
- *
*/
public interface DataProviderService extends DataBrokerService, DataProvisionService<InstanceIdentifier<? extends DataObject>, DataObject> {
-
-
/**
* Registers a data reader for particular subtree of overal YANG data tree.
*
* Provider's version of Mount Point, this version allows access to MD-SAL
* services specific for this mountpoint and registration / provision of
* interfaces for mount point.
- *
+ *
* @author ttkacik
- *
+ *
*/
public interface MountProviderInstance //
extends //
public final Class<? extends RpcService> rpcService;
public final Class<? extends BaseIdentity> routingContext;
-
+
private RpcContextIdentifier(Class<? extends RpcService> rpcService, Class<? extends BaseIdentity> routingContext) {
super();
this.rpcService = rpcService;
public Class<? extends BaseIdentity> getRoutingContext() {
return routingContext;
}
-
+
public static final RpcContextIdentifier contextForGlobalRpc(Class<? extends RpcService> serviceType) {
return new RpcContextIdentifier(serviceType, null);
}
-
+
public static final RpcContextIdentifier contextFor(Class<? extends RpcService> serviceType,Class<? extends BaseIdentity> routingContext) {
return new RpcContextIdentifier(serviceType, routingContext);
}
/**
* Updates route for particular path to specified instance of
* {@link RpcService}.
- *
+ *
* @param path
* Path for which RpcService routing is to be updated
* @param service
/**
* Deletes a route for particular path
- *
+ *
* @param path
* Path for which
*/
void removeRoute(InstanceIdentifier<?> path);
/**
- *
+ *
*/
S getRoute(InstanceIdentifier<?> nodeInstance);
/**
- *
+ *
* @return
*/
Map<InstanceIdentifier<?>, S> getRoutes();
*/
public class BindingBrokerImplModuleFactory extends org.opendaylight.controller.config.yang.md.sal.binding.impl.AbstractBindingBrokerImplModuleFactory {
-
+
@Override
public Module createModule(String instanceName, DependencyResolver dependencyResolver, BundleContext bundleContext) {
BindingBrokerImplModule module = (BindingBrokerImplModule) super.createModule(instanceName, dependencyResolver, bundleContext);
module.setBundleContext(bundleContext);
return module;
}
-
+
@Override
public Module createModule(String instanceName, DependencyResolver dependencyResolver,
DynamicMBeanWithInstance old, BundleContext bundleContext) throws Exception {
import org.opendaylight.controller.sal.binding.api.rpc.RpcRouter;
import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.RpcService;
-import org.opendaylight.yangtools.yang.binding.annotations.RoutingContext;
public interface RuntimeCodeGenerator {
/**
* Returns an instance of provided RpcService type which delegates all calls
* to the delegate.
- *
+ *
* <p>
* Returned instance:
* <ul>
* <li>implements provided subclass of RpcService type and
- * {@link DelegateProxy} interface.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy} interface.
* <li>
* <p>
* delegates all invocations of methods, which are defined in RpcService
* subtype to delegate which is defined by
- * {@link DelegateProxy#setDelegate(Object)}.
+ * {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)}.
* <p>
* If delegate is not defined (<code>getDelegate() == null</code>)
* implementation throws {@link IllegalStateException}
- * <li>{@link DelegateProxy#getDelegate()} - returns the delegate to which
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#getDelegate()} - returns the delegate to which
* all calls are delegated.
- * <li>{@link DelegateProxy#setDelegate(Object)} - sets the delegate for
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy#setDelegate(Object)} - sets the delegate for
* particular instance
- *
+ *
* </ul>
- *
+ *
* @param serviceType
* - Subclass of RpcService for which direct proxy is to be
* generated.
* @return Instance of RpcService of provided serviceType which implements
- * and {@link DelegateProxy}
+ * and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* @throws IllegalArgumentException
- *
+ *
*/
<T extends RpcService> T getDirectProxyFor(Class<T> serviceType) throws IllegalArgumentException;
/**
* Returns an instance of provided RpcService type which routes all calls to
* other instances selected on particular input field.
- *
+ *
* <p>
* Returned instance:
* <ul>
* <li>Implements:
* <ul>
- * <li>{@link DelegateProxy}
+ * <li>{@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
* <li>{@link RpcRouter}
* </ul>
* <li>
* <ul>
* <li>
* Implementation uses
- * {@link RpcRouter#getService(Class, InstanceIdentifier)} method to
+ * {@link RpcRouter#getService(Class, org.opendaylight.yangtools.yang.binding.InstanceIdentifier)} method to
* retrieve particular instance to which call will be routed.
* <li>
- * Instance of {@link InstanceIdentifier} is determined by first argument of
+ * Instance of {@link org.opendaylight.yangtools.yang.binding.InstanceIdentifier} is determined by first argument of
* method and is retrieved via method which is annotated with
- * {@link RoutingContext}. Class representing Routing Context Identifier is
- * retrieved by {@link RoutingContext}.
- * <li>If first argument is not defined / {@link RoutingContext} annotation
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * Class representing Routing Context Identifier is retrieved by a
+ * {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext}.
+ * <li>If first argument is not defined / {@link org.opendaylight.yangtools.yang.binding.annotations.RoutingContext} annotation
* is not present on any field invocation will be delegated to default
* service {@link RpcRouter#getDefaultService()}.
* </ul>
- *
+ *
* @param serviceType
* - Subclass of RpcService for which Router is to be generated.
* @return Instance of RpcService of provided serviceType which implements
- * also {@link RpcRouter}<T> and {@link DelegateProxy}
+ * also {@link RpcRouter}<T> and {@link org.opendaylight.controller.sal.binding.spi.DelegateProxy}
*/
<T extends RpcService> RpcRouter<T> getRouterFor(Class<T> serviceType,String name) throws IllegalArgumentException;
+++ /dev/null
-/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sal.binding.codegen;
-
-import java.lang.reflect.Method;
-
-import org.opendaylight.yangtools.yang.binding.Notification;
-
-public final class YangtoolsMappingHelper {
- private YangtoolsMappingHelper() {
- throw new UnsupportedOperationException("Utility class");
- }
-
- public static boolean isNotificationCallback(final Method it) {
- return it.getName().startsWith("on") && (it.getParameterTypes().length == 1) &&
- Notification.class.isAssignableFrom(it.getParameterTypes()[0]);
- }
-}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.binding.codegen.impl;
+
+import java.util.Map;
+import java.util.WeakHashMap;
+
+import javassist.ClassPool;
+import javassist.CtClass;
+import javassist.CtMethod;
+import javassist.NotFoundException;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import org.eclipse.xtext.xbase.lib.Extension;
+import org.opendaylight.controller.sal.binding.api.rpc.RpcRouter;
+import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory;
+import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.binding.NotificationListener;
+import org.opendaylight.yangtools.yang.binding.RpcService;
+import org.opendaylight.yangtools.yang.binding.annotations.RoutingContext;
+import org.opendaylight.yangtools.yang.binding.util.ClassLoaderUtils;
+
+import com.google.common.base.Supplier;
+
+abstract class AbstractRuntimeCodeGenerator implements org.opendaylight.controller.sal.binding.codegen.RuntimeCodeGenerator, NotificationInvokerFactory {
+ @GuardedBy("this")
+ private final Map<Class<? extends NotificationListener>, RuntimeGeneratedInvokerPrototype> invokerClasses = new WeakHashMap<>();
+ private final CtClass brokerNotificationListener;
+
+ @Extension
+ protected final JavassistUtils utils;
+
+ protected AbstractRuntimeCodeGenerator(final ClassPool pool) {
+ utils = JavassistUtils.forClassPool(pool);
+
+ /*
+ * Make sure Javassist ClassPool sees the classloader of RpcService
+ */
+ utils.ensureClassLoader(RpcService.class);
+
+ brokerNotificationListener = utils.asCtClass(org.opendaylight.controller.sal.binding.api.NotificationListener.class);
+ }
+
+ protected final CtClass getBrokerNotificationListener() {
+ return brokerNotificationListener;
+ }
+
+ protected abstract RuntimeGeneratedInvokerPrototype generateListenerInvoker(Class<? extends NotificationListener> cls);
+ protected abstract <T extends RpcService> Supplier<T> directProxySupplier(final Class<T> serviceType);
+ protected abstract <T extends RpcService> Supplier<T> routerSupplier(final Class<T> serviceType, RpcServiceMetadata metadata);
+
+ private RpcServiceMetadata getRpcMetadata(final CtClass iface) throws ClassNotFoundException, NotFoundException {
+ final RpcServiceMetadata metadata = new RpcServiceMetadata();
+
+ for (CtMethod method : iface.getMethods()) {
+ if (iface.equals(method.getDeclaringClass()) && method.getParameterTypes().length == 1) {
+ final RpcMetadata routingPair = getRpcMetadata(method);
+ if (routingPair != null) {
+ metadata.addContext(routingPair.getContext());
+ metadata.addRpcMethod(method.getName(), routingPair);
+
+ /*
+ * Force-load the RPC class representing the "input" of this RPC.
+ *
+ * FIXME: this is pre-existing side-effect of the original code, which
+ * kept a reference to the loaded class, but it did not use it.
+ *
+ * There was no explanation as to why forcing this load was
+ * necessary. As far as I can tell now is that it forces the
+ * resolution of method arguments, which would (according to
+ * my reading of JLS) occur only when the method is invoked via
+ * binding-aware class action, not when coming from
+ * binding-independent world. Whether that makes sense or not,
+ * remains to be investigated.
+ */
+ Thread.currentThread().getContextClassLoader().loadClass(routingPair.getInputType().getName());
+ }
+ }
+ }
+
+ return metadata;
+ }
+
+ private RpcMetadata getRpcMetadata(final CtMethod method) throws NotFoundException {
+ final CtClass inputClass = method.getParameterTypes()[0];
+ return rpcMethodMetadata(inputClass, inputClass, method.getName());
+ }
+
+ private RpcMetadata rpcMethodMetadata(final CtClass dataClass, final CtClass inputClass, final String rpcMethod) throws NotFoundException {
+ for (CtMethod method : dataClass.getMethods()) {
+ if (method.getName().startsWith("get") && method.getParameterTypes().length == 0) {
+ for (Object annotation : method.getAvailableAnnotations()) {
+ if (annotation instanceof RoutingContext) {
+ boolean encapsulated = !method.getReturnType().equals(utils.asCtClass(InstanceIdentifier.class));
+ return new RpcMetadata(rpcMethod, ((RoutingContext)annotation).value(), method, encapsulated, inputClass);
+ }
+ }
+ }
+ }
+
+ for (CtClass iface : dataClass.getInterfaces()) {
+ final RpcMetadata ret = rpcMethodMetadata(iface, inputClass, rpcMethod);
+ if(ret != null) {
+ return ret;
+ }
+ }
+ return null;
+ }
+
+ /**
+  * Look up (or lazily generate) the invoker prototype for a notification
+  * listener class. Generated prototypes are cached in invokerClasses;
+  * generation is performed under the shared javassist lock with the
+  * listener's classloader installed as the context classloader.
+  */
+ private synchronized RuntimeGeneratedInvokerPrototype resolveInvokerClass(final Class<? extends NotificationListener> cls) {
+ RuntimeGeneratedInvokerPrototype invoker = invokerClasses.get(cls);
+ if (invoker != null) {
+ return invoker;
+ }
+
+ utils.getLock().lock();
+ try {
+ // Generate with the listener's classloader in effect so the emitted
+ // bytecode can resolve the listener's own types.
+ invoker = ClassLoaderUtils.withClassLoader(cls.getClassLoader(), new Supplier<RuntimeGeneratedInvokerPrototype>() {
+ @Override
+ public RuntimeGeneratedInvokerPrototype get() {
+ return generateListenerInvoker(cls);
+ }
+ });
+
+ invokerClasses.put(cls, invoker);
+ return invoker;
+ } finally {
+ utils.getLock().unlock();
+ }
+ }
+
+ // This generator doubles as its own NotificationInvokerFactory
+ @Override
+ public final NotificationInvokerFactory getInvokerFactory() {
+ return this;
+ }
+
+ /**
+  * Create a direct (non-routed) proxy for the given RPC service type.
+  * Class generation runs under the shared javassist lock, with the service
+  * type's classloader installed for the duration of the call.
+  */
+ @Override
+ public final <T extends RpcService> T getDirectProxyFor(final Class<T> serviceType) {
+ utils.getLock().lock();
+ try {
+ return ClassLoaderUtils.withClassLoader(serviceType.getClassLoader(), directProxySupplier(serviceType));
+ } finally {
+ utils.getLock().unlock();
+ }
+ }
+
+ /**
+  * Create a routed RPC router for the given service type. RPC metadata is
+  * extracted first (with the service type's classloader installed), then the
+  * router class itself is generated under the shared javassist lock.
+  *
+  * @param serviceType RPC service interface to route
+  * @param name symbolic name for the router instance
+  * @throws IllegalStateException if metadata for the service cannot be loaded
+  */
+ @Override
+ public final <T extends RpcService> RpcRouter<T> getRouterFor(final Class<T> serviceType, final String name) {
+ final RpcServiceMetadata metadata = ClassLoaderUtils.withClassLoader(serviceType.getClassLoader(), new Supplier<RpcServiceMetadata>() {
+ @Override
+ public RpcServiceMetadata get() {
+ try {
+ return getRpcMetadata(utils.asCtClass(serviceType));
+ } catch (ClassNotFoundException | NotFoundException e) {
+ // NOTE: this is String.format(), not an SLF4J logger, so the
+ // placeholder must be %s -- "{}" would be printed literally.
+ throw new IllegalStateException(String.format("Failed to load metadata for class %s", serviceType), e);
+ }
+ }
+ });
+
+ utils.getLock().lock();
+ try {
+ final T instance = ClassLoaderUtils.withClassLoader(serviceType.getClassLoader(), routerSupplier(serviceType, metadata));
+ return new RpcRouterCodegenInstance<T>(name, serviceType, instance, metadata.getContexts());
+ } finally {
+ utils.getLock().unlock();
+ }
+ }
+
+ /**
+  * Create a NotificationInvoker for the given listener instance, reusing the
+  * cached per-class prototype.
+  *
+  * @throws IllegalStateException if the generated invoker class cannot be instantiated
+  */
+ @Override
+ public NotificationInvoker invokerFor(final NotificationListener instance) {
+ final Class<? extends NotificationListener> cls = instance.getClass();
+ final RuntimeGeneratedInvokerPrototype prototype = resolveInvokerClass(cls);
+
+ try {
+ return RuntimeGeneratedInvoker.create(instance, prototype);
+ } catch (InstantiationException | IllegalAccessException e) {
+ throw new IllegalStateException(String.format("Failed to create invoker for %s", instance), e);
+ }
+ }
+}
--- /dev/null
+/**
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.binding.codegen.impl;
+
+import javassist.CtClass;
+import javassist.CtMethod;
+
+import org.opendaylight.yangtools.yang.binding.BaseIdentity;
+
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+
+/**
+ * Immutable value class describing a single routed RPC method: its routing
+ * context, the getter used to extract the route from the RPC input, and
+ * whether the route is encapsulated (i.e. not a bare InstanceIdentifier).
+ */
+final class RpcMetadata {
+ private final Class<? extends BaseIdentity> context;
+ private final CtMethod inputRouteGetter;
+ // Primitive boolean: the value is mandatory, so boxing buys nothing and
+ // only opens the door to accidental nulls.
+ private final boolean routeEncapsulated;
+ private final CtClass inputType;
+ private final String methodName;
+
+ public Class<? extends BaseIdentity> getContext() {
+ return context;
+ }
+
+ public CtMethod getInputRouteGetter() {
+ return inputRouteGetter;
+ }
+
+ public CtClass getInputType() {
+ return inputType;
+ }
+
+ public boolean isRouteEncapsulated() {
+ return routeEncapsulated;
+ }
+
+ /**
+  * @param methodName name of the RPC method (non-null)
+  * @param context routing context identity class (non-null)
+  * @param inputRouteGetter getter on the input carrying the route (non-null)
+  * @param routeEncapsulated true unless the getter returns an InstanceIdentifier directly
+  * @param inputType javassist representation of the RPC input class (non-null)
+  */
+ public RpcMetadata(final String methodName, final Class<? extends BaseIdentity> context, final CtMethod inputRouteGetter, final boolean routeEncapsulated, final CtClass inputType) {
+ this.inputRouteGetter = Preconditions.checkNotNull(inputRouteGetter);
+ this.methodName = Preconditions.checkNotNull(methodName);
+ this.inputType = Preconditions.checkNotNull(inputType);
+ this.context = Preconditions.checkNotNull(context);
+ this.routeEncapsulated = routeEncapsulated;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + methodName.hashCode();
+ result = prime * result + context.hashCode();
+ result = prime * result + inputRouteGetter.hashCode();
+ // Same constants Boolean.hashCode() uses for true/false
+ result = prime * result + (routeEncapsulated ? 1231 : 1237);
+ result = prime * result + inputType.hashCode();
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (!(obj instanceof RpcMetadata)) {
+ return false;
+ }
+ final RpcMetadata other = (RpcMetadata) obj;
+ if (!methodName.equals(other.methodName)) {
+ return false;
+ }
+ if (!context.equals(other.context)) {
+ return false;
+ }
+ if (!inputRouteGetter.equals(other.inputRouteGetter)) {
+ return false;
+ }
+ if (routeEncapsulated != other.routeEncapsulated) {
+ return false;
+ }
+ return inputType.equals(other.inputType);
+ }
+
+ @Override
+ public String toString() {
+ return Objects.toStringHelper(this)
+ .add("context", context)
+ .add("inputRouteGetter", inputRouteGetter)
+ .add("inputType", inputType)
+ .add("methodName", methodName)
+ .add("routeEncapsulated", routeEncapsulated)
+ .toString();
+ }
+}
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.concepts.util.ListenerRegistry;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
-import org.opendaylight.yangtools.yang.binding.DataContainer;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.RpcService;
import org.slf4j.Logger;
import com.google.common.collect.ImmutableSet;
public class RpcRouterCodegenInstance<T extends RpcService> implements //
- RpcRouter<T>, RouteChangeListener<Class<? extends BaseIdentity>, InstanceIdentifier<?>> {
+RpcRouter<T>, RouteChangeListener<Class<? extends BaseIdentity>, InstanceIdentifier<?>> {
private static final Logger LOG = LoggerFactory.getLogger(RpcRouterCodegenInstance.class);
private final String name;
@SuppressWarnings("unchecked")
- public RpcRouterCodegenInstance(String name,Class<T> type, T routerImpl, Set<Class<? extends BaseIdentity>> contexts,
- Set<Class<? extends DataContainer>> inputs) {
+ public RpcRouterCodegenInstance(final String name,final Class<T> type, final T routerImpl, final Iterable<Class<? extends BaseIdentity>> contexts) {
this.name = name;
this.listeners = ListenerRegistry.create();
this.serviceType = type;
@Override
@SuppressWarnings("unchecked")
- public <C extends BaseIdentity> RpcRoutingTable<C, T> getRoutingTable(Class<C> routeContext) {
+ public <C extends BaseIdentity> RpcRoutingTable<C, T> getRoutingTable(final Class<C> routeContext) {
return (RpcRoutingTable<C, T>) routingTables.get(routeContext);
}
@Override
public <L extends RouteChangeListener<Class<? extends BaseIdentity>, InstanceIdentifier<?>>> ListenerRegistration<L> registerRouteChangeListener(
- L listener) {
+ final L listener) {
return listeners.registerWithType(listener);
}
@Override
- public void onRouteChange(RouteChange<Class<? extends BaseIdentity>, InstanceIdentifier<?>> change) {
+ public void onRouteChange(final RouteChange<Class<? extends BaseIdentity>, InstanceIdentifier<?>> change) {
for (ListenerRegistration<RouteChangeListener<Class<? extends BaseIdentity>, InstanceIdentifier<?>>> listener : listeners) {
try {
listener.getInstance().onRouteChange(change);
}
@Override
- public T getService(Class<? extends BaseIdentity> context, InstanceIdentifier<?> path) {
+ public T getService(final Class<? extends BaseIdentity> context, final InstanceIdentifier<?> path) {
return routingTables.get(context).getRoute(path);
}
@Override
- public RoutedRpcRegistration<T> addRoutedRpcImplementation(T service) {
+ public RoutedRpcRegistration<T> addRoutedRpcImplementation(final T service) {
return new RoutedRpcRegistrationImpl(service);
}
@Override
- public RpcRegistration<T> registerDefaultService(T service) {
+ public RpcRegistration<T> registerDefaultService(final T service) {
// TODO Auto-generated method stub
RuntimeCodeHelper.setDelegate(invocationProxy, service);
return null;
private class RoutedRpcRegistrationImpl extends AbstractObjectRegistration<T> implements RoutedRpcRegistration<T> {
- public RoutedRpcRegistrationImpl(T instance) {
+ public RoutedRpcRegistrationImpl(final T instance) {
super(instance);
}
}
@Override
- public void registerPath(Class<? extends BaseIdentity> context, InstanceIdentifier<?> path) {
+ public void registerPath(final Class<? extends BaseIdentity> context, final InstanceIdentifier<?> path) {
routingTables.get(context).updateRoute(path, getInstance());
}
@Override
- public void unregisterPath(Class<? extends BaseIdentity> context, InstanceIdentifier<?> path) {
+ public void unregisterPath(final Class<? extends BaseIdentity> context, final InstanceIdentifier<?> path) {
routingTables.get(context).removeRoute(path, getInstance());
}
@Override
- public void registerInstance(Class<? extends BaseIdentity> context, InstanceIdentifier<?> instance) {
+ public void registerInstance(final Class<? extends BaseIdentity> context, final InstanceIdentifier<?> instance) {
registerPath(context, instance);
}
@Override
- public void unregisterInstance(Class<? extends BaseIdentity> context, InstanceIdentifier<?> instance) {
+ public void unregisterInstance(final Class<? extends BaseIdentity> context, final InstanceIdentifier<?> instance) {
unregisterPath(context, instance);
}
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
-import org.opendaylight.yangtools.yang.binding.DataContainer;
+
+import com.google.common.collect.Iterables;
final class RpcServiceMetadata {
- private final HashMap<Class<? extends DataContainer>, RpcMetadata> rpcInputs = new HashMap<>();
- private final HashSet<Class<? extends DataContainer>> supportedInputs = new HashSet<>();
- private final HashSet<Class<? extends BaseIdentity>> contexts = new HashSet<>();
- private final HashMap<String, RpcMetadata> rpcMethods = new HashMap<>();
+ private final Set<Class<? extends BaseIdentity>> contexts = new HashSet<>();
+ private final Map<String, RpcMetadata> rpcMethods = new HashMap<>();
+ private final Iterable<Class<? extends BaseIdentity>> roContexts = Iterables.unmodifiableIterable(contexts);
- public HashSet<Class<? extends BaseIdentity>> getContexts() {
- return this.contexts;
+ public Iterable<Class<? extends BaseIdentity>> getContexts() {
+ return roContexts;
}
- public HashMap<String, RpcMetadata> getRpcMethods() {
- return this.rpcMethods;
+ public RpcMetadata getRpcMethod(final String name) {
+ return rpcMethods.get(name);
}
- public HashMap<Class<? extends DataContainer>, RpcMetadata> getRpcInputs() {
- return this.rpcInputs;
+ public void addContext(final Class<? extends BaseIdentity> context) {
+ contexts.add(context);
}
- public HashSet<Class<? extends DataContainer>> getSupportedInputs() {
- return this.supportedInputs;
+ public void addRpcMethod(final String name, final RpcMetadata routingPair) {
+ rpcMethods.put(name, routingPair);
}
}
package org.opendaylight.controller.sal.binding.codegen.impl
import java.util.Map
-import java.util.WeakHashMap
import javassist.ClassPool
-import javassist.CtClass
-import javassist.CtMethod
-import javassist.LoaderClassPath
-import org.opendaylight.controller.sal.binding.codegen.RuntimeCodeHelper
-import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory
-import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory.NotificationInvoker
-import org.opendaylight.yangtools.sal.binding.generator.util.ClassLoaderUtils
-import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils
-import org.opendaylight.yangtools.yang.binding.BaseIdentity
-import org.opendaylight.yangtools.yang.binding.DataContainer
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier
import org.opendaylight.yangtools.yang.binding.Notification
-import org.opendaylight.yangtools.yang.binding.NotificationListener
import org.opendaylight.yangtools.yang.binding.RpcImplementation
-import org.opendaylight.yangtools.yang.binding.RpcService
-import org.opendaylight.yangtools.yang.binding.annotations.QName
-import org.opendaylight.yangtools.yang.binding.annotations.RoutingContext
-
-import static org.opendaylight.yangtools.concepts.util.ClassLoaderUtils.*
+import org.opendaylight.yangtools.yang.binding.util.BindingReflections
+import org.opendaylight.yangtools.yang.binding.util.ClassLoaderUtils
import static extension org.opendaylight.controller.sal.binding.codegen.RuntimeCodeSpecification.*
-import static extension org.opendaylight.controller.sal.binding.codegen.YangtoolsMappingHelper.*
-
-class RuntimeCodeGenerator implements org.opendaylight.controller.sal.binding.codegen.RuntimeCodeGenerator, NotificationInvokerFactory {
-
- val CtClass BROKER_NOTIFICATION_LISTENER;
- val extension JavassistUtils utils;
- val Map<Class<? extends NotificationListener>, RuntimeGeneratedInvokerPrototype> invokerClasses;
+class RuntimeCodeGenerator extends AbstractRuntimeCodeGenerator {
new(ClassPool pool) {
- utils = new JavassistUtils(pool);
- invokerClasses = new WeakHashMap();
- BROKER_NOTIFICATION_LISTENER = org.opendaylight.controller.sal.binding.api.NotificationListener.asCtClass;
- pool.appendClassPath(new LoaderClassPath(RpcService.classLoader));
+ super(pool)
}
- override <T extends RpcService> getDirectProxyFor(Class<T> iface) {
- val T instance = withClassLoaderAndLock(iface.classLoader,lock) [|
+ override directProxySupplier(Class iface) {
+ return [|
val proxyName = iface.directProxyName;
val potentialClass = ClassLoaderUtils.tryToLoadClassWithTCCL(proxyName)
if(potentialClass != null) {
- return potentialClass.newInstance as T;
+ return potentialClass.newInstance;
}
val supertype = iface.asCtClass
val createdCls = createClass(iface.directProxyName, supertype) [
'''
]
]
- return createdCls.toClass(iface.classLoader).newInstance as T
+ return createdCls.toClass(iface.classLoader).newInstance
]
- return instance;
}
- override <T extends RpcService> getRouterFor(Class<T> iface,String routerInstanceName) {
- val metadata = withClassLoader(iface.classLoader) [|
- val supertype = iface.asCtClass
- return supertype.rpcMetadata;
- ]
-
- val instance = <T>withClassLoaderAndLock(iface.classLoader,lock) [ |
+ override routerSupplier(Class iface, RpcServiceMetadata metadata) {
+ return [ |
val supertype = iface.asCtClass
val routerName = iface.routerName;
val potentialClass = ClassLoaderUtils.tryToLoadClassWithTCCL(routerName)
if(potentialClass != null) {
- return potentialClass.newInstance as T;
+ return potentialClass.newInstance;
}
val targetCls = createClass(iface.routerName, supertype) [
}
implementMethodsFrom(supertype) [
if (parameterTypes.size === 1) {
- val rpcMeta = metadata.rpcMethods.get(name);
+ val rpcMeta = metadata.getRpcMethod(name);
val bodyTmp = '''
{
final «InstanceIdentifier.name» identifier = $1.«rpcMeta.inputRouteGetter.name»()«IF rpcMeta.
'''
]
]
- return targetCls.toClass(iface.classLoader,iface.protectionDomain).newInstance as T
-
+ return targetCls.toClass(iface.classLoader,iface.protectionDomain).newInstance
];
- return new RpcRouterCodegenInstance(routerInstanceName,iface, instance, metadata.contexts,metadata.supportedInputs);
}
- private def RpcServiceMetadata getRpcMetadata(CtClass iface) {
- val metadata = new RpcServiceMetadata;
-
- iface.methods.filter[declaringClass == iface && parameterTypes.size === 1].forEach [ method |
- val routingPair = method.rpcMetadata;
- if (routingPair !== null) {
- metadata.contexts.add(routingPair.context)
- metadata.rpcMethods.put(method.name,routingPair)
- val input = routingPair.inputType.javaClass as Class<? extends DataContainer>;
- metadata.supportedInputs.add(input);
- metadata.rpcInputs.put(input,routingPair);
- }
- ]
- return metadata;
- }
-
- private def getRpcMetadata(CtMethod method) {
- val inputClass = method.parameterTypes.get(0);
- return inputClass.rpcMethodMetadata(inputClass,method.name);
- }
-
- private def RpcMetadata rpcMethodMetadata(CtClass dataClass,CtClass inputClass,String rpcMethod) {
- for (method : dataClass.methods) {
- if (method.name.startsWith("get") && method.parameterTypes.size === 0) {
- for (annotation : method.availableAnnotations) {
- if (annotation instanceof RoutingContext) {
- val encapsulated = !method.returnType.equals(InstanceIdentifier.asCtClass);
- return new RpcMetadata(null,rpcMethod,(annotation as RoutingContext).value, method, encapsulated,inputClass);
- }
- }
- }
- }
- for (iface : dataClass.interfaces) {
- val ret = rpcMethodMetadata(iface,inputClass,rpcMethod);
- if(ret != null) return ret;
- }
- return null;
- }
-
- private def getJavaClass(CtClass cls) {
- Thread.currentThread.contextClassLoader.loadClass(cls.name)
- }
-
- override getInvokerFactory() {
- return this;
- }
-
- override invokerFor(NotificationListener instance) {
- val cls = instance.class
- val prototype = resolveInvokerClass(cls);
-
- return new RuntimeGeneratedInvoker(instance, prototype)
- }
-
- protected def generateListenerInvoker(Class<? extends NotificationListener> iface) {
- val callbacks = iface.methods.filter[notificationCallback]
+ override generateListenerInvoker(Class iface) {
+ val callbacks = iface.methods.filter[BindingReflections.isNotificationCallback(it)]
val supportedNotification = callbacks.map[parameterTypes.get(0) as Class<? extends Notification>].toSet;
- val targetCls = createClass(iface.invokerName, BROKER_NOTIFICATION_LISTENER) [
+ val targetCls = createClass(iface.invokerName, brokerNotificationListener) [
field(DELEGATE_FIELD, iface)
- implementMethodsFrom(BROKER_NOTIFICATION_LISTENER) [
+ implementMethodsFrom(brokerNotificationListener) [
body = '''
{
«FOR callback : callbacks SEPARATOR " else "»
return new RuntimeGeneratedInvokerPrototype(supportedNotification,
finalClass as Class<? extends org.opendaylight.controller.sal.binding.api.NotificationListener<?>>);
}
-
-
-
-
-
- protected def resolveInvokerClass(Class<? extends NotificationListener> class1) {
- return <RuntimeGeneratedInvokerPrototype>withClassLoaderAndLock(class1.classLoader,lock) [|
- val invoker = invokerClasses.get(class1);
- if (invoker !== null) {
- return invoker;
- }
- val newInvoker = generateListenerInvoker(class1);
- invokerClasses.put(class1, newInvoker);
- return newInvoker
-
- ]
- }
-}
-
-@Data
-package class RuntimeGeneratedInvoker implements NotificationInvoker {
-
- @Property
- val NotificationListener delegate;
-
- @Property
- var org.opendaylight.controller.sal.binding.api.NotificationListener<Notification> invocationProxy;
-
- @Property
- var RuntimeGeneratedInvokerPrototype prototype;
-
- new(NotificationListener delegate, RuntimeGeneratedInvokerPrototype prototype) {
- _delegate = delegate;
- _prototype = prototype;
- _invocationProxy = prototype.protoClass.newInstance as org.opendaylight.controller.sal.binding.api.NotificationListener<Notification>;
- RuntimeCodeHelper.setDelegate(_invocationProxy, delegate);
- }
-
- override getSupportedNotifications() {
- prototype.supportedNotifications;
- }
-
- override close() {
- }
-}
-
-@Data
-package class RpcMetadata {
-
- @Property
- val QName qname;
-
- @Property
- val String methodName;
-
- @Property
- val Class<? extends BaseIdentity> context;
- @Property
- val CtMethod inputRouteGetter;
-
- @Property
- val boolean routeEncapsulated;
-
- @Property
- val CtClass inputType;
}
--- /dev/null
+/**
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.binding.codegen.impl;
+
+import java.util.Set;
+
+import org.opendaylight.controller.sal.binding.codegen.RuntimeCodeHelper;
+import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory.NotificationInvoker;
+import org.opendaylight.yangtools.yang.binding.Notification;
+import org.opendaylight.yangtools.yang.binding.NotificationListener;
+
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+
+/**
+ * NotificationInvoker backed by a runtime-generated proxy class. Instances
+ * are obtained via {@link #create(NotificationListener, RuntimeGeneratedInvokerPrototype)},
+ * which instantiates the prototype's proxy class and wires the delegate into it.
+ */
+final class RuntimeGeneratedInvoker implements NotificationInvoker {
+ private final org.opendaylight.controller.sal.binding.api.NotificationListener<Notification> invocationProxy;
+ private final RuntimeGeneratedInvokerPrototype prototype;
+ private final NotificationListener delegate;
+
+ @SuppressWarnings("unchecked")
+ private RuntimeGeneratedInvoker(final NotificationListener delegate, final RuntimeGeneratedInvokerPrototype prototype, final org.opendaylight.controller.sal.binding.api.NotificationListener<?> proxy) {
+ this.invocationProxy = (org.opendaylight.controller.sal.binding.api.NotificationListener<Notification>) proxy;
+ this.delegate = Preconditions.checkNotNull(delegate);
+ // Null-check for consistency with the other constructor arguments
+ this.prototype = Preconditions.checkNotNull(prototype);
+ }
+
+ /**
+  * Instantiate the prototype's generated proxy class and point it at the
+  * supplied delegate.
+  *
+  * @throws InstantiationException if the proxy class cannot be instantiated
+  * @throws IllegalAccessException if the proxy constructor is inaccessible
+  */
+ public static RuntimeGeneratedInvoker create(final NotificationListener delegate, final RuntimeGeneratedInvokerPrototype prototype) throws InstantiationException, IllegalAccessException {
+ final org.opendaylight.controller.sal.binding.api.NotificationListener<?> proxy = Preconditions.checkNotNull(prototype.getProtoClass().newInstance());
+ RuntimeCodeHelper.setDelegate(proxy, delegate);
+ return new RuntimeGeneratedInvoker(delegate, prototype, proxy);
+ }
+
+ @Override
+ public NotificationListener getDelegate() {
+ return delegate;
+ }
+
+ @Override
+ public org.opendaylight.controller.sal.binding.api.NotificationListener<Notification> getInvocationProxy() {
+ return invocationProxy;
+ }
+
+ @Override
+ public Set<Class<? extends Notification>> getSupportedNotifications() {
+ return prototype.getSupportedNotifications();
+ }
+
+ @Override
+ public void close() {
+ // Nothing to do
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + delegate.hashCode();
+ result = prime * result + invocationProxy.hashCode();
+ result = prime * result + prototype.hashCode();
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (!(obj instanceof RuntimeGeneratedInvoker)) {
+ return false;
+ }
+ final RuntimeGeneratedInvoker other = (RuntimeGeneratedInvoker) obj;
+ if (!delegate.equals(other.delegate)) {
+ return false;
+ }
+ if (!invocationProxy.equals(other.invocationProxy)) {
+ return false;
+ }
+ return prototype.equals(other.prototype);
+ }
+
+ @Override
+ public String toString() {
+ // Guava toStringHelper keeps this consistent with sibling value classes
+ // (e.g. RpcMetadata) and drops the stray Xtend runtime dependency.
+ return Objects.toStringHelper(this)
+ .add("delegate", delegate)
+ .add("invocationProxy", invocationProxy)
+ .add("prototype", prototype)
+ .toString();
+ }
+}
return supportedNotifications;
}
- public Class<? extends NotificationListener<? extends Object>> getProtoClass() {
+ public Class<? extends NotificationListener<?>> getProtoClass() {
return protoClass;
}
LOG.info("Starting Binding Aware Broker: {}", identifier);
controllerRoot = new RootSalInstance(getRpcProviderRegistry(), getNotificationBroker(), getDataBroker());
-
+
supportedConsumerServices = ImmutableClassToInstanceMap.<BindingAwareService> builder()
.put(NotificationService.class, getRoot()) //
public void close() throws Exception {
// FIXME: Close all sessions
}
-
+
@Override
public <T extends RpcService> RoutedRpcRegistration<T> addRoutedRpcImplementation(Class<T> type, T implementation)
throws IllegalStateException {
return getRoot().addRoutedRpcImplementation(type, implementation);
}
-
+
@Override
public <T extends RpcService> RpcRegistration<T> addRpcImplementation(Class<T> type, T implementation)
throws IllegalStateException {
return getRoot().addRpcImplementation(type, implementation);
}
-
+
@Override
public <T extends RpcService> T getRpcService(Class<T> module) {
return getRoot().getRpcService(module);
L arg0) {
return getRoot().registerRouteChangeListener(arg0);
}
-
+
public class RootSalInstance extends
AbstractBindingSalProviderInstance<DataProviderService, NotificationProviderService, RpcProviderRegistry> {
connector.startDataForwarding();
}
- public static void startNotificationForwarding(BindingIndependentConnector connector,
+ public static void startNotificationForwarding(BindingIndependentConnector connector,
NotificationProviderService baService, NotificationPublishService domService) {
if(connector.isNotificationForwarding()) {
return;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
-import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
DomForwardedBroker forwardedSource = (DomForwardedBroker) source;
DomForwardedBroker forwardedTarget = (DomForwardedBroker) target;
reuseForwardingFrom(forwardedTarget, forwardedSource);
-
+
}
private static void reuseForwardingFrom(DomForwardedBroker target, DomForwardedBroker source) {
*/
package org.opendaylight.controller.sal.binding.impl.util;
-import java.util.Iterator;
import org.opendaylight.controller.md.sal.common.impl.routing.AbstractDataReadRouter;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-@SuppressWarnings("all")
public class BindingAwareDataReaderRouter extends AbstractDataReadRouter<InstanceIdentifier<? extends DataObject>,DataObject> {
- protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
- return data.iterator().next();
- }
+ @Override
+ protected DataObject merge(final InstanceIdentifier<? extends DataObject> path, final Iterable<DataObject> data) {
+ return data.iterator().next();
+ }
}
package org.opendaylight.controller.sal.binding.spi;
public interface DelegateProxy<T> {
-
+
void setDelegate(T delegate);
T getDelegate();
}
public interface RemoteRpcRouter {
-
-
-
-
+
+
+
+
ListenerRegistration<RouteChangeListener> registerRouteChangeListener(RouteChangeListener listener);
-
-
+
+
}
Nodes nodes = checkForNodes();
verifyNode(nodes, flowCapableNode).assertHasAugmentation(FlowCapableNode.class);
- ;
assertBindingIndependentVersion(NODE_INSTANCE_ID_BI);
// Node meterStatsNode = createTestNode(NodeMeterStatistics.class, nodeMeterStatistics());
// commitNodeAndVerifyTransaction(meterStatsNode);
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
public class UnionSerializationTest extends AbstractDataServiceTest {
-
+
public static final String PREFIX_STRING = "192.168.0.1/32";
-
-
+
+
@Test
public void testPrefixSerialization() throws Exception {
-
+
Ipv4Prefix ipv4prefix = new Ipv4Prefix(PREFIX_STRING);
IpPrefix ipPrefix = new IpPrefix(ipv4prefix);
Prefix prefix = new PrefixBuilder().setPrefix(ipPrefix).build();
-
+
CompositeNode serialized = testContext.getBindingToDomMappingService().toDataDom(prefix);
assertNotNull(serialized);
assertNotNull(serialized.getFirstSimpleByName(Prefix.QNAME));
assertEquals(PREFIX_STRING, serialized.getFirstSimpleByName(Prefix.QNAME).getValue());
-
+
Prefix deserialized = (Prefix) testContext.getBindingToDomMappingService().dataObjectFromDataDom(Prefix.class, serialized);
assertNotNull(deserialized);
assertNotNull(deserialized.getPrefix());
public interface BarListener extends NotificationListener {
void onBarUpdate(BarUpdate notification);
-
+
void onFlowDelete(FlowDelete notification);
}
public interface FooListener extends NotificationListener {
void onFooUpdate(FooUpdate notification);
-
+
}
import org.opendaylight.yangtools.yang.common.RpcResult;
public interface FooService extends RpcService {
-
+
Future<RpcResult<Void>> foo();
-
+
Future<RpcResult<Void>> simple(SimpleInput obj);
-
+
Future<RpcResult<Void>> inheritedContext(InheritedContextInput obj);
}
public class RpcRegistrationNullPointer {
-
-
-
+
+
+
}
/**
* We create transaction no 2
- *
+ *
*/
DataModificationTransaction removalTransaction = baDataService.beginTransaction();
assertNotNull(transaction);
/**
* We remove node 1
- *
+ *
*/
removalTransaction.removeConfigurationData(node1.getValue());
registration.unregisterPath(context, path);
return this;
}
-
+
public static MessageCapturingFlowService create() {
return new MessageCapturingFlowService();
}
-
+
public static MessageCapturingFlowService create(RpcProviderRegistry registry) {
MessageCapturingFlowService ret = new MessageCapturingFlowService();
ret.registerTo(registry);
return ret;
}
-
-
+
+
}
public interface RegistrationListener<T extends Registration<?>> extends EventListener {
void onRegister(T registration);
-
+
void onUnregister(T registration);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.common.api.data;
+
+import org.opendaylight.yangtools.concepts.Path;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ *
+ * Three phase Commit Cohort for subtree, which is
+ * uniquely associated with a user-submitted transaction.
+ *
+ * @param <P>
+ * Type of path (subtree identifier), which represents location in
+ * tree
+ * @param <D>
+ * Type of data (payload), which represents data payload
+ */
+public interface AsyncConfigurationCommitCohort<P extends Path<P>, D> {
+
+ /**
+ * Initiates a pre-commit of associated request
+ *
+ * Implementation MUST NOT do any blocking calls during this callback, all
+ * pre-commit preparation SHOULD happen asynchronously and MUST result in
+ * completing returned future object.
+ *
+ * @param rebasedTransaction
+ * Read-only view of transaction as if happened on top of actual
+ * data store
+ * @return Future which is completed once pre-commit phase for this request
+ * is finished.
+ */
+ ListenableFuture<Void> preCommit(AsyncReadTransaction<P, D> rebasedTransaction);
+
+ /**
+ *
+ * Initiates a commit phase of associated request
+ *
+ * Implementation MUST NOT do any blocking calls during this callback, all
+ * commit finalization SHOULD happen asynchronously and MUST result in
+ * completing returned future object.
+ *
+ * @return Future which is completed once commit phase for associated
+ * request is finished.
+ */
+ ListenableFuture<Void> commit();
+
+ /**
+ *
+ * Initiates abort phase of associated request
+ *
+ * Implementation MUST NOT do any blocking calls during this callback, all
+ * commit finalization SHOULD happen asynchronously and MUST result in
+ * completing returned future object.
+ *
+ * @return Future which is completed once commit phase for associated
+ * request is finished.
+ */
+ ListenableFuture<Void> abort();
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.common.api.data;
+
+import org.opendaylight.yangtools.concepts.ObjectRegistration;
+import org.opendaylight.yangtools.concepts.Path;
+
+/**
+ * Three Phase Commit Coordinator with support of user-supplied commit cohorts
+ * which participates in three-phase commit protocols
+ *
+ * @param <P>
+ * Type of path (subtree identifier), which represents location in
+ * tree
+ * @param <D>
+ * Type of data (payload), which represents data payload
+ */
+public interface AsyncConfigurationCommitCoordinator<P extends Path<P>, D> {
+
+ /**
+ * Register configuration commit handler for particular subtree
+ *
+ * Configuration commit handler is invoked for all write transactions
+ * which modify <code>subtree</code>
+ *
+ * @param subtree Subtree in which the configuration commit handler is interested
+ * @param commitHandler Instance of user-provided commit handler
+ * @return Registration object representing this registration. Invoking {@link ObjectRegistration#close()}
+ * will unregister configuration commit handler.
+ */
+ <C extends AsyncConfigurationCommitCohort<P, D>> ObjectRegistration<C> registerConfigurationCommitHandler(
+ P subtree, C commitHandler);
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.common.api.data;
+
+import org.opendaylight.yangtools.concepts.Path;
+
+import com.google.common.util.concurrent.CheckedFuture;
+
+/**
+ * User-supplied participant in three-phase commit of transaction for configuration data tree
+ *
+ * Client-supplied implementation of commit handler for subtree, which
+ * is responsible for processing CAN-COMMIT phase of three-phase commit protocol
+ * and return CommitCohort, which provides access to additional transitions
+ * such as PRE-COMMIT, COMMIT and ABORT.
+ *
+ * @param <P>
+ * Type of path (subtree identifier), which represents location in
+ * tree
+ * @param <D>
+ * Type of data (payload), which represents data payload
+ */
+public interface AsyncConfigurationCommitHandler<P extends Path<P>, D> {
+
+ /**
+ *
+ * Requests a can commit phase
+ *
+ * Implementations SHOULD NOT do any blocking operation during
+ * processing this callback.
+ *
+ * <b>Implementation Notes</b>
+ * <ul>
+ * <li>Implementation are REQUIRED to use <code>request</code> object for any data related access</li>
+ * <li>Implementations SHOULD NOT use any other state stored outside configuration subtree for validation</li>
+ * <li>Validation should happen asynchronously, outside callback call by updating returned {@link CheckedFuture}
+ * object.</li>
+ * <li>If validation (CAN_COMMIT) phase:
+ * <ul>
+ * <li><b>is successful</b> - invocation of {@link CheckedFuture#checkedGet()} on returned future MUST
+ * return {@link AsyncConfigurationCommitCohort} associated with request.</li>
+ * <li><b>is unsuccessful</b> - invocation of {@link CheckedFuture#checkedGet()} must throw instance of {@link DataValidationFailedException}
+ * with a human-readable explanation of the error condition.
+ * </li>
+ * </ul>
+ * </li>
+ * @param request
+ * Commit Request submitted by client, which contains
+ * information about modifications and read-only view as
+ * if transaction happened.
+ * @return CheckedFuture which contains the client-supplied implementation of {@link AsyncConfigurationCommitCohort}
+ * associated with the submitted request if the can-commit phase is
+ * successful; if can-commit was unsuccessful, the future must fail with a
+ * {@link DataValidationFailedException} exception.
+ */
+ CheckedFuture<AsyncConfigurationCommitCohort<P, D>, DataValidationFailedException> canCommit(
+ ConfigurationCommitRequest<P, D> request);
+
+ /**
+ *
+ * Commit Request as was submitted by client code
+ *
+ * Commit Request contains list view of created / updated / removed
+ * path and read-only view of proposed client transaction,
+ * which may be used to retrieve modified or referenced data.
+ *
+ *
+ * @param <P>
+ * Type of path (subtree identifier), which represents location
+ * in tree
+ * @param <D>
+ * Type of data (payload), which represents data payload
+ */
+ static interface ConfigurationCommitRequest<P extends Path<P>, D> {
+
+ /**
+ *
+ * Read-only transaction which provides access only to configuration
+ * data tree as if submitted transaction successfully happened and
+ * no other concurrent modifications happened between allocation
+ * of client transactions and write of client transactions.
+ *
+ * Implementations of Commit Handlers are REQUIRED to use this
+ * read-only view to access any data from configuration data tree,
+ * in order to capture them as preconditions for this transaction.
+ *
+ * @return Read-only transaction which provides access only to configuration
+ * data tree as if submitted transaction successfully happened
+ */
+ AsyncReadTransaction<P, D> getReadOnlyView();
+
+ /**
+ *
+ * Returns iteration of paths, to data which was introduced by this transaction.
+ *
+ * @return Iteration of paths, which was introduced by this transaction.
+ */
+ Iterable<P> getCreatedPaths();
+ /**
+ *
+ * Returns iteration of paths, to data which was updated by this transaction.
+ *
+ * @return Iteration of paths, which was updated by this transaction.
+ */
+ Iterable<P> getUpdatedPaths();
+
+ /**
+ *
+ * Returns iteration of paths, to data which was removed by this transaction.
+ *
+ * @return Iteration of paths, which was removed by this transaction.
+ */
+ Iterable<P> getRemovedPaths();
+ }
+
+}
*/
package org.opendaylight.controller.md.sal.common.api.data;
-import java.util.concurrent.Future;
-
import org.opendaylight.yangtools.concepts.Path;
import com.google.common.base.Optional;
* @param store Identifier of the store, where commit should occur.
* @return Result of the Commit, containing success information or list of
* encountered errors, if commit was not successful. The Future
- * blocks until {@link TransactionStatus#COMMITED} or
- * {@link TransactionStatus#FAILED} is reached.
+ * blocks until {@link TransactionStatus#COMMITED} is reached.
+ * Future will fail with {@link TransactionCommitFailedException}
+ * if Commit of this transaction failed.
+ *
* @throws IllegalStateException if the transaction is not {@link TransactionStatus#NEW}
*/
public ListenableFuture<RpcResult<TransactionStatus>> commit();
public interface DataProvisionService<P extends Path<P> , D> {
public Registration<DataCommitHandler<P, D>> registerCommitHandler(P path, DataCommitHandler<P, D> commitHandler);
-
- public ListenerRegistration<RegistrationListener<DataCommitHandlerRegistration<P, D>>>
+
+ public ListenerRegistration<RegistrationListener<DataCommitHandlerRegistration<P, D>>>
registerCommitHandlerListener(RegistrationListener<DataCommitHandlerRegistration<P, D>> commitHandlerListener);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.common.api.data;
+
+import org.opendaylight.yangtools.concepts.Path;
+
+import com.google.common.base.Preconditions;
+
+/**
+ *
+ * Failure of asynchronous transaction commit caused by invalid data.
+ *
+ * This exception is raised and returned when a transaction commit
+ * failed because the data submitted via the transaction is invalid.
+ *
+ * Clients usually are not able to recover from this error condition by
+ * resubmitting the same transaction, since the data introduced by the
+ * transaction are invalid.
+ *
+ */
+public class DataValidationFailedException extends TransactionCommitFailedException {
+
+ private static final long serialVersionUID = 1L;
+
+ private Path<?> path;
+
+ private Class<? extends Path<?>> pathType;
+
+ public <P extends Path<P>> DataValidationFailedException(final Class<P> pathType,final P path, final String message, final Throwable cause) {
+ super(message, cause);
+ this.pathType = Preconditions.checkNotNull(pathType, "path type must not be null");
+ this.path = Preconditions.checkNotNull(path,"path must not be null.");
+ }
+
+ public <P extends Path<P>> DataValidationFailedException(final Class<P> pathType,final P path,final String message) {
+ this(pathType,path,message,null);
+ }
+
+ public final Path<?> getPath() {
+ return path;
+ }
+
+ public final Class<? extends Path<?>> getPathType() {
+ return pathType;
+ }
+
+}
--- /dev/null
+package org.opendaylight.controller.md.sal.common.api.data;
+
+/**
+*
+* Failure of asynchronous transaction commit caused by failure
+* of optimistic locking.
+*
+* This exception is raised and returned when transaction commit
+* failed, because other transaction finished successfully
+* and modified same data as failed transaction.
+*
+* Clients may recover from this error condition by
+* retrieving current state and submitting new updated
+* transaction.
+*
+*/
+public class OptimisticLockFailedException extends TransactionCommitFailedException {
+
+ private static final long serialVersionUID = 1L;
+
+ protected OptimisticLockFailedException(final String message, final Throwable cause, final boolean enableSuppression,
+ final boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+
+ public OptimisticLockFailedException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
+
+ public OptimisticLockFailedException(final String message) {
+ super(message);
+ }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.common.api.data;
+
+/**
+ *
+ * Failed commit of asynchronous transaction
+ *
+ * This exception is raised and returned when transaction commit
+ * failed.
+ *
+ */
+public class TransactionCommitFailedException extends Exception {
+
+ private static final long serialVersionUID = -6138306275373237068L;
+
+ protected TransactionCommitFailedException(final String message, final Throwable cause, final boolean enableSuppression, final boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+
+ public TransactionCommitFailedException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
+
+ public TransactionCommitFailedException(final String message) {
+ super(message);
+ }
+
+}
public interface NotificationPublishService<N> {
void publish(N notification);
-
+
void publish(N notification,ExecutorService executor);
}
import org.opendaylight.yangtools.concepts.Path;
public interface MutableRoutingTable<C, P extends Path<P>, T> extends RoutingTable<C,P,T>, Mutable {
-
+
void setDefaultRoute(T target);
void updateRoute(P path,T target);
void removeRoute(P path);
public interface Route<C,P> extends Immutable {
C getType();
-
+
P getPath();
}
public interface RoutingTable<C, P extends Path<P>, T> {
C getIdentifier();
-
+
T getDefaultRoute();
-
+
Map<P,T> getRoutes();
-
+
T getRoute(P path);
}
/**
* Base abstract implementation of DataReadRouter, which performs
* a read operation on multiple data readers and then merges result.
- *
+ *
* @param <P>
* @param <D>
*/
/**
* Merges data readed by reader instances from specified path
- *
+ *
* @param path Path on which read was performed
* @param data Data which was returned by read operation.
* @return Merged result.
/**
* Returns a function which performs configuration read for supplied path
- *
+ *
* @param path
* @return function which performs configuration read for supplied path
*/
-
+
private Function<DataReader<P, D>, D> configurationRead(final P path) {
return new Function<DataReader<P, D>, D>() {
@Override
/**
* Returns a function which performs operational read for supplied path
- *
+ *
* @param path
* @return function which performs operational read for supplied path
*/
/**
* Register's a reader for operational data.
- *
+ *
* @param path Path which is served by this reader
* @param reader Reader instance which is responsible for reading particular subpath.
- * @return
+ * @return
*/
public Registration<DataReader<P, D>> registerOperationalReader(P path, DataReader<P, D> reader) {
OperationalDataReaderRegistration<P, D> ret = new OperationalDataReaderRegistration<>(path, reader);
}
private Predicate<? super Entry<P, DataReaderRegistration<P, D>>> affects(final P path) {
-
+
return new Predicate<Entry<P, DataReaderRegistration<P, D>>>() {
-
+
@Override
public boolean apply(Entry<P, DataReaderRegistration<P, D>> input) {
final P key = input.getKey();
return key.contains(path) || ((P) path).contains(key);
}
-
+
};
}
private final D updatedOperationalSubtree;
private final D updatedConfigurationSubtree;
-
-
-
+
+
+
public DataChangeEventImpl(DataChange<P, D> dataChange, D originalConfigurationSubtree,
D originalOperationalSubtree, D updatedOperationalSubtree, D updatedConfigurationSubtree) {
super();
updatedConfigurationData = Collections.emptyMap();
updatedOperationalData = Collections.emptyMap();
}
-
+
public InitialDataChangeEventImpl(D configTree, D operTree, Map<P, D> updatedCfgData, Map<P, D> updatedOperData) {
updatedConfigurationTree = configTree;
updatedOperationalTree = operTree;
updatedConfigurationData = updatedCfgData;
updatedOperationalData = updatedOperData;
}
-
+
@Override
public Map<P, D> getCreatedConfigurationData() {
return Collections.emptyMap();
}
-
+
@Override
public Map<P, D> getCreatedOperationalData() {
return Collections.emptyMap();
}
-
+
@Override
public Map<P, D> getOriginalConfigurationData() {
return Collections.emptyMap();
public Map<P, D> getUpdatedConfigurationData() {
return updatedConfigurationData;
}
-
+
@Override
public D getUpdatedConfigurationSubtree() {
return updatedConfigurationTree;
public D getUpdatedOperationalSubtree() {
return updatedOperationalTree;
}
-
+
@Override
public D getOriginalConfigurationSubtree() {
return updatedConfigurationTree;
}
-
+
@Override
public D getOriginalOperationalSubtree() {
return updatedOperationalTree;
}
-
+
@Override
public Map<P, D> getUpdatedOperationalData() {
return updatedOperationalData;
}
-
+
}
package org.opendaylight.controller.md.sal.common.impl.util.compat;
public class DataNormalizationException extends Exception {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- public DataNormalizationException(String message) {
- super(message);
- }
+ public DataNormalizationException(String message) {
+ super(message);
+ }
- public DataNormalizationException(String message, Throwable cause) {
- super(message, cause);
- }
+ public DataNormalizationException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
public class CommitHandlerTransactions {
private static class AllwaysSuccessfulTransaction<P extends Path<P>,D> implements DataCommitTransaction<P, D> {
-
+
private final DataModification<P, D> modification;
public AllwaysSuccessfulTransaction(DataModification<P, D> modification) {
public RpcResult<Void> finish() throws IllegalStateException {
return Rpcs.<Void>getRpcResult(true, null, Collections.<RpcError>emptyList());
}
-
+
@Override
public DataModification<P, D> getModification() {
return modification;
}
}
-
+
public static final <P extends Path<P>,D> AllwaysSuccessfulTransaction<P, D> allwaysSuccessfulTransaction(DataModification<P, D> modification) {
return new AllwaysSuccessfulTransaction<>(modification);
}
*
*/
public class RpcErrors {
-
+
/**
* @param applicationTag
* @param tag
* @param info
* @param severity
* @param message
- * @param errorType
- * @param cause
+ * @param errorType
+ * @param cause
* @return {@link RpcError} implementation
*/
public static RpcError getRpcError(String applicationTag, String tag, String info,
ErrorSeverity severity, String message, ErrorType errorType, Throwable cause) {
- RpcErrorTO ret = new RpcErrorTO(applicationTag, tag, info, severity, message,
+ RpcErrorTO ret = new RpcErrorTO(applicationTag, tag, info, severity, message,
errorType, cause);
return ret;
}
public Throwable getCause() {
return cause;
}
-
+
@Override
public ErrorType getErrorType() {
return errorType;
*/
package org.opendaylight.controller.sal.compability;
-import org.opendaylight.controller.sal.core.*;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeConnector;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.port.rev130925.PortFeatures;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
public class ToSalPropertyClassUtils {
public static Bandwidth salAdvertisedBandwidthFrom(NodeConnector nodeConnector) {
- FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
+ FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
PortFeatures portFeatures = flowCapNodeConn.getAdvertisedFeatures();
return new AdvertisedBandwidth(resolveBandwidth(portFeatures));
}
public static Bandwidth salPeerBandwidthFrom(NodeConnector nodeConnector) {
- FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
+ FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
PortFeatures portFeatures = flowCapNodeConn.getPeerFeatures();
return new PeerBandwidth(resolveBandwidth(portFeatures));
}
public static Bandwidth salSupportedBandwidthFrom(NodeConnector nodeConnector) {
- FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
+ FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
PortFeatures portFeatures = flowCapNodeConn.getSupported();
return new SupportedBandwidth(resolveBandwidth(portFeatures));
}
public static MacAddress salMacAddressFrom(NodeConnector nodeConnector) {
- FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
+ FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
String hwAddress = flowCapNodeConn.getHardwareAddress().getValue();
- return new MacAddress(bytesFrom(hwAddress));
+ return new MacAddress(bytesFrom(hwAddress));
}
-
-
+
+
public static Name salNameFrom(NodeConnector nodeConnector) {
- FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
+ FlowCapableNodeConnector flowCapNodeConn = nodeConnector.getAugmentation(FlowCapableNodeConnector.class);
return new Name(flowCapNodeConn.getName());
}
-
-
+
+
private static byte[] bytesFrom(String hwAddress) {
String[] mac = hwAddress.split(":");
/**
* Java class name of Rpc Context
- *
- *
+ *
+ *
*/
@Override
public String getContext() {
/**
* String representation of route e.g. node-id
- *
+ *
*/
@Override
public String getRoute() {
/**
* Java class name of Rpc Type e.g org.opendaylight.AddFlowInput
- *
+ *
*/
@Override
public String getType() {
public class BindingAwareZeroMqRpcRouter implements BindingAwareRpcRouter {
BindingAwareRpcRouter mdSalRouter;
-
+
public BindingAwareRpcRouter getMdSalRouter() {
return mdSalRouter;
}
// Write message down to the wire
return null;
}
-
+
// Receiver part - invoked when request is received and deserialized
private Future<RpcReply<byte[]>> receivedRequest(RpcRequest<String, String, String, byte[]> input) {
-
+
return mdSalRouter.sendRpc(input);
}
public interface Connector extends RpcImplementation, NotificationListener {
-
-
+
+
Set<InstanceIdentifier> getConfigurationPrefixes();
Set<InstanceIdentifier> getRuntimePrefixes();
-
+
void registerListener(ConnectorListener listener);
void unregisterListener(ConnectorListener listener);
}
import java.util.concurrent.Future;
/**
- *
+ *
* @author ttkacik
*
* @param <C> Routing Context Identifier
*/
public interface RpcRouter<C,T,R,D> {
-
-
+
+
Future<RpcReply<D>> sendRpc(RpcRequest<C, T, R, D> input);
-
-
+
+
/**
- *
- * @author
+ *
+ * @author
*
* @param <C> Routing Context Identifier
* @param <R> Route Type
RouteIdentifier<C,T,R> getRoutingInformation();
D getPayload();
}
-
+
public interface RouteIdentifier<C,T,R> {
-
+
C getContext(); // defines a routing table (e.g. NodeContext)
T getType(); // rpc type
R getRoute(); // e.g. (node identity)
}
-
+
public interface RpcReply<D> {
D getPayload();
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-parent</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>sal-distributed-datastore</artifactId>
+ <packaging>bundle</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-actor_${scala.version}</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-cluster_${scala.version}</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-persistence-experimental_${scala.version}</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-remote_${scala.version}</artifactId>
+ </dependency>
+
+ <!-- SAL Dependencies -->
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>config-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-binding-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-binding-config</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-spi</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>concepts</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-common</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.core</artifactId>
+ </dependency>
+
+ <!-- AKKA Dependencies -->
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ </dependency>
+
+ <!-- Test Dependencies -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <version>${slf4j.version}</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <extensions>true</extensions>
+ <configuration>
+ <instructions>
+ <Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
+ <Export-package></Export-package>
+ <Private-Package></Private-Package>
+ </instructions>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.jacoco</groupId>
+ <artifactId>jacoco-maven-plugin</artifactId>
+ <configuration>
+ <includes>
+ <include>org.opendaylight.controller.*</include>
+ </includes>
+ <check>false</check>
+ </configuration>
+ <executions>
+ <execution>
+ <id>pre-test</id>
+ <goals>
+ <goal>prepare-agent</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>post-test</id>
+ <goals>
+ <goal>report</goal>
+ </goals>
+ <phase>test</phase>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>config</id>
+ <goals>
+ <goal>generate-sources</goal>
+ </goals>
+ <configuration>
+ <codeGenerators>
+ <generator>
+ <codeGeneratorClass>org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator</codeGeneratorClass>
+ <outputBaseDir>${jmxGeneratorPath}</outputBaseDir>
+ <additionalConfiguration>
+ <namespaceToPackage1>urn:opendaylight:params:xml:ns:yang:controller==org.opendaylight.controller.config.yang</namespaceToPackage1>
+ </additionalConfiguration>
+ </generator>
+ <generator>
+ <codeGeneratorClass>org.opendaylight.yangtools.maven.sal.api.gen.plugin.CodeGeneratorImpl</codeGeneratorClass>
+ <outputBaseDir>${salGeneratorPath}</outputBaseDir>
+ </generator>
+ </codeGenerators>
+ <inspectDependencies>true</inspectDependencies>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <tag>HEAD</tag>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL:Architecture:Clustering</url>
+ </scm>
+</project>
--- /dev/null
+package org.opendaylight.controller.cluster.datastore;
+
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
+import org.opendaylight.controller.sal.core.spi.data.DOMStore;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+/**
+ *
+ */
+public class DistributedDataStore implements DOMStore {
+
+ @Override
+ public <L extends AsyncDataChangeListener<InstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(InstanceIdentifier path, L listener, AsyncDataBroker.DataChangeScope scope) {
+ return new ListenerRegistrationProxy();
+ }
+
+ @Override
+ public DOMStoreTransactionChain createTransactionChain() {
+ return new TransactionChainProxy();
+ }
+
+ @Override
+ public DOMStoreReadTransaction newReadOnlyTransaction() {
+ return new TransactionProxy();
+ }
+
+ @Override
+ public DOMStoreWriteTransaction newWriteOnlyTransaction() {
+ return new TransactionProxy();
+ }
+
+ @Override
+ public DOMStoreReadWriteTransaction newReadWriteTransaction() {
+ return new TransactionProxy();
+ }
+}
--- /dev/null
+package org.opendaylight.controller.cluster.datastore;
+
+import org.opendaylight.yangtools.concepts.ListenerRegistration;
+
+/**
+ * ListenerRegistrationProxy acts as a proxy for a ListenerRegistration that was done on a remote shard
+ *
+ * Registering a DataChangeListener on the Data Store creates a new instance of the ListenerRegistrationProxy
+ * The ListenerRegistrationProxy talks to a remote ListenerRegistration actor.
+ */
+public class ListenerRegistrationProxy implements ListenerRegistration {
+ @Override
+ public Object getInstance() {
+ throw new UnsupportedOperationException("getInstance");
+ }
+
+ @Override
+ public void close() {
+ throw new UnsupportedOperationException("close");
+ }
+}
--- /dev/null
+package org.opendaylight.controller.cluster.datastore;
+
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+
+/**
+ * TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard.
+ * <p>
+ * All operations are unimplemented stubs that throw
+ * {@link UnsupportedOperationException}; the remote chain wiring is pending.
+ */
+public class TransactionChainProxy implements DOMStoreTransactionChain {
+    /** Not yet implemented. */
+    @Override
+    public DOMStoreReadTransaction newReadOnlyTransaction() {
+        throw new UnsupportedOperationException("newReadOnlyTransaction");
+    }
+
+    /** Not yet implemented. */
+    @Override
+    public DOMStoreReadWriteTransaction newReadWriteTransaction() {
+        throw new UnsupportedOperationException("newReadWriteTransaction");
+    }
+
+    /** Not yet implemented. */
+    @Override
+    public DOMStoreWriteTransaction newWriteOnlyTransaction() {
+        throw new UnsupportedOperationException("newWriteOnlyTransaction");
+    }
+
+    /** Not yet implemented. */
+    @Override
+    public void close() {
+        throw new UnsupportedOperationException("close");
+    }
+}
--- /dev/null
+package org.opendaylight.controller.cluster.datastore;
+
+import com.google.common.base.Optional;
+import com.google.common.util.concurrent.ListenableFuture;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+/**
+ * TransactionProxy acts as a proxy for one or more transactions that were created on a remote shard.
+ * <p>
+ * Creating a transaction on the consumer side will create one instance of a transaction proxy. If,
+ * during the transaction, reads and writes are done on data that belongs to different shards, then a
+ * separate transaction will be created on each of those shards by the TransactionProxy.
+ * <p>
+ * The TransactionProxy does not make any guarantees about atomicity or the order in which the
+ * transactions on the various shards will be executed.
+ * <p>
+ * NOTE(review): every operation is currently an unimplemented stub that throws
+ * {@link UnsupportedOperationException}.
+ */
+public class TransactionProxy implements DOMStoreReadWriteTransaction {
+    /** Not yet implemented: will read from the shard owning {@code path}. */
+    @Override
+    public ListenableFuture<Optional<NormalizedNode<?, ?>>> read(InstanceIdentifier path) {
+        throw new UnsupportedOperationException("read");
+    }
+
+    /** Not yet implemented: will write {@code data} to the shard owning {@code path}. */
+    @Override
+    public void write(InstanceIdentifier path, NormalizedNode<?, ?> data) {
+        throw new UnsupportedOperationException("write");
+    }
+
+    /** Not yet implemented: will merge {@code data} into the shard owning {@code path}. */
+    @Override
+    public void merge(InstanceIdentifier path, NormalizedNode<?, ?> data) {
+        throw new UnsupportedOperationException("merge");
+    }
+
+    /** Not yet implemented: will delete the subtree at {@code path}. */
+    @Override
+    public void delete(InstanceIdentifier path) {
+        throw new UnsupportedOperationException("delete");
+    }
+
+    /** Not yet implemented: will seal the transaction and return the 3PC cohort. */
+    @Override
+    public DOMStoreThreePhaseCommitCohort ready() {
+        throw new UnsupportedOperationException("ready");
+    }
+
+    /** Not yet implemented. */
+    @Override
+    public Object getIdentifier() {
+        throw new UnsupportedOperationException("getIdentifier");
+    }
+
+    /** Not yet implemented. */
+    @Override
+    public void close() {
+        throw new UnsupportedOperationException("close");
+    }
+}
--- /dev/null
+package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
+
+import org.opendaylight.controller.cluster.datastore.DistributedDataStore;
+
+/**
+ * Config-subsystem module that creates the distributed data store instance.
+ */
+public class DistributedDataStoreProviderModule extends org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedDataStoreProviderModule {
+    public DistributedDataStoreProviderModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+        super(identifier, dependencyResolver);
+    }
+
+    public DistributedDataStoreProviderModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver, org.opendaylight.controller.config.yang.config.distributed_datastore_provider.DistributedDataStoreProviderModule oldModule, java.lang.AutoCloseable oldInstance) {
+        super(identifier, dependencyResolver, oldModule, oldInstance);
+    }
+
+    @Override
+    public void customValidation() {
+        // add custom validation form module attributes here.
+    }
+
+    /**
+     * Creates the {@link DistributedDataStore} and returns an AutoCloseable
+     * wrapper holding it.
+     *
+     * Fix(review): previously the store was constructed and the reference
+     * immediately discarded, leaving the created instance unreachable.
+     */
+    @Override
+    public java.lang.AutoCloseable createInstance() {
+        final DistributedDataStore dataStore = new DistributedDataStore();
+
+        final class AutoCloseableDistributedDataStore implements AutoCloseable {
+            // Keep the store reachable for the lifetime of this module instance.
+            private final DistributedDataStore store = dataStore;
+
+            @Override
+            public void close() throws Exception {
+                // TODO(review): release store resources here once
+                // DistributedDataStore exposes a shutdown/close method.
+            }
+        }
+
+        return new AutoCloseableDistributedDataStore();
+    }
+
+}
--- /dev/null
+/*
+* Generated file
+*
+* Generated from: yang module name: distributed-datastore-provider yang module local name: distributed-datastore-provider
+* Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
+* Generated at: Thu Jun 12 15:23:43 PDT 2014
+*
+* Do not modify this file unless it is present under src/main directory
+*/
+package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
+public class DistributedDataStoreProviderModuleFactory extends org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedDataStoreProviderModuleFactory {
+    // Deliberately empty: all factory behavior is inherited from the generated abstract class.
+}
--- /dev/null
+// vi: set smarttab et sw=4 tabstop=4:
+module distributed-datastore-provider {
+
+    yang-version 1;
+    namespace "urn:opendaylight:params:xml:ns:yang:controller:config:distributed-datastore-provider";
+    prefix "distributed-datastore-provider";
+
+    import config { prefix config; revision-date 2013-04-05; }
+    // NOTE(review): rpc-context is imported but not referenced anywhere in this module yet.
+    import rpc-context { prefix rpcx; revision-date 2013-06-17; }
+
+    description
+        "This module contains the base YANG definitions for
+        the distributed datastore provider implementation";
+
+    revision "2014-06-12" {
+        description
+        "Initial revision.";
+    }
+
+    // This is the definition of the service implementation as a module identity.
+    identity distributed-datastore-provider {
+            base config:module-type;
+
+            // Specifies the prefix for generated java classes.
+            config:java-name-prefix DistributedDataStoreProvider;
+    }
+
+    // Augments the 'configuration' choice node under modules/module.
+    // The case is currently empty: the provider takes no configuration attributes yet.
+    augment "/config:modules/config:module/config:configuration" {
+        case distributed-datastore-provider {
+            when "/config:modules/config:module/config:type = 'distributed-datastore-provider'";
+        }
+    }
+}
--- /dev/null
+package org.opendaylight.controller.cluster.datastore;
+
+import junit.framework.Assert;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+/**
+ * Unit tests for the {@link DistributedDataStore} facade. The store currently
+ * returns stub proxies, so these tests only assert non-null results.
+ */
+public class DistributedDataStoreTest {
+
+    private DistributedDataStore distributedDataStore;
+
+    @org.junit.Before
+    public void setUp() throws Exception {
+        distributedDataStore = new DistributedDataStore();
+    }
+
+    @org.junit.After
+    public void tearDown() throws Exception {
+        // Nothing to release: DistributedDataStore exposes no close/shutdown yet.
+    }
+
+    @org.junit.Test
+    public void testRegisterChangeListener() throws Exception {
+        // The listener body is never invoked by the stub implementation.
+        ListenerRegistration registration =
+            distributedDataStore.registerChangeListener(InstanceIdentifier.builder().build(),
+                new AsyncDataChangeListener<InstanceIdentifier, NormalizedNode<?, ?>>() {
+                    @Override
+                    public void onDataChanged(AsyncDataChangeEvent<InstanceIdentifier, NormalizedNode<?, ?>> change) {
+                        throw new UnsupportedOperationException("onDataChanged");
+                    }
+                }, AsyncDataBroker.DataChangeScope.BASE);
+
+        // Fix(review): junit.framework.Assert is deprecated in JUnit 4; use org.junit.Assert.
+        org.junit.Assert.assertNotNull(registration);
+    }
+
+    @org.junit.Test
+    public void testCreateTransactionChain() throws Exception {
+        final DOMStoreTransactionChain transactionChain = distributedDataStore.createTransactionChain();
+        org.junit.Assert.assertNotNull(transactionChain);
+    }
+
+    @org.junit.Test
+    public void testNewReadOnlyTransaction() throws Exception {
+        final DOMStoreReadTransaction transaction = distributedDataStore.newReadOnlyTransaction();
+        org.junit.Assert.assertNotNull(transaction);
+    }
+
+    @org.junit.Test
+    public void testNewWriteOnlyTransaction() throws Exception {
+        final DOMStoreWriteTransaction transaction = distributedDataStore.newWriteOnlyTransaction();
+        org.junit.Assert.assertNotNull(transaction);
+    }
+
+    @org.junit.Test
+    public void testNewReadWriteTransaction() throws Exception {
+        final DOMStoreReadWriteTransaction transaction = distributedDataStore.newReadWriteTransaction();
+        org.junit.Assert.assertNotNull(transaction);
+    }
+}
/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
package org.opendaylight.controller.md.sal.dom.api;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainFactory;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
import org.opendaylight.controller.sal.core.api.BrokerService;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-public interface DOMDataBroker extends AsyncDataBroker<InstanceIdentifier, NormalizedNode<?, ?>, DOMDataChangeListener>, BrokerService {
+/**
+ * Data Broker which provides data transaction and data change listener functionality
+ * using {@link NormalizedNode} data format.
+ *
+ * This interface is type capture of generic interfaces and returns type captures
+ * of results for client-code convenience.
+ *
+ */
+public interface DOMDataBroker extends
+ AsyncDataBroker<InstanceIdentifier, NormalizedNode<?, ?>, DOMDataChangeListener>,
+ TransactionChainFactory<InstanceIdentifier, NormalizedNode<?, ?>>, BrokerService {
+
+ /**
+ * {@inheritDoc}
+ */
@Override
DOMDataReadTransaction newReadOnlyTransaction();
+ /**
+ * {@inheritDoc}
+ */
@Override
DOMDataReadWriteTransaction newReadWriteTransaction();
+ /**
+ * {@inheritDoc}
+ */
@Override
DOMDataWriteTransaction newWriteOnlyTransaction();
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ DOMTransactionChain createTransactionChain(TransactionChainListener listener);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.api;
+
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+/**
+ * A chain of DOM Data transactions.
+ *
+ * Transactions in a chain need to be committed in sequence and each
+ * transaction should see the effects of previous transactions as if they happened. A chain
+ * makes no guarantees of atomicity, in fact transactions are committed as soon as possible.
+ *
+ * <p>
+ * This interface is type capture of {@link TransactionChain} for DOM Data Contracts.
+ */
+public interface DOMTransactionChain extends TransactionChain<InstanceIdentifier, NormalizedNode<?, ?>> {
+
+    /** {@inheritDoc} Narrows the return type to the DOM-specific read transaction. */
+    @Override
+    DOMDataReadTransaction newReadOnlyTransaction();
+
+    /** {@inheritDoc} Narrows the return type to the DOM-specific read-write transaction. */
+    @Override
+    DOMDataReadWriteTransaction newReadWriteTransaction();
+
+    /** {@inheritDoc} Narrows the return type to the DOM-specific write transaction. */
+    @Override
+    DOMDataWriteTransaction newWriteOnlyTransaction();
+
+}
public abstract class AbstractConsumer implements Consumer, BundleActivator,ServiceTrackerCustomizer<Broker, Broker> {
-
-
-
+
+
+
private BundleContext context;
private ServiceTracker<Broker, Broker> tracker;
private Broker broker;
return Collections.emptySet();
}
-
+
@Override
public Broker addingService(ServiceReference<Broker> reference) {
if(broker == null) {
broker.registerConsumer(this, context);
return broker;
}
-
+
return null;
}
-
+
@Override
public void modifiedService(ServiceReference<Broker> reference, Broker service) {
// NOOP
}
-
+
@Override
public void removedService(ServiceReference<Broker> reference, Broker service) {
stopImpl(context);
import java.util.concurrent.Future;
import org.opendaylight.controller.md.sal.common.api.routing.RoutedRegistration;
-import org.opendaylight.controller.sal.core.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.core.api.data.DataProviderService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationPublishService;
-import org.opendaylight.controller.sal.core.api.notify.NotificationService;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.concepts.ObjectRegistration;
import org.opendaylight.yangtools.yang.common.QName;
* <li>RPC Invocation - see {@link ConsumerSession#rpc(QName, CompositeNode)},
* {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)} and
* {@link RpcImplementation}
- * <li>Notification Service - see {@link NotificationService} and
- * {@link NotificationPublishService}
+ * <li>Notification Service - see {@link org.opendaylight.controller.sal.core.api.notify.NotificationService} and
+ * {@link org.opendaylight.controller.sal.core.api.notify.NotificationPublishService}
* <li>Functionality and Data model
- * <li>Data Store access and modification - see {@link DataBrokerService} and
- * {@link DataProviderService}
+ * <li>Data Store access and modification - see {@link org.opendaylight.controller.sal.core.api.data.DataBrokerService} and
+ * {@link org.opendaylight.controller.sal.core.api.data.DataProviderService}
* </ul>
*
* The services are exposed via session.
* functionality of the provider from the system.
*/
@Override
- public void close();
+ void close();
@Override
boolean isClosed();
void close();
}
- public interface RoutedRpcRegistration extends RpcRegistration,
- RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
+ public interface RoutedRpcRegistration extends RpcRegistration, RoutedRegistration<QName, InstanceIdentifier, RpcImplementation> {
}
}
*/
package org.opendaylight.controller.sal.core.api;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-
/**
- *
+ *
* Session-specific instance of the broker functionality.
- *
+ *
* <p>
* BrokerService is marker interface for infrastructure services provided by the
* SAL. These services are session-specific, each {@link Provider} and
* {@link Consumer} usually has own instance of the service with it's own
* context.
- *
+ *
* <p>
* The consumer's (or provider's) instance of specific service could be obtained
- * by invoking {@link ConsumerSession#getService(Class)} method on session
+ * by invoking {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#getService(Class)} method on session
* assigned to the consumer.
- *
+ *
* <p>
* {@link BrokerService} and {@link Provider} may seem similar, but provider
* provides YANG model-based functionality and {@link BrokerService} exposes the
* necessary supporting functionality to implement specific functionality of
* YANG and to reuse it in the development of {@link Consumer}s and
* {@link Provider}s.
- *
- *
+ *
+ *
*/
public interface BrokerService {
import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
/**
- *
+ *
* Defines the component of controller and supplies additional metadata. A
* component of the controller or application supplies a concrete implementation
* of this interface.
- *
+ *
* A user-implemented component (application) which facilitates the SAL and SAL
* services to access infrastructure services or providers' functionality.
- *
- *
+ *
+ *
*/
public interface Consumer {
/**
* Callback signaling initialization of the consumer session to the SAL.
- *
+ *
* The consumer MUST use the session for all communication with SAL or
* retrieving SAL infrastructure services.
- *
+ *
* This method is invoked by {@link Broker#registerConsumer(Consumer)}
- *
+ *
* @param session
* Unique session between consumer and SAL.
*/
/**
* Get a set of implementations of consumer functionality to be registered
* into system during the consumer registration to the SAL.
- *
+ *
* This method is invoked by {@link Broker#registerConsumer(Consumer)}.
- *
+ *
* @return Set of consumer functionality.
*/
public Collection<ConsumerFunctionality> getConsumerFunctionality();
/**
* The marker interface for the interfaces describing the consumer
* functionality contracts.
- *
- *
+ *
+ *
*/
public interface ConsumerFunctionality {
import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
/**
- *
+ *
* Defines the component of controller and supplies additional metadata. A
* component of the controller or application supplies a concrete implementation
* of this interface.
- *
+ *
* <p>
* A user-implemented component (application) which facilitates the SAL and SAL
* services to access infrastructure services and to provide functionality to
* {@link Consumer}s and other providers.
- *
- *
+ *
+ *
*/
public interface Provider {
/**
* Callback signaling initialization of the provider session to the SAL.
- *
+ *
* <p>
* The provider <b>MUST use the session</b> for all communication with SAL
* or retrieving SAL infrastructure services.
- *
+ *
* <p>
* This method is invoked by {@link Broker#registerConsumer(Consumer)}
- *
+ *
* @param session
* Unique session between provider and SAL.
*/
/**
* Gets a set of implementations of provider functionality to be registered
* into system during the provider registration to the SAL.
- *
+ *
* <p>
* This method is invoked by {@link Broker#registerProvider(Provider)} to
* learn the initial provided functionality
- *
+ *
* @return Set of provider's functionality.
*/
public Collection<ProviderFunctionality> getProviderFunctionality();
/**
* Functionality provided by the {@link Provider}
- *
+ *
* <p>
* Marker interface used to mark the interfaces describing specific
* functionality which could be exposed by providers to other components.
- *
+ *
- *
+ *
*/
public interface ProviderFunctionality {
public interface RpcConsumptionRegistry {
/**
* Sends an RPC to other components registered to the broker.
- *
+ *
* @see RpcImplementation
* @param rpc
* Name of RPC
import java.util.Set;
-import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
* {@link Provider#getProviderFunctionality()}
* <li>passing an instance of implementation and {@link QName} of rpc as
* arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, RpcImplementation)}
* </ul>
*
* The simplified process of the invocation of rpc is following:
*
* <ol>
* <li> {@link Consumer} invokes
- * {@link ConsumerSession#rpc(QName, CompositeNode)}
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ConsumerSession#rpc(QName, CompositeNode)}
* <li> {@link Broker} finds registered {@link RpcImplementation}s
* <li> {@link Broker} invokes
* {@link RpcImplementation#invokeRpc(QName, CompositeNode)}
/**
* Registers an implementation of the rpc.
- *
+ *
* <p>
* The registered rpc functionality will be available to all other
* consumers and providers registered to the broker, which are aware of
* the {@link QName} assigned to the rpc.
- *
+ *
* <p>
* There is no assumption that rpc type is in the set returned by
* invoking {@link RpcImplementation#getSupportedRpcs()}. This allows
* for dynamic rpc implementations.
- *
+ *
* @param rpcType
* Name of Rpc
* @param implementation
*/
RpcRegistration addRpcImplementation(QName rpcType, RpcImplementation implementation)
throws IllegalArgumentException;
-
+
ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(RpcRegistrationListener listener);
RoutedRpcRegistration addRoutedRpcImplementation(QName rpcType, RpcImplementation implementation);
import org.opendaylight.yangtools.yang.common.QName;
public interface RpcRegistrationListener extends EventListener {
-
+
public void onRpcImplementationAdded(QName name);
-
+
public void onRpcImplementationRemoved(QName name);
}
public class RpcRoutingContext implements Immutable, Serializable {
/**
- *
+ *
*/
private static final long serialVersionUID = -9079324728075883325L;
-
+
private final QName context;
private final QName rpc;
-
-
+
+
private RpcRoutingContext(QName context, QName rpc) {
super();
this.context = context;
this.rpc = rpc;
}
-
+
public static final RpcRoutingContext create(QName context, QName rpc) {
return new RpcRoutingContext(context, rpc);
}
/**
* DataBrokerService provides unified access to the data stores available in the
* system.
- *
- *
+ *
+ *
* @see DataProviderService
- *
+ *
*/
-public interface DataBrokerService extends
+public interface DataBrokerService extends
BrokerService, //
DataReader<InstanceIdentifier, CompositeNode>, //
DataModificationTransactionFactory<InstanceIdentifier, CompositeNode>, //
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.controller.md.sal.common.api.data.DataReader;;
-public interface DataProviderService extends
+public interface DataProviderService extends
DataBrokerService, //
DataProvisionService<InstanceIdentifier, CompositeNode>
{
/**
* Adds {@link DataValidator} for specified Data Store
- *
+ *
* @param store
* Data Store
* @param validator
/**
* Removes {@link DataValidator} from specified Data Store
- *
+ *
* @param store
* @param validator
* Validator
/**
* Adds {@link DataRefresher} for specified data store
- *
+ *
* @param store
* @param refresher
*/
/**
* Removes {@link DataRefresher} from specified data store
- *
+ *
* @param store
* @param refresher
*/
void removeRefresher(DataStoreIdentifier store, DataRefresher refresher);
-
+
Registration<DataReader<InstanceIdentifier, CompositeNode>> registerConfigurationReader(InstanceIdentifier path, DataReader<InstanceIdentifier, CompositeNode> reader);
Registration<DataReader<InstanceIdentifier, CompositeNode>> registerOperationalReader(InstanceIdentifier path, DataReader<InstanceIdentifier, CompositeNode> reader);
-
+
public interface DataRefresher extends Provider.ProviderFunctionality {
/**
* Fired when some component explicitly requested the data refresh.
- *
+ *
* The provider which exposed the {@link DataRefresher} should republish
* its provided data by editing the data in all affected data stores.
*/
public interface DataStore extends //
DataReader<InstanceIdentifier, CompositeNode>,
DataCommitHandler<InstanceIdentifier, CompositeNode> {
-
-
+
+
Iterable<InstanceIdentifier> getStoredConfigurationPaths();
Iterable<InstanceIdentifier> getStoredOperationalPaths();
-
+
boolean containsConfigurationPath(InstanceIdentifier path);
boolean containsOperationalPath(InstanceIdentifier path);
/**
* {@link Provider}-supplied Validator of the data.
- *
+ *
* <p>
* The registration could be done by :
* <ul>
* as arguments to the
* {@link DataProviderService#addValidator(DataStoreIdentifier, DataValidator)}
* </ul>
- *
+ *
**/
public interface DataValidator extends Provider.ProviderFunctionality {
/**
* A set of Data Stores supported by implementation.
- *
+ *
* The set of {@link DataStoreIdentifier}s which identifies target data
* stores which are supported by this implementation. This set is used, when
* {@link Provider} is registered to the SAL, to register and expose the
* validation functionality to affected data stores.
- *
+ *
* @return Set of Data Store identifiers
*/
Set<DataStoreIdentifier> getSupportedDataStores();
/**
* Performs validation on supplied data.
- *
+ *
* @param toValidate
* Data to validate
* @return Validation result. The
public interface NotificationListener extends Consumer.ConsumerFunctionality, EventListener {
/**
* A set of notification types supported by listeners.
- *
+ *
* The set of notification {@link QName}s which are supported by this
* listener. This set is used, when {@link Consumer} is registered to the
* SAL, to automatically register the listener.
- *
+ *
* @return Set of QNames identifying supported notifications.
*/
Set<QName> getSupportedNotifications();
/**
* Fired when the notification occurs.
- *
+ *
* The type of the notification could be learned by
* <code>QName type = notification.getNodeType();</code>
- *
+ *
* @param notification
* Notification content
*/
*/
package org.opendaylight.controller.sal.core.api.notify;
-import org.opendaylight.controller.sal.core.api.Broker;
-import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
* Notification Publishing Service
- *
+ *
* The simplified process of the notification publishing is following:
- *
+ *
* <ol>
- * <li> {@link Provider} invokes {@link #sendNotification(CompositeNode)}
- * <li> {@link Broker} finds {@link NotificationListener}s which subscribed for
+ * <li> {@link org.opendaylight.controller.sal.core.api.Provider} invokes {@link #sendNotification(CompositeNode)}
+ * <li> {@link org.opendaylight.controller.sal.core.api.Broker} finds {@link NotificationListener}s which subscribed for
* the notification type.
- *
- * <li>For each subscriber {@link Broker} invokes
+ *
+ * <li>For each subscriber {@link org.opendaylight.controller.sal.core.api.Broker} invokes
* {@link NotificationListener#onNotification(CompositeNode)}
* </ol>
*/
public interface NotificationPublishService extends NotificationService {
/**
* Publishes a notification.
- *
+ *
* Notification type is determined by the
* {@link CompositeNode#getNodeType()} of the
* <code>notification<code> parameter.
- *
+ *
* @param notification
* Notification to publish
*/
package org.opendaylight.controller.sal.core.api.notify;
import org.opendaylight.controller.sal.core.api.BrokerService;
-import org.opendaylight.controller.sal.core.api.Provider;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.yangtools.concepts.Registration;
import org.opendaylight.yangtools.yang.common.QName;
/**
* NotificationService provides access to the notification functionality of the
* SAL.
- *
+ *
* NotificationService allows for consumption of notifications by registering
* implementations of NotificationListener.
- *
+ *
* The registration of notification listeners could be done by:
* <ul>
* <li>returning an instance of implementation in the return value of
- * {@link Provider#getProviderFunctionality()}
- * <li>passing an instance of implementation and {@link QName} of rpc as an
- * arguments to the
- * {@link ProviderSession#addRpcImplementation(QName, RpcImplementation)}
+ * {@link org.opendaylight.controller.sal.core.api.Provider#getProviderFunctionality()}
+ * <li>passing an instance of implementation and {@link QName} of an RPC as an
+ * argument to
+ * {@link org.opendaylight.controller.sal.core.api.Broker.ProviderSession#addRpcImplementation(QName, org.opendaylight.controller.sal.core.api.RpcImplementation)}
* </ul>
- *
- *
+ *
+ *
*/
public interface NotificationService extends BrokerService {
/**
* Registers a notification listener for supplied notification type.
- *
+ *
* @param notification
* @param listener
*/
public class DomBrokerRuntimeMXBeanImpl implements
DomBrokerImplRuntimeMXBean {
-
+
private final DataBrokerImpl dataService;
private final Transactions transactions = new Transactions();
private final Data data = new Data();
-
+
public DomBrokerRuntimeMXBeanImpl(DataBrokerImpl dataService) {
- this.dataService = dataService;
+ this.dataService = dataService;
}
public Transactions getTransactions() {
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.data.AsyncTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * Composite DOM Transaction backed by {@link DOMStoreTransaction}.
+ *
+ * Abstract base for composite transactions, which provides access only to
+ * common functionality: retrieval of a subtransaction, the close method and
+ * retrieval of the transaction identifier.
+ *
+ * @param <K>
+ * Subtransaction distinguisher
+ * @param <T>
+ * Subtransaction type
+ */
+abstract class AbstractDOMForwardedCompositeTransaction<K, T extends DOMStoreTransaction> implements
+ AsyncTransaction<InstanceIdentifier, NormalizedNode<?, ?>> {
+
+ private final ImmutableMap<K, T> backingTxs;
+ private final Object identifier;
+
+ /**
+ *
+ * Creates a new composite transaction.
+ *
+ * @param identifier
+ * Identifier of transaction; must not be null.
+ * @param backingTxs
+ * Key,value map of backing transactions; must not be null.
+ */
+ protected AbstractDOMForwardedCompositeTransaction(final Object identifier, final ImmutableMap<K, T> backingTxs) {
+ this.identifier = Preconditions.checkNotNull(identifier, "Identifier should not be null");
+ this.backingTxs = Preconditions.checkNotNull(backingTxs, "Backing transactions should not be null");
+ }
+
+ /**
+ * Returns the subtransaction associated with the supplied key.
+ *
+ * @param key Subtransaction distinguisher; must not be null.
+ * @return Subtransaction associated with the supplied key.
+ * @throws NullPointerException
+ * if key is null
+ * @throws IllegalArgumentException
+ * if no subtransaction is associated with key.
+ */
+ protected final T getSubtransaction(final K key) {
+ Preconditions.checkNotNull(key, "key must not be null.");
+ Preconditions.checkArgument(backingTxs.containsKey(key), "No subtransaction associated with %s", key);
+ return backingTxs.get(key);
+ }
+
+ /**
+ * Returns an immutable Iterable of all subtransactions.
+ *
+ * @return Immutable view of all backing subtransactions.
+ */
+ protected Iterable<T> getSubtransactions() {
+ return backingTxs.values();
+ }
+
+ @Override
+ public Object getIdentifier() {
+ return identifier;
+ }
+
+ @Override
+ public void close() {
+ /*
+ * We share one exception for all failures; failures after the first
+ * are attached to it as suppressed exceptions.
+ *
+ */
+ IllegalStateException failure = null;
+ for (T subtransaction : backingTxs.values()) {
+ try {
+ subtransaction.close();
+ } catch (Exception e) {
+ // Allocate the shared failure on the first error only.
+ if(failure == null) {
+ failure = new IllegalStateException("Uncaught exception occured during closing transaction.", e);
+ } else {
+ // Attach additional exceptions that occurred while closing as suppressed.
+ failure.addSuppressed(e);
+ }
+ }
+ }
+ // After attempting to close every subtransaction, rethrow any recorded failure.
+ if(failure != null) {
+ throw failure;
+ }
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionFactory;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+
+/**
+ *
+ * Abstract composite transaction factory.
+ *
+ * Provides a convenience common implementation for composite DOM Transactions,
+ * where a subtransaction is identified by {@link LogicalDatastoreType} type and
+ * implementation of subtransaction is provided by
+ * {@link DOMStoreTransactionFactory}.
+ *
+ * <b>Note:</b>This class does not have thread-safe implementation of {@link #close()},
+ * implementation may allow accessing and allocating new transactions during closing
+ * this instance.
+ *
+ * @param <T>
+ * Type of {@link DOMStoreTransactionFactory} factory.
+ */
+public abstract class AbstractDOMForwardedTransactionFactory<T extends DOMStoreTransactionFactory> implements DOMDataCommitImplementation, AutoCloseable {
+
+ // One backing store transaction factory per logical datastore type; immutable after construction.
+ private final ImmutableMap<LogicalDatastoreType, T> storeTxFactories;
+
+ // Set once by close(); guarded by "this" (see checkNotClosed / close).
+ private boolean closed;
+
+ protected AbstractDOMForwardedTransactionFactory(final Map<LogicalDatastoreType, ? extends T> txFactories) {
+ this.storeTxFactories = ImmutableMap.copyOf(txFactories);
+ }
+
+ /**
+ * Implementations must return a unique identifier for each and every call of
+ * this method.
+ *
+ * @return new Unique transaction identifier.
+ */
+ protected abstract Object newTransactionIdentifier();
+
+ /**
+ * Creates a new composite read-only transaction
+ *
+ * Creates a new composite read-only transaction backed by one transaction
+ * per factory in {@link #getTxFactories()}.
+ *
+ * Subtransaction for reading is selected by supplied
+ * {@link LogicalDatastoreType} as parameter for
+ * {@link DOMDataReadTransaction#read(LogicalDatastoreType,org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)}
+ * .
+ *
+ * Id of returned transaction is retrieved via
+ * {@link #newTransactionIdentifier()}.
+ *
+ * @return New composite read-only transaction.
+ */
+ public DOMDataReadTransaction newReadOnlyTransaction() {
+ checkNotClosed();
+ ImmutableMap.Builder<LogicalDatastoreType, DOMStoreReadTransaction> builder = ImmutableMap.builder();
+ for (Entry<LogicalDatastoreType, T> store : storeTxFactories.entrySet()) {
+ builder.put(store.getKey(), store.getValue().newReadOnlyTransaction());
+ }
+ return new DOMForwardedReadOnlyTransaction(newTransactionIdentifier(), builder.build());
+ }
+
+
+
+ /**
+ * Creates a new composite write-only transaction
+ *
+ * <p>
+ * Creates a new composite write-only transaction backed by one write-only
+ * transaction per factory in {@link #getTxFactories()}.
+ *
+ * <p>
+ * Implementation of composite Write-only transaction is following:
+ *
+ * <ul>
+ * <li>
+ * {@link DOMDataWriteTransaction#put(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#write(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * is invoked on selected subtransaction.
+ * <li>
+ * {@link DOMDataWriteTransaction#merge(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#merge(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * is invoked on selected subtransaction.
+ * <li>
+ * {@link DOMDataWriteTransaction#delete(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#delete(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)} is invoked on
+ * selected subtransaction.
+ * <li> {@link DOMDataWriteTransaction#commit()} - results in invoking
+ * {@link DOMStoreWriteTransaction#ready()}, gathering all resulting cohorts
+ * and then invoking finalized implementation callback
+ * {@link #commit(DOMDataWriteTransaction, Iterable)} with transaction which
+ * was committed and gathered results.
+ * </ul>
+ *
+ * Id of returned transaction is generated via
+ * {@link #newTransactionIdentifier()}.
+ *
+ * @return New composite write-only transaction associated with this
+ * factory.
+ */
+ public DOMDataWriteTransaction newWriteOnlyTransaction() {
+ checkNotClosed();
+ ImmutableMap.Builder<LogicalDatastoreType, DOMStoreWriteTransaction> builder = ImmutableMap.builder();
+ for (Entry<LogicalDatastoreType, T> store : storeTxFactories.entrySet()) {
+ builder.put(store.getKey(), store.getValue().newWriteOnlyTransaction());
+ }
+ return new DOMForwardedWriteTransaction<DOMStoreWriteTransaction>(newTransactionIdentifier(), builder.build(),
+ this);
+ }
+
+ /**
+ * Creates a new composite read-write transaction
+ *
+ * <p>
+ * Creates a new composite read-write transaction backed by one read-write
+ * transaction per factory in {@link #getTxFactories()}.
+ * <p>
+ * Implementation of composite read-write transaction is following:
+ *
+ * <ul>
+ * <li>
+ * {@link DOMDataReadWriteTransaction#read(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreReadTransaction#read(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)} is invoked on
+ * selected subtransaction.
+ * <li>
+ * {@link DOMDataWriteTransaction#put(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#write(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * is invoked on selected subtransaction.
+ * <li>
+ * {@link DOMDataWriteTransaction#merge(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#merge(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier, org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode)}
+ * is invoked on selected subtransaction.
+ * <li>
+ * {@link DOMDataWriteTransaction#delete(LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)}
+ * - backing subtransaction is selected by {@link LogicalDatastoreType},
+ * {@link DOMStoreWriteTransaction#delete(org.opendaylight.yangtools.yang.data.api.InstanceIdentifier)} is invoked on
+ * selected subtransaction.
+ * <li> {@link DOMDataWriteTransaction#commit()} - results in invoking
+ * {@link DOMStoreWriteTransaction#ready()}, gathering all resulting cohorts
+ * and then invoking finalized implementation callback
+ * {@link #commit(DOMDataWriteTransaction, Iterable)} with transaction which
+ * was committed and gathered results.
+ * </ul>
+ *
+ * Id of returned transaction is generated via
+ * {@link #newTransactionIdentifier()}.
+ *
+ * @return New composite read-write transaction associated with this
+ * factory.
+ *
+ */
+ public DOMDataReadWriteTransaction newReadWriteTransaction() {
+ checkNotClosed();
+ ImmutableMap.Builder<LogicalDatastoreType, DOMStoreReadWriteTransaction> builder = ImmutableMap.builder();
+ for (Entry<LogicalDatastoreType, T> store : storeTxFactories.entrySet()) {
+ builder.put(store.getKey(), store.getValue().newReadWriteTransaction());
+ }
+ return new DOMForwardedReadWriteTransaction(newTransactionIdentifier(), builder.build(), this);
+ }
+
+ /**
+ * Convenience accessor of backing factories intended to be used only by
+ * finalization of this class.
+ *
+ * <b>Note:</b>
+ * Finalization of this class may want to access other functionality of
+ * supplied Transaction factories.
+ *
+ * @return Map of backing transaction factories.
+ */
+ protected final Map<LogicalDatastoreType, T> getTxFactories() {
+ return storeTxFactories;
+ }
+
+ /**
+ *
+ * Checks if instance is not closed.
+ *
+ * @throws IllegalStateException If instance of this class was closed.
+ *
+ */
+ @GuardedBy("this")
+ protected synchronized void checkNotClosed() {
+ Preconditions.checkState(!closed,"Transaction factory was closed. No further operations allowed.");
+ }
+
+ @Override
+ @GuardedBy("this")
+ public synchronized void close() {
+ closed = true;
+ }
+
+}
*/
package org.opendaylight.controller.md.sal.dom.broker.impl;
-import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
-import java.util.Collections;
-import java.util.List;
import java.util.Map.Entry;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker;
import org.opendaylight.controller.md.sal.dom.api.DOMDataChangeListener;
-import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction;
-import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
-import org.opendaylight.controller.sal.common.util.Rpcs;
+import org.opendaylight.controller.md.sal.dom.api.DOMTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Function;
import com.google.common.base.Optional;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableMap;
-import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
-public class DOMDataBrokerImpl implements DOMDataBroker, AutoCloseable {
+public class DOMDataBrokerImpl extends AbstractDOMForwardedTransactionFactory<DOMStore> implements DOMDataBroker,
+ AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(DOMDataBrokerImpl.class);
- private static final Logger COORDINATOR_LOG = LoggerFactory.getLogger(CommitCoordination.class);
- private final ImmutableMap<LogicalDatastoreType, DOMStore> datastores;
- private final ListeningExecutorService executor;
+
+ private final DOMDataCommitCoordinatorImpl coordinator;
private final AtomicLong txNum = new AtomicLong();
+ private final AtomicLong chainNum = new AtomicLong();
public DOMDataBrokerImpl(final ImmutableMap<LogicalDatastoreType, DOMStore> datastores,
final ListeningExecutorService executor) {
- super();
- this.datastores = datastores;
- this.executor = executor;
+ super(datastores);
+ this.coordinator = new DOMDataCommitCoordinatorImpl(executor);
}
- private static final Function<Iterable<Boolean>, Boolean> AND_FUNCTION = new Function<Iterable<Boolean>, Boolean>() {
-
- @Override
- public Boolean apply(final Iterable<Boolean> input) {
-
- for (Boolean value : input) {
- if (value == false) {
- return Boolean.FALSE;
- }
- }
- return Boolean.TRUE;
- }
- };
-
@Override
- public DOMDataReadTransaction newReadOnlyTransaction() {
- ImmutableMap.Builder<LogicalDatastoreType, DOMStoreReadTransaction> builder = ImmutableMap.builder();
- for (Entry<LogicalDatastoreType, DOMStore> store : datastores.entrySet()) {
- builder.put(store.getKey(), store.getValue().newReadOnlyTransaction());
- }
- return new ReadOnlyTransactionImpl(newTransactionIdentifier(), builder.build());
- }
-
- private Object newTransactionIdentifier() {
+ protected Object newTransactionIdentifier() {
return "DOM-" + txNum.getAndIncrement();
}
- @Override
- public DOMDataReadWriteTransaction newReadWriteTransaction() {
- ImmutableMap.Builder<LogicalDatastoreType, DOMStoreReadWriteTransaction> builder = ImmutableMap.builder();
- for (Entry<LogicalDatastoreType, DOMStore> store : datastores.entrySet()) {
- builder.put(store.getKey(), store.getValue().newReadWriteTransaction());
- }
- return new ReadWriteTransactionImpl(newTransactionIdentifier(), builder.build(), this);
- }
-
- @Override
- public DOMDataWriteTransaction newWriteOnlyTransaction() {
- ImmutableMap.Builder<LogicalDatastoreType, DOMStoreWriteTransaction> builder = ImmutableMap.builder();
- for (Entry<LogicalDatastoreType, DOMStore> store : datastores.entrySet()) {
- builder.put(store.getKey(), store.getValue().newWriteOnlyTransaction());
- }
- return new WriteTransactionImpl<DOMStoreWriteTransaction>(newTransactionIdentifier(), builder.build(), this);
- }
-
@Override
public ListenerRegistration<DOMDataChangeListener> registerDataChangeListener(final LogicalDatastoreType store,
final InstanceIdentifier path, final DOMDataChangeListener listener, final DataChangeScope triggeringScope) {
- DOMStore potentialStore = datastores.get(store);
+ DOMStore potentialStore = getTxFactories().get(store);
checkState(potentialStore != null, "Requested logical data store is not available.");
return potentialStore.registerChangeListener(path, listener, triggeringScope);
}
- private ListenableFuture<RpcResult<TransactionStatus>> submit(
- final WriteTransactionImpl<? extends DOMStoreWriteTransaction> transaction) {
- LOG.debug("Tx: {} is submitted for execution.", transaction.getIdentifier());
- return executor.submit(new CommitCoordination(transaction));
- }
-
- private abstract static class AbstractCompositeTransaction<K, T extends DOMStoreTransaction> implements
- AsyncTransaction<InstanceIdentifier, NormalizedNode<?, ?>> {
-
- private final ImmutableMap<K, T> backingTxs;
- private final Object identifier;
-
- protected AbstractCompositeTransaction(final Object identifier, final ImmutableMap<K, T> backingTxs) {
- this.identifier = checkNotNull(identifier, "Identifier should not be null");
- this.backingTxs = checkNotNull(backingTxs, "Backing transactions should not be null");
- }
-
- protected T getSubtransaction(final K key) {
- return backingTxs.get(key);
- }
-
- public Iterable<T> getSubtransactions() {
- return backingTxs.values();
- }
-
- @Override
- public Object getIdentifier() {
- return identifier;
- }
-
- @Override
- public void close() {
- try {
- for (T subtransaction : backingTxs.values()) {
- subtransaction.close();
- }
- } catch (Exception e) {
- throw new IllegalStateException("Uncaught exception occured during closing transaction.", e);
- }
- }
-
- }
-
- private static class ReadOnlyTransactionImpl extends
- AbstractCompositeTransaction<LogicalDatastoreType, DOMStoreReadTransaction> implements
- DOMDataReadTransaction {
-
- protected ReadOnlyTransactionImpl(final Object identifier,
- final ImmutableMap<LogicalDatastoreType, DOMStoreReadTransaction> backingTxs) {
- super(identifier, backingTxs);
- }
-
- @Override
- public ListenableFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store,
- final InstanceIdentifier path) {
- return getSubtransaction(store).read(path);
- }
-
- }
-
- private static class WriteTransactionImpl<T extends DOMStoreWriteTransaction> extends
- AbstractCompositeTransaction<LogicalDatastoreType, T> implements DOMDataWriteTransaction {
-
- private final DOMDataBrokerImpl broker;
- private ImmutableList<DOMStoreThreePhaseCommitCohort> cohorts;
-
- protected WriteTransactionImpl(final Object identifier, final ImmutableMap<LogicalDatastoreType, T> backingTxs,
- final DOMDataBrokerImpl broker) {
- super(identifier, backingTxs);
- this.broker = broker;
- }
-
- public synchronized Iterable<DOMStoreThreePhaseCommitCohort> ready() {
- checkState(cohorts == null, "Transaction was already marked as ready.");
- ImmutableList.Builder<DOMStoreThreePhaseCommitCohort> cohortsBuilder = ImmutableList.builder();
- for (DOMStoreWriteTransaction subTx : getSubtransactions()) {
- cohortsBuilder.add(subTx.ready());
- }
- cohorts = cohortsBuilder.build();
- return cohorts;
- }
-
- protected ImmutableList<DOMStoreThreePhaseCommitCohort> getCohorts() {
- return cohorts;
- }
-
- @Override
- public void put(final LogicalDatastoreType store, final InstanceIdentifier path, final NormalizedNode<?, ?> data) {
- getSubtransaction(store).write(path, data);
- }
-
- @Override
- public void delete(final LogicalDatastoreType store, final InstanceIdentifier path) {
- getSubtransaction(store).delete(path);
- }
-
- @Override
- public void merge(final LogicalDatastoreType store, final InstanceIdentifier path,
- final NormalizedNode<?, ?> data) {
- getSubtransaction(store).merge(path,data);
- }
-
- @Override
- public void cancel() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public ListenableFuture<RpcResult<TransactionStatus>> commit() {
-
- ready();
- return broker.submit(this);
- }
-
- }
-
- private static class ReadWriteTransactionImpl extends WriteTransactionImpl<DOMStoreReadWriteTransaction> implements
- DOMDataReadWriteTransaction {
-
- protected ReadWriteTransactionImpl(final Object identifier,
- final ImmutableMap<LogicalDatastoreType, DOMStoreReadWriteTransaction> backingTxs,
- final DOMDataBrokerImpl broker) {
- // super(identifier, backingTxs);
- super(identifier, backingTxs, broker);
- }
-
- @Override
- public ListenableFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store,
- final InstanceIdentifier path) {
- return getSubtransaction(store).read(path);
- }
- }
-
- private final class CommitCoordination implements Callable<RpcResult<TransactionStatus>> {
-
- private final WriteTransactionImpl<? extends DOMStoreWriteTransaction> transaction;
-
- public CommitCoordination(final WriteTransactionImpl<? extends DOMStoreWriteTransaction> transaction) {
- this.transaction = transaction;
- }
-
- @Override
- public RpcResult<TransactionStatus> call() throws Exception {
-
- try {
- Boolean canCommit = canCommit().get();
-
- if (canCommit) {
- try {
- preCommit().get();
- try {
- commit().get();
- COORDINATOR_LOG.debug("Tx: {} Is commited.", transaction.getIdentifier());
- return Rpcs.getRpcResult(true, TransactionStatus.COMMITED,
- Collections.<RpcError> emptySet());
-
- } catch (InterruptedException | ExecutionException e) {
- COORDINATOR_LOG.error("Tx: {} Error during commit", transaction.getIdentifier(), e);
- }
-
- } catch (InterruptedException | ExecutionException e) {
- COORDINATOR_LOG.warn("Tx: {} Error during preCommit, starting Abort",
- transaction.getIdentifier(), e);
- }
- } else {
- COORDINATOR_LOG.info("Tx: {} Did not pass canCommit phase.", transaction.getIdentifier());
- abort().get();
- }
- } catch (InterruptedException | ExecutionException e) {
- COORDINATOR_LOG.warn("Tx: {} Error during canCommit, starting Abort", transaction.getIdentifier(), e);
-
- }
- try {
- abort().get();
- } catch (InterruptedException | ExecutionException e) {
- COORDINATOR_LOG.error("Tx: {} Error during abort", transaction.getIdentifier(), e);
- }
- return Rpcs.getRpcResult(false, TransactionStatus.FAILED, Collections.<RpcError> emptySet());
- }
-
- public ListenableFuture<Void> preCommit() {
- COORDINATOR_LOG.debug("Transaction {}: PreCommit Started ", transaction.getIdentifier());
- Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
- for (DOMStoreThreePhaseCommitCohort cohort : transaction.getCohorts()) {
- ops.add(cohort.preCommit());
- }
- return (ListenableFuture) Futures.allAsList(ops.build());
- }
-
- public ListenableFuture<Void> commit() {
- COORDINATOR_LOG.debug("Transaction {}: Commit Started ", transaction.getIdentifier());
- Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
- for (DOMStoreThreePhaseCommitCohort cohort : transaction.getCohorts()) {
- ops.add(cohort.commit());
- }
- return (ListenableFuture) Futures.allAsList(ops.build());
- }
-
- public ListenableFuture<Boolean> canCommit() {
- COORDINATOR_LOG.debug("Transaction {}: CanCommit Started ", transaction.getIdentifier());
- Builder<ListenableFuture<Boolean>> canCommitOperations = ImmutableList.builder();
- for (DOMStoreThreePhaseCommitCohort cohort : transaction.getCohorts()) {
- canCommitOperations.add(cohort.canCommit());
- }
- ListenableFuture<List<Boolean>> allCanCommits = Futures.allAsList(canCommitOperations.build());
- return Futures.transform(allCanCommits, AND_FUNCTION);
- }
-
- public ListenableFuture<Void> abort() {
- COORDINATOR_LOG.debug("Transaction {}: Abort Started ", transaction.getIdentifier());
- Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
- for (DOMStoreThreePhaseCommitCohort cohort : transaction.getCohorts()) {
- ops.add(cohort.abort());
- }
- return (ListenableFuture) Futures.allAsList(ops.build());
- };
+ /**
+ * Creates a new transaction chain backed by one {@link DOMStoreTransactionChain}
+ * per backing store in {@link #getTxFactories()}.
+ *
+ * @param listener Listener notified of chain-level transaction events.
+ * @return New transaction chain with a unique, monotonically increasing id.
+ */
+ @Override
+ public DOMTransactionChain createTransactionChain(final TransactionChainListener listener) {
+ ImmutableMap.Builder<LogicalDatastoreType, DOMStoreTransactionChain> backingChainsBuilder = ImmutableMap
+ .builder();
+ for (Entry<LogicalDatastoreType, DOMStore> entry : getTxFactories().entrySet()) {
+ backingChainsBuilder.put(entry.getKey(), entry.getValue().createTransactionChain());
+ }
+ long chainId = chainNum.getAndIncrement();
+ ImmutableMap<LogicalDatastoreType, DOMStoreTransactionChain> backingChains = backingChainsBuilder.build();
+ // Fixed typo in log message: "Transactoin" -> "Transaction".
+ LOG.debug("Transaction chain {} created with listener {}, backing store chains {}", chainId, listener,
+ backingChains);
+ return new DOMDataBrokerTransactionChainImpl(chainId, backingChains, coordinator, listener);
+ }
@Override
- public void close() throws Exception {
-
+ public ListenableFuture<RpcResult<TransactionStatus>> commit(final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts) {
+ LOG.debug("Transaction: {} submitted with cohorts {}.", transaction.getIdentifier(), cohorts);
+ return coordinator.submit(transaction, cohorts, Optional.<DOMDataCommitErrorListener> absent());
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * NormalizedNode implementation of {@link org.opendaylight.controller.md.sal.common.api.data.TransactionChain} which is backed
+ * by several {@link DOMStoreTransactionChain} differentiated by provided
+ * {@link org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType} type.
+ *
+ */
+public class DOMDataBrokerTransactionChainImpl extends AbstractDOMForwardedTransactionFactory<DOMStoreTransactionChain>
+ implements DOMTransactionChain, DOMDataCommitErrorListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(DOMDataBrokerTransactionChainImpl.class);
+ private final DOMDataCommitExecutor coordinator;
+ private final TransactionChainListener listener;
+ private final long chainId;
+ private final AtomicLong txNum = new AtomicLong();
+ @GuardedBy("this")
+ private boolean failed = false;
+
+ /**
+ *
+ * @param chainId
+ * ID of transaction chain
+ * @param chains
+ * Backing {@link DOMStoreTransactionChain}s.
+ * @param coordinator
+ * Commit Coordinator which should be used to coordinate commits
+ * of transaction
+ * produced by this chain.
+ * @param listener
+ * Listener, which listens on transaction chain events.
+ * @throws NullPointerException
+ * If any of arguments is null.
+ */
+ public DOMDataBrokerTransactionChainImpl(final long chainId,
+ final ImmutableMap<LogicalDatastoreType, DOMStoreTransactionChain> chains,
+ final DOMDataCommitExecutor coordinator, final TransactionChainListener listener) {
+ super(chains);
+ this.chainId = chainId;
+ this.coordinator = Preconditions.checkNotNull(coordinator);
+ this.listener = Preconditions.checkNotNull(listener);
+ }
+
+ @Override
+ protected Object newTransactionIdentifier() {
+ return "DOM-CHAIN-" + chainId + "-" + txNum.getAndIncrement();
+ }
+
+ @Override
+ public synchronized ListenableFuture<RpcResult<TransactionStatus>> commit(
+ final DOMDataWriteTransaction transaction, final Iterable<DOMStoreThreePhaseCommitCohort> cohorts) {
+ return coordinator.submit(transaction, cohorts, Optional.<DOMDataCommitErrorListener> of(this));
+ }
+
+ @Override
+ public synchronized void close() {
+ super.close();
+ for (DOMStoreTransactionChain subChain : getTxFactories().values()) {
+ subChain.close();
+ }
+
+ if (!failed) {
+            LOG.debug("Transaction chain {} successfully finished.", this);
+ listener.onTransactionChainSuccessful(this);
+ }
+ }
+
+ @Override
+ public synchronized void onCommitFailed(final DOMDataWriteTransaction tx, final Throwable cause) {
+ failed = true;
+        LOG.debug("Transaction chain {} failed.", this, cause);
+ listener.onTransactionChainFailed(this, tx, cause);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.common.util.Rpcs;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.yang.common.RpcError;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableList.Builder;
+import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+
+/**
+ *
+ * Implementation of blocking three phase commit coordinator, which
+ * supports coordination on multiple {@link DOMStoreThreePhaseCommitCohort}.
+ *
+ * This implementation does not support cancellation of commit.
+ *
+ * In order to advance to next phase of three phase commit all subtasks of
+ * previous step must be finished.
+ *
+ * This executor does not have an upper bound on subtask timeout.
+ *
+ *
+ */
+public class DOMDataCommitCoordinatorImpl implements DOMDataCommitExecutor {
+
+ private static final Logger LOG = LoggerFactory.getLogger(DOMDataCommitCoordinatorImpl.class);
+
+ /**
+ * Runs AND binary operation between all booleans in supplied iteration of booleans.
+ *
+ * This method will stop evaluating iterables if first found is false.
+ */
+ private static final Function<Iterable<Boolean>, Boolean> AND_FUNCTION = new Function<Iterable<Boolean>, Boolean>() {
+
+ @Override
+ public Boolean apply(final Iterable<Boolean> input) {
+ for(boolean value : input) {
+ if(!value) {
+ return Boolean.FALSE;
+ }
+ }
+ return Boolean.TRUE;
+ }
+ };
+
+ private final ListeningExecutorService executor;
+
+ /**
+ *
+ * Construct DOMDataCommitCoordinator which uses supplied executor to
+ * process commit coordinations.
+ *
+ * @param executor
+ */
+ public DOMDataCommitCoordinatorImpl(final ListeningExecutorService executor) {
+ this.executor = Preconditions.checkNotNull(executor, "executor must not be null.");
+ }
+
+ @Override
+ public ListenableFuture<RpcResult<TransactionStatus>> submit(final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts, final Optional<DOMDataCommitErrorListener> listener) {
+ Preconditions.checkArgument(transaction != null, "Transaction must not be null.");
+ Preconditions.checkArgument(cohorts != null, "Cohorts must not be null.");
+ Preconditions.checkArgument(listener != null, "Listener must not be null");
+ LOG.debug("Tx: {} is submitted for execution.", transaction.getIdentifier());
+ ListenableFuture<RpcResult<TransactionStatus>> commitFuture = executor.submit(new CommitCoordinationTask(
+ transaction, cohorts, listener));
+ if (listener.isPresent()) {
+ Futures.addCallback(commitFuture, new DOMDataCommitErrorInvoker(transaction, listener.get()));
+ }
+ return commitFuture;
+ }
+
+ /**
+ *
+ * Phase of 3PC commit
+ *
+ * Represents phase of 3PC Commit
+ *
+ *
+ */
+ private static enum CommitPhase {
+ /**
+ *
+ * Commit Coordination Task is submitted for executing
+ *
+ */
+ SUBMITTED,
+ /**
+ * Commit Coordination Task is in can commit phase of 3PC
+ *
+ */
+ CAN_COMMIT,
+ /**
+ * Commit Coordination Task is in pre-commit phase of 3PC
+ *
+ */
+ PRE_COMMIT,
+ /**
+ * Commit Coordination Task is in commit phase of 3PC
+ *
+ */
+ COMMIT,
+ /**
+ * Commit Coordination Task is in abort phase of 3PC
+ *
+ */
+ ABORT
+ }
+
+ /**
+ *
+ * Implementation of blocking three-phase commit-coordination tasks without
+ * support of cancelation.
+ *
+ */
+ private static class CommitCoordinationTask implements Callable<RpcResult<TransactionStatus>> {
+
+ private final DOMDataWriteTransaction tx;
+ private final Iterable<DOMStoreThreePhaseCommitCohort> cohorts;
+
+ @GuardedBy("this")
+ private CommitPhase currentPhase;
+
+ public CommitCoordinationTask(final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts,
+ final Optional<DOMDataCommitErrorListener> listener) {
+ this.tx = Preconditions.checkNotNull(transaction, "transaction must not be null");
+ this.cohorts = Preconditions.checkNotNull(cohorts, "cohorts must not be null");
+ this.currentPhase = CommitPhase.SUBMITTED;
+ }
+
+ @Override
+ public RpcResult<TransactionStatus> call() throws TransactionCommitFailedException {
+
+ try {
+ canCommitBlocking();
+ preCommitBlocking();
+ return commitBlocking();
+ } catch (TransactionCommitFailedException e) {
+ LOG.warn("Tx: {} Error during phase {}, starting Abort", tx.getIdentifier(), currentPhase, e);
+ abortBlocking(e);
+ throw e;
+ }
+ }
+
+ /**
+ *
+ * Invokes canCommit on underlying cohorts and blocks till
+ * all results are returned.
+ *
+ * Valid state transition is from SUBMITTED to CAN_COMMIT,
+ * if currentPhase is not SUBMITTED throws IllegalStateException.
+ *
+ * @throws TransactionCommitFailedException
+ * If one of cohorts failed can Commit
+ *
+ */
+ private void canCommitBlocking() throws TransactionCommitFailedException {
+ final Boolean canCommitResult = canCommitAll().checkedGet();
+ if (!canCommitResult) {
+ throw new TransactionCommitFailedException("Can Commit failed, no detailed cause available.");
+ }
+ }
+
+ /**
+ *
+ * Invokes preCommit on underlying cohorts and blocks till
+ * all results are returned.
+ *
+ * Valid state transition is from CAN_COMMIT to PRE_COMMIT, if current
+ * state is not CAN_COMMIT
+ * throws IllegalStateException.
+ *
+ * @throws TransactionCommitFailedException
+ * If one of cohorts failed preCommit
+ *
+ */
+ private void preCommitBlocking() throws TransactionCommitFailedException {
+ preCommitAll().checkedGet();
+ }
+
+ /**
+ *
+ * Invokes commit on underlying cohorts and blocks till
+ * all results are returned.
+ *
+ * Valid state transition is from PRE_COMMIT to COMMIT, if not throws
+ * IllegalStateException.
+ *
+ * @throws TransactionCommitFailedException
+ * If one of cohorts failed preCommit
+ *
+ */
+ private RpcResult<TransactionStatus> commitBlocking() throws TransactionCommitFailedException {
+ commitAll().checkedGet();
+ return Rpcs.getRpcResult(true, TransactionStatus.COMMITED, Collections.<RpcError> emptySet());
+ }
+
+ /**
+ * Aborts transaction.
+ *
+ * Invokes {@link DOMStoreThreePhaseCommitCohort#abort()} on all
+ * cohorts, blocks
+ * for all results. If any of the abort failed throws
+ * IllegalStateException,
+ * which will contains originalCause as suppressed Exception.
+ *
+         * If aborts were successful, throws the supplied exception.
+ *
+ * @param originalCause
+ * Exception which should be used to fail transaction for
+ * consumers of transaction
+ * future and listeners of transaction failure.
+ * @throws TransactionCommitFailedException
+         *             on invocation of this method; rethrows originalCause.
+ * @throws IllegalStateException
+ * if abort failed.
+ */
+ private void abortBlocking(final TransactionCommitFailedException originalCause)
+ throws TransactionCommitFailedException {
+ LOG.warn("Tx: {} Error during phase {}, starting Abort", tx.getIdentifier(), currentPhase, originalCause);
+ Exception cause = originalCause;
+ try {
+ abortAsyncAll().get();
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.error("Tx: {} Error during Abort.", tx.getIdentifier(), e);
+ cause = new IllegalStateException("Abort failed.", e);
+ cause.addSuppressed(e);
+ }
+ Throwables.propagateIfPossible(cause, TransactionCommitFailedException.class);
+ }
+
+ /**
+ *
+ * Invokes preCommit on underlying cohorts and returns future
+ * which will complete once all preCommit on cohorts completed or
+ * failed.
+ *
+ *
+ * Valid state transition is from CAN_COMMIT to PRE_COMMIT, if current
+ * state is not CAN_COMMIT
+ * throws IllegalStateException.
+ *
+ * @return Future which will complete once all cohorts completed
+ * preCommit.
+ * Future throws TransactionCommitFailedException
+ * If any of cohorts failed preCommit
+ *
+ */
+ private CheckedFuture<Void, TransactionCommitFailedException> preCommitAll() {
+ changeStateFrom(CommitPhase.CAN_COMMIT, CommitPhase.PRE_COMMIT);
+ Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
+ for (DOMStoreThreePhaseCommitCohort cohort : cohorts) {
+ ops.add(cohort.preCommit());
+ }
+ /*
+             * We are returning all futures as list, not only succeeded ones in
+ * order to fail composite future if any of them failed.
+ * See Futures.allAsList for this description.
+ */
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ ListenableFuture<Void> compositeResult = (ListenableFuture) Futures.allAsList(ops.build());
+ return Futures.makeChecked(compositeResult, TransactionCommitFailedExceptionMapper.PRE_COMMIT_MAPPER);
+ }
+
+ /**
+ *
+ * Invokes commit on underlying cohorts and returns future which
+ * completes
+ * once all commits on cohorts are completed.
+ *
+ * Valid state transition is from PRE_COMMIT to COMMIT, if not throws
+ * IllegalStateException
+ *
+ * @return Future which will complete once all cohorts completed
+ * commit.
+ * Future throws TransactionCommitFailedException
+ * If any of cohorts failed preCommit
+ *
+ */
+ private CheckedFuture<Void, TransactionCommitFailedException> commitAll() {
+ changeStateFrom(CommitPhase.PRE_COMMIT, CommitPhase.COMMIT);
+ Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
+ for (DOMStoreThreePhaseCommitCohort cohort : cohorts) {
+ ops.add(cohort.commit());
+ }
+ /*
+             * We are returning all futures as list, not only succeeded ones in
+ * order to fail composite future if any of them failed.
+ * See Futures.allAsList for this description.
+ */
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ ListenableFuture<Void> compositeResult = (ListenableFuture) Futures.allAsList(ops.build());
+ return Futures.makeChecked(compositeResult, TransactionCommitFailedExceptionMapper.COMMIT_ERROR_MAPPER);
+ }
+
+ /**
+ *
+ * Invokes canCommit on underlying cohorts and returns composite future
+ * which will contains {@link Boolean#TRUE} only and only if
+ * all cohorts returned true.
+ *
+ * Valid state transition is from SUBMITTED to CAN_COMMIT,
+ * if currentPhase is not SUBMITTED throws IllegalStateException.
+ *
+ * @return Future which will complete once all cohorts completed
+ * preCommit.
+ * Future throws TransactionCommitFailedException
+ * If any of cohorts failed preCommit
+ *
+ */
+ private CheckedFuture<Boolean, TransactionCommitFailedException> canCommitAll() {
+ changeStateFrom(CommitPhase.SUBMITTED, CommitPhase.CAN_COMMIT);
+ Builder<ListenableFuture<Boolean>> canCommitOperations = ImmutableList.builder();
+ for (DOMStoreThreePhaseCommitCohort cohort : cohorts) {
+ canCommitOperations.add(cohort.canCommit());
+ }
+ ListenableFuture<List<Boolean>> allCanCommits = Futures.allAsList(canCommitOperations.build());
+ ListenableFuture<Boolean> allSuccessFuture = Futures.transform(allCanCommits, AND_FUNCTION);
+ return Futures
+ .makeChecked(allSuccessFuture, TransactionCommitFailedExceptionMapper.CAN_COMMIT_ERROR_MAPPER);
+
+ }
+
+ /**
+ *
+ * Invokes abort on underlying cohorts and returns future which
+ * completes
+ * once all abort on cohorts are completed.
+ *
+ * @return Future which will complete once all cohorts completed
+ * abort.
+ *
+ */
+ private ListenableFuture<Void> abortAsyncAll() {
+ changeStateFrom(currentPhase, CommitPhase.ABORT);
+ Builder<ListenableFuture<Void>> ops = ImmutableList.builder();
+ for (DOMStoreThreePhaseCommitCohort cohort : cohorts) {
+ ops.add(cohort.abort());
+ }
+ /*
+             * We are returning all futures as list, not only succeeded ones in
+ * order to fail composite future if any of them failed.
+ * See Futures.allAsList for this description.
+ */
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ ListenableFuture<Void> compositeResult = (ListenableFuture) Futures.allAsList(ops.build());
+ return compositeResult;
+ }
+
+ /**
+ * Change phase / state of transaction from expected value to new value
+ *
+ * This method checks state and updates state to new state of
+ * of this task if current state equals expected state.
+ * If expected state and current state are different raises
+ * IllegalStateException
+ * which means there is probably bug in implementation of commit
+ * coordination.
+ *
+ * If transition is successful, it logs transition on DEBUG level.
+ *
+ * @param currentExpected
+ * Required phase for change of state
+ * @param newState
+ * New Phase which will be entered by transaction.
+ * @throws IllegalStateException
+ * If currentState of task does not match expected state
+ */
+ private synchronized void changeStateFrom(final CommitPhase currentExpected, final CommitPhase newState) {
+ Preconditions.checkState(currentPhase.equals(currentExpected),
+ "Invalid state transition: Tx: %s current state: %s new state: %s", tx.getIdentifier(),
+ currentPhase, newState);
+ LOG.debug("Transaction {}: Phase {} Started ", tx.getIdentifier(), newState);
+ currentPhase = newState;
+ };
+
+ }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.FutureCallback;
+
+/**
+ *
+ * Utility implementation of {@link FutureCallback} which is responsible
+ * for invoking {@link DOMDataCommitErrorListener} on TransactionCommit failed.
+ *
+ * When {@link #onFailure(Throwable)} is invoked, supplied {@link DOMDataCommitErrorListener}
+ * callback is invoked with associated transaction and throwable is invoked on listener.
+ *
+ */
+class DOMDataCommitErrorInvoker implements FutureCallback<RpcResult<TransactionStatus>> {
+
+ private final DOMDataWriteTransaction tx;
+ private final DOMDataCommitErrorListener listener;
+
+
+ /**
+ *
+ * Construct new DOMDataCommitErrorInvoker.
+ *
+ * @param transaction Transaction which should be passed as argument to {@link DOMDataCommitErrorListener#onCommitFailed(DOMDataWriteTransaction, Throwable)}
+ * @param listener Listener which should be invoked on error.
+ */
+ public DOMDataCommitErrorInvoker(DOMDataWriteTransaction transaction, DOMDataCommitErrorListener listener) {
+ this.tx = Preconditions.checkNotNull(transaction, "Transaction must not be null");
+ this.listener = Preconditions.checkNotNull(listener, "Listener must not be null");
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ listener.onCommitFailed(tx, t);
+ }
+
+ @Override
+ public void onSuccess(RpcResult<TransactionStatus> result) {
+ // NOOP
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.EventListener;
+
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+
+/**
+ *
+ * Listener on transaction failure which may be passed to
+ * {@link DOMDataCommitExecutor}. This listener is notified during transaction
+ * processing, before result is delivered to other client code outside MD-SAL.
+ * This allows implementors to update their internal state before transaction
+ * failure is visible to client code.
+ *
+ * This is internal API for MD-SAL implementations, for consumer facing error
+ * listeners see {@link org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener}.
+ *
+ */
+interface DOMDataCommitErrorListener extends EventListener {
+
+ /**
+ *
+ * Callback which is invoked on transaction failure during three phase
+ * commit in {@link DOMDataCommitExecutor}.
+ *
+ *
+ * Implementation of this callback MUST NOT do any blocking calls or any
+ * calls to MD-SAL, since this callback is invoked synchronously on MD-SAL
+ * Broker coordination thread.
+ *
+ * @param tx
+ * Transaction which failed
+ * @param cause
+ * Failure reason
+ */
+ void onCommitFailed(DOMDataWriteTransaction tx, Throwable cause);
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+
+import com.google.common.base.Optional;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * Executor of Three Phase Commit coordination for
+ * {@link DOMDataWriteTransaction} transactions.
+ *
+ * Implementations are responsible for executing implementation of three-phase
+ * commit protocol on supplied {@link DOMStoreThreePhaseCommitCohort}s.
+ *
+ *
+ */
+interface DOMDataCommitExecutor {
+
+ /**
+ * Submits supplied transaction to be executed in context of provided
+ * cohorts.
+ *
+ * Transaction is used only as a context, cohorts should be associated with
+ * this transaction.
+ *
+ * @param tx
+ * Transaction to be used as context for reporting
+ * @param cohort
+ * DOM Store cohorts representing provided transaction, its
+ *            subtransactions.
+ * @param listener
+ * Error listener which should be notified if transaction failed.
+ * @return ListenableFuture which contains RpcResult with
+ * {@link TransactionStatus#COMMITED} if commit coordination on
+ * cohorts finished successfully.
+ *
+ */
+ ListenableFuture<RpcResult<TransactionStatus>> submit(DOMDataWriteTransaction tx,
+ Iterable<DOMStoreThreePhaseCommitCohort> cohort, Optional<DOMDataCommitErrorListener> listener);
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ *
+ * Implementation prototype of commit method for
+ * {@link DOMForwardedWriteTransaction}.
+ *
+ */
+public interface DOMDataCommitImplementation {
+
+ /**
+ * User-supplied implementation of {@link DOMDataWriteTransaction#commit()}
+ * for transaction.
+ *
+ * Callback invoked when {@link DOMDataWriteTransaction#commit()} is invoked
+ * on transaction created by this factory.
+ *
+ * @param transaction
+ * Transaction on which {@link DOMDataWriteTransaction#commit()}
+ * was invoked.
+ * @param cohorts
+ * Iteration of cohorts for subtransactions associated with
+ * commited transaction.
+ *
+ */
+ ListenableFuture<RpcResult<TransactionStatus>> commit(final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts);
+}
+
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ *
+ * Read Only Transaction, which is composed of several
+ * {@link DOMStoreReadTransaction} transactions. Subtransaction is selected by
+ * {@link LogicalDatastoreType} type parameter in
+ * {@link #read(LogicalDatastoreType, InstanceIdentifier)}.
+ */
+class DOMForwardedReadOnlyTransaction extends
+ AbstractDOMForwardedCompositeTransaction<LogicalDatastoreType, DOMStoreReadTransaction> implements
+ DOMDataReadTransaction {
+
+ protected DOMForwardedReadOnlyTransaction(final Object identifier,
+ final ImmutableMap<LogicalDatastoreType, DOMStoreReadTransaction> backingTxs) {
+ super(identifier, backingTxs);
+ }
+
+ @Override
+ public ListenableFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store,
+ final InstanceIdentifier path) {
+ return getSubtransaction(store).read(path);
+ }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ *
+ * Read-Write Transaction, which is composed of several
+ * {@link DOMStoreReadWriteTransaction} transactions. Subtransaction is selected by
+ * {@link LogicalDatastoreType} type parameter in:
+ *
+ * <ul>
+ * <li>{@link #read(LogicalDatastoreType, InstanceIdentifier)}
+ * <li>{@link #put(LogicalDatastoreType, InstanceIdentifier, NormalizedNode)}
+ * <li>{@link #delete(LogicalDatastoreType, InstanceIdentifier)}
+ * <li>{@link #merge(LogicalDatastoreType, InstanceIdentifier, NormalizedNode)}
+ * </ul>
+ * {@link #commit()} will result in invocation of
+ * {@link DOMDataCommitImplementation#commit(org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction, Iterable)}
+ * invocation with all {@link org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort} for underlying
+ * transactions.
+ *
+ */
+
+class DOMForwardedReadWriteTransaction extends DOMForwardedWriteTransaction<DOMStoreReadWriteTransaction> implements
+ DOMDataReadWriteTransaction {
+
+ protected DOMForwardedReadWriteTransaction(final Object identifier,
+ final ImmutableMap<LogicalDatastoreType, DOMStoreReadWriteTransaction> backingTxs,
+ final DOMDataCommitImplementation commitImpl) {
+ super(identifier, backingTxs, commitImpl);
+ }
+
+ @Override
+ public ListenableFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store,
+ final InstanceIdentifier path) {
+ return getSubtransaction(store).read(path);
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * Read-Write Transaction, which is composed of several
+ * {@link DOMStoreWriteTransaction} transactions. A subtransaction is selected by
+ * the {@link LogicalDatastoreType} type parameter in:
+ *
+ * <ul>
+ * <li>{@link #put(LogicalDatastoreType, InstanceIdentifier, NormalizedNode)}
+ * <li>{@link #delete(LogicalDatastoreType, InstanceIdentifier)}
+ * <li>{@link #merge(LogicalDatastoreType, InstanceIdentifier, NormalizedNode)}
+ * </ul>
+ * <p>
+ * {@link #commit()} will result in invocation of
+ * {@link DOMDataCommitImplementation#commit(org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction, Iterable)}
+ * with all {@link org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort}s for the underlying
+ * transactions.
+ *
+ * @param <T>
+ *            Subtype of {@link DOMStoreWriteTransaction} which is used as
+ *            subtransaction.
+ */
+class DOMForwardedWriteTransaction<T extends DOMStoreWriteTransaction> extends
+        AbstractDOMForwardedCompositeTransaction<LogicalDatastoreType, T> implements DOMDataWriteTransaction {
+
+    // Commit implementation; nulled out on cancel so it can be garbage-collected.
+    @GuardedBy("this")
+    private DOMDataCommitImplementation commitImpl;
+
+    @GuardedBy("this")
+    private boolean canceled;
+    // Non-null once commit() has been invoked; also serves as the "committed" flag.
+    @GuardedBy("this")
+    private ListenableFuture<RpcResult<TransactionStatus>> commitFuture;
+
+    protected DOMForwardedWriteTransaction(final Object identifier,
+            final ImmutableMap<LogicalDatastoreType, T> backingTxs, final DOMDataCommitImplementation commitImpl) {
+        super(identifier, backingTxs);
+        this.commitImpl = Preconditions.checkNotNull(commitImpl, "commitImpl must not be null.");
+    }
+
+    @Override
+    public void put(final LogicalDatastoreType store, final InstanceIdentifier path, final NormalizedNode<?, ?> data) {
+        checkNotReady();
+        getSubtransaction(store).write(path, data);
+    }
+
+    @Override
+    public void delete(final LogicalDatastoreType store, final InstanceIdentifier path) {
+        checkNotReady();
+        getSubtransaction(store).delete(path);
+    }
+
+    @Override
+    public void merge(final LogicalDatastoreType store, final InstanceIdentifier path, final NormalizedNode<?, ?> data) {
+        checkNotReady();
+        getSubtransaction(store).merge(path, data);
+    }
+
+    @Override
+    public synchronized void cancel() {
+        checkNotCanceled();
+        if (commitFuture != null) {
+            // FIXME: Implement cancelation of commit future
+            // when Broker impl will support cancelation.
+            throw new UnsupportedOperationException("Not implemented yet.");
+        }
+        canceled = true;
+        // Drop the commit implementation; this transaction can never be committed now.
+        commitImpl = null;
+    }
+
+    @Override
+    public synchronized ListenableFuture<RpcResult<TransactionStatus>> commit() {
+        checkNotReady();
+
+        // Transition all subtransactions to ready state and collect their
+        // three-phase commit cohorts for the commit coordinator.
+        ImmutableList.Builder<DOMStoreThreePhaseCommitCohort> cohortsBuilder = ImmutableList.builder();
+        for (DOMStoreWriteTransaction subTx : getSubtransactions()) {
+            cohortsBuilder.add(subTx.ready());
+        }
+        ImmutableList<DOMStoreThreePhaseCommitCohort> cohorts = cohortsBuilder.build();
+        commitFuture = commitImpl.commit(this, cohorts);
+        return commitFuture;
+    }
+
+    /**
+     * Verifies the transaction is still open (neither canceled nor committed).
+     *
+     * NOTE(review): put()/delete()/merge() call this without holding the monitor
+     * guarding {@code canceled} and {@code commitFuture} — confirm whether the
+     * resulting racy reads are acceptable for this transaction's usage contract.
+     */
+    private void checkNotReady() {
+        checkNotCanceled();
+        checkNotCommitted();
+    }
+
+    private void checkNotCanceled() {
+        Preconditions.checkState(!canceled, "Transaction was canceled.");
+    }
+
+    private void checkNotCommitted() {
+        checkState(commitFuture == null, "Transaction was already committed.");
+    }
+
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.concurrent.ExecutionException;
+
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
+
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+
+/**
+ * Utility exception mapper which translates an {@link Exception}
+ * to a {@link TransactionCommitFailedException}.
+ *
+ * This mapper is intended to be used with {@link com.google.common.util.concurrent.Futures#makeChecked(com.google.common.util.concurrent.ListenableFuture, Function)}
+ * <ul>
+ * <li>if the exception is a {@link TransactionCommitFailedException} or one of its subclasses, it is returned as-is
+ * <li>if the exception is an {@link ExecutionException} whose cause is a {@link TransactionCommitFailedException}, the cause is returned
+ * <li>otherwise a new {@link TransactionCommitFailedException} is returned with the original exception as its cause.
+ * </ul>
+ *
+ */
+
+final class TransactionCommitFailedExceptionMapper implements
+        Function<Exception, TransactionCommitFailedException> {
+
+    // FIX: the operation names were swapped — PRE_COMMIT_MAPPER was created with
+    // "canCommit" and CAN_COMMIT_ERROR_MAPPER with "preCommit". Each mapper must
+    // report the three-phase-commit stage its name denotes.
+    static final TransactionCommitFailedExceptionMapper PRE_COMMIT_MAPPER = create("preCommit");
+
+    static final TransactionCommitFailedExceptionMapper CAN_COMMIT_ERROR_MAPPER = create("canCommit");
+
+    static final TransactionCommitFailedExceptionMapper COMMIT_ERROR_MAPPER = create("commit");
+
+    // Name of the commit phase this mapper reports in exception messages.
+    private final String opName;
+
+    private TransactionCommitFailedExceptionMapper(final String opName) {
+        this.opName = Preconditions.checkNotNull(opName);
+    }
+
+    public static TransactionCommitFailedExceptionMapper create(final String opName) {
+        return new TransactionCommitFailedExceptionMapper(opName);
+    }
+
+    @Override
+    public TransactionCommitFailedException apply(final Exception e) {
+        // If the exception already is a TransactionCommitFailedException,
+        // we reuse it directly.
+        if (e instanceof TransactionCommitFailedException) {
+            return (TransactionCommitFailedException) e;
+        }
+        // If the error is an ExecutionException caused by a
+        // TransactionCommitFailedException, we reuse the original cause.
+        if (e instanceof ExecutionException && e.getCause() instanceof TransactionCommitFailedException) {
+            return (TransactionCommitFailedException) e.getCause();
+        }
+        if (e instanceof InterruptedException) {
+            return new TransactionCommitFailedException(opName + " failed - DOMStore was interrupted.", e);
+        }
+        // Otherwise we wrap it in a new exception, keeping the original as cause.
+        return new TransactionCommitFailedException(opName + " failed", e);
+    }
+}
\ No newline at end of file
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.SnapshotBackedWriteTransaction.TransactionReadyPrototype;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.ConflictingModificationAppliedException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTree;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTreeCandidate;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTreeModification;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTreeSnapshot;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataValidationFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.data.InMemoryDataTreeFactory;
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
/**
* In-memory DOM Data Store
- *
+ *
* Implementation of {@link DOMStore} which uses {@link DataTree} and other
* classes such as {@link SnapshotBackedWriteTransaction}.
* {@link SnapshotBackedReadTransaction} and {@link ResolveDataChangeEventsTask}
* to implement {@link DOMStore} contract.
- *
+ *
*/
public class InMemoryDOMDataStore implements DOMStore, Identifiable<String>, SchemaContextListener,
TransactionReadyPrototype {
/*
* Make sure commit is not occurring right now. Listener has to be
* registered and its state capture enqueued at a consistent point.
- *
+ *
* FIXME: improve this to read-write lock, such that multiple listener
* registrations can occur simultaneously
*/
public ListenableFuture<Boolean> canCommit() {
return executor.submit(new Callable<Boolean>() {
@Override
- public Boolean call() {
+ public Boolean call() throws TransactionCommitFailedException {
try {
dataTree.validate(modification);
LOG.debug("Store Transaction: {} can be committed", transaction.getIdentifier());
return true;
- } catch (DataPreconditionFailedException e) {
+ } catch (ConflictingModificationAppliedException e) {
+ LOG.warn("Store Tx: {} Conflicting modification for {}.", transaction.getIdentifier(),
+ e.getPath());
+ throw new OptimisticLockFailedException("Optimistic lock failed.",e);
+ } catch (DataValidationFailedException e) {
LOG.warn("Store Tx: {} Data Precondition failed for {}.", transaction.getIdentifier(),
e.getPath(), e);
- return false;
+ throw new TransactionCommitFailedException("Data did not pass validation.",e);
}
}
});
* Implementation of Write transaction which is backed by
* {@link DataTreeSnapshot} and executed according to
* {@link TransactionReadyPrototype}.
- *
+ *
*/
class SnapshotBackedWriteTransaction extends AbstractDOMStoreTransaction implements DOMStoreWriteTransaction {
/**
* Creates new write-only transaction.
- *
+ *
* @param identifier
* transaction Identifier
* @param snapshot
/**
* Prototype implementation of
* {@link #ready(SnapshotBackedWriteTransaction)}
- *
+ *
* This class is intended to be implemented by Transaction factories
* responsible for allocation of {@link SnapshotBackedWriteTransaction} and
* providing underlying logic for applying implementation.
- *
+ *
*/
public static interface TransactionReadyPrototype {
/**
* Returns a commit coordinator associated with supplied transactions.
- *
+ *
* This call must not fail.
- *
+ *
* @param tx
* Transaction on which ready was invoked.
* @return DOMStoreThreePhaseCommitCohort associated with transaction
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl.tree;
+
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+
+/**
+ * Exception thrown when a proposed change fails validation before being
+ * applied into the Data Tree because the Data Tree has been modified
+ * in a way that a conflicting
+ * node is present.
+ */
+public class ConflictingModificationAppliedException extends DataValidationFailedException {
+
+    /**
+     * Serialization version; bump only on incompatible changes.
+     */
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * @param path path of the node on which the conflict was detected
+     * @param message description of the conflict
+     * @param cause exception which triggered this failure, may be null
+     */
+    public ConflictingModificationAppliedException(final InstanceIdentifier path, final String message, final Throwable cause) {
+        super(path, message, cause);
+    }
+
+    /**
+     * @param path path of the node on which the conflict was detected
+     * @param message description of the conflict
+     */
+    public ConflictingModificationAppliedException(final InstanceIdentifier path, final String message) {
+        super(path, message);
+    }
+
+}
/**
* Validate whether a particular modification can be applied to the data tree.
*/
- void validate(DataTreeModification modification) throws DataPreconditionFailedException;
+ void validate(DataTreeModification modification) throws DataValidationFailedException;
/**
* Prepare a modification for commit.
* Factory interface for creating data trees.
*/
public interface DataTreeFactory {
- /**
- * Create a new data tree.
- *
- * @return A data tree instance.
- */
- DataTree create();
+ /**
+ * Create a new data tree.
+ *
+ * @return A data tree instance.
+ */
+ DataTree create();
}
* the datastore has been concurrently modified such that a conflicting
* node is present, or the modification is structurally incorrect.
*/
-public class DataPreconditionFailedException extends Exception {
+public class DataValidationFailedException extends Exception {
private static final long serialVersionUID = 1L;
private final InstanceIdentifier path;
* @param path Object path which caused this exception
* @param message Specific message describing the failure
*/
- public DataPreconditionFailedException(final InstanceIdentifier path, final String message) {
+ public DataValidationFailedException(final InstanceIdentifier path, final String message) {
this(path, message, null);
}
/**
* @param message Specific message describing the failure
* @param cause Exception which triggered this failure, may be null
*/
- public DataPreconditionFailedException(final InstanceIdentifier path, final String message, final Throwable cause) {
+ public DataValidationFailedException(final InstanceIdentifier path, final String message, final Throwable cause) {
super(message, cause);
this.path = Preconditions.checkNotNull(path);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl.tree;
+
+import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
+
+/**
+ * Exception thrown when a proposed change fails validation before being
+ * applied into the datastore because of incorrect structure of user supplied
+ * data.
+ *
+ */
+public class IncorrectDataStructureException extends DataValidationFailedException {
+
+    /**
+     * Serialization version; bump only on incompatible changes.
+     */
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * @param path path of the node whose structure is incorrect
+     * @param message description of the structural problem
+     * @param cause exception which triggered this failure, may be null
+     */
+    public IncorrectDataStructureException(final InstanceIdentifier path, final String message, final Throwable cause) {
+        super(path, message, cause);
+    }
+
+    /**
+     * @param path path of the node whose structure is incorrect
+     * @param message description of the structural problem
+     */
+    public IncorrectDataStructureException(final InstanceIdentifier path, final String message) {
+        super(path, message);
+    }
+
+}
import java.util.List;
import java.util.Map;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier.PathArgument;
import com.google.common.base.Predicates;
/**
- * A set of utility methods for interacting with {@link TreeNode} objects.
+ * A set of utility methods for interacting with {@link org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode} objects.
*/
public final class TreeNodeUtils {
private TreeNodeUtils() {
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataValidationFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTree;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTreeCandidate;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataTreeModification;
}
@Override
- public void validate(final DataTreeModification modification) throws DataPreconditionFailedException {
+ public void validate(final DataTreeModification modification) throws DataValidationFailedException {
Preconditions.checkArgument(modification instanceof InMemoryDataTreeModification, "Invalid modification class %s", modification.getClass());
final InMemoryDataTreeModification m = (InMemoryDataTreeModification)modification;
*/
package org.opendaylight.controller.md.sal.dom.store.impl.tree.data;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataValidationFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.StoreTreeNode;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.Version;
* @param current Metadata Node to which modification should be applied
* @return true if modification is applicable
* false if modification is no applicable
- * @throws DataPreconditionFailedException
+ * @throws DataValidationFailedException
*/
- void checkApplicable(InstanceIdentifier path, NodeModification modification, Optional<TreeNode> current) throws DataPreconditionFailedException;
+ void checkApplicable(InstanceIdentifier path, NodeModification modification, Optional<TreeNode> current) throws DataValidationFailedException;
}
import java.util.Map;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataValidationFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ModificationType;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.data.DataNodeContainerModificationStrategy.ListEntryModificationStrategy;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.data.ValueNodeModificationStrategy.LeafSetEntryModificationStrategy;
@Override
protected void checkWriteApplicable(final InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ final Optional<TreeNode> current) throws DataValidationFailedException {
// FIXME: Implement proper write check for replacement of node container
// prerequisite is to have transaction chain available for clients
// otherwise this will break chained writes to same node.
@Override
protected void checkSubtreeModificationApplicable(final InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException {
- checkDataPrecondition(path, current.isPresent(), "Node was deleted by other transaction.");
+ final Optional<TreeNode> current) throws DataValidationFailedException {
+ checkConflicting(path, current.isPresent(), "Node was deleted by other transaction.");
checkChildPreconditions(path, modification, current);
}
- private void checkChildPreconditions(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ private void checkChildPreconditions(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataValidationFailedException {
final TreeNode currentMeta = current.get();
for (NodeModification childMod : modification.getChildren()) {
final PathArgument childId = childMod.getIdentifier();
@Override
protected void checkMergeApplicable(final InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ final Optional<TreeNode> current) throws DataValidationFailedException {
if(current.isPresent()) {
checkChildPreconditions(path, modification,current);
}
import java.util.List;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.ConflictingModificationAppliedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataValidationFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.IncorrectDataStructureException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ModificationType;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.data.DataNodeContainerModificationStrategy.ContainerModificationStrategy;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.data.DataNodeContainerModificationStrategy.UnkeyedListItemModificationStrategy;
return null;
}
- public static boolean checkDataPrecondition(final InstanceIdentifier path, final boolean condition, final String message) throws DataPreconditionFailedException {
+ public static boolean checkConflicting(final InstanceIdentifier path, final boolean condition, final String message) throws ConflictingModificationAppliedException {
if(!condition) {
- throw new DataPreconditionFailedException(path, message);
+ throw new ConflictingModificationAppliedException(path, message);
}
return condition;
}
}
}
- private static final void checkNotConflicting(final InstanceIdentifier path, final TreeNode original, final TreeNode current) throws DataPreconditionFailedException {
- checkDataPrecondition(path, original.getVersion().equals(current.getVersion()),
+ private static final void checkNotConflicting(final InstanceIdentifier path, final TreeNode original, final TreeNode current) throws ConflictingModificationAppliedException {
+ checkConflicting(path, original.getVersion().equals(current.getVersion()),
"Node was replaced by other transaction.");
- checkDataPrecondition(path, original.getSubtreeVersion().equals(current.getSubtreeVersion()),
+ checkConflicting(path, original.getSubtreeVersion().equals(current.getSubtreeVersion()),
"Node children was modified by other transaction");
}
}
@Override
- public final void checkApplicable(final InstanceIdentifier path,final NodeModification modification, final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ public final void checkApplicable(final InstanceIdentifier path,final NodeModification modification, final Optional<TreeNode> current) throws DataValidationFailedException {
switch (modification.getType()) {
case DELETE:
checkDeleteApplicable(modification, current);
}
- protected void checkMergeApplicable(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ protected void checkMergeApplicable(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataValidationFailedException {
Optional<TreeNode> original = modification.getOriginal();
if (original.isPresent() && current.isPresent()) {
/*
}
}
- protected void checkWriteApplicable(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataPreconditionFailedException {
+ protected void checkWriteApplicable(final InstanceIdentifier path, final NodeModification modification, final Optional<TreeNode> current) throws DataValidationFailedException {
Optional<TreeNode> original = modification.getOriginal();
if (original.isPresent() && current.isPresent()) {
checkNotConflicting(path, original.get(), current.get());
} else if(original.isPresent()) {
- throw new DataPreconditionFailedException(path,"Node was deleted by other transaction.");
+ throw new ConflictingModificationAppliedException(path,"Node was deleted by other transaction.");
}
}
protected abstract TreeNode applySubtreeChange(ModifiedNode modification,
TreeNode currentMeta, Version version);
+ /**
+ *
+ * Checks is supplied {@link NodeModification} is applicable for Subtree Modification.
+ *
+ * @param path Path to current node
+ * @param modification Node modification which should be applied.
+ * @param current Current state of data tree
+ * @throws ConflictingModificationAppliedException If subtree was changed in conflicting way
+ * @throws IncorrectDataStructureException If subtree modification is not applicable (e.g. leaf node).
+ */
protected abstract void checkSubtreeModificationApplicable(InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException;
+ final Optional<TreeNode> current) throws DataValidationFailedException;
protected abstract void verifyWrittenStructure(NormalizedNode<?, ?> writtenValue);
@Override
protected void checkSubtreeModificationApplicable(final InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException {
- throw new DataPreconditionFailedException(path, "Subtree modification is not allowed.");
+ final Optional<TreeNode> current) throws IncorrectDataStructureException {
+ throw new IncorrectDataStructureException(path, "Subtree modification is not allowed.");
}
}
}
import static com.google.common.base.Preconditions.checkArgument;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.DataPreconditionFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.IncorrectDataStructureException;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNode;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.TreeNodeFactory;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.spi.Version;
@Override
protected void checkSubtreeModificationApplicable(final InstanceIdentifier path, final NodeModification modification,
- final Optional<TreeNode> current) throws DataPreconditionFailedException {
- throw new DataPreconditionFailedException(path, "Subtree modification is not allowed.");
+ final Optional<TreeNode> current) throws IncorrectDataStructureException {
+ throw new IncorrectDataStructureException(path, "Subtree modification is not allowed.");
}
public static class LeafSetEntryModificationStrategy extends ValueNodeModificationStrategy<LeafListSchemaNode> {
/**
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
+ *
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
private AtomicLong nextTransaction = new AtomicLong();
private final AtomicLong createdTransactionsCount = new AtomicLong();
-
+
public DataBrokerImpl() {
setDataReadRouter(new DataReaderRouter());
setExecutor(MoreExecutors.sameThreadExecutor());
}
-
+
public AtomicLong getCreatedTransactionsCount() {
return createdTransactionsCount;
}
-
+
@Override
public DataTransactionImpl beginTransaction() {
String transactionId = "DOM-" + nextTransaction.getAndIncrement();
@Override
public void close() throws Exception {
-
+
}
}
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-public class DataTransactionImpl extends AbstractDataTransaction<InstanceIdentifier, CompositeNode>
+public class DataTransactionImpl extends AbstractDataTransaction<InstanceIdentifier, CompositeNode>
implements DataModificationTransaction {
private final ListenerRegistry<DataTransactionListener> listeners = new ListenerRegistry<DataTransactionListener>();
-
-
-
+
+
+
public DataTransactionImpl(Object identifier,DataBrokerImpl dataBroker) {
super(identifier,dataBroker);
}
if (CONTEXT_REFERENCE.equals(extension.getNodeType())) {
return Optional.fromNullable(extension.getQName());
}
- ;
}
return Optional.absent();
}
*/
package org.opendaylight.controller.sal.dom.broker.osgi;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.opendaylight.controller.sal.core.api.BrokerService;
import org.opendaylight.yangtools.concepts.Registration;
import org.osgi.framework.ServiceReference;
-import static com.google.common.base.Preconditions.*;
public abstract class AbstractBrokerServiceProxy<T extends BrokerService> implements AutoCloseable, BrokerService {
private T delegate;
private final ServiceReference<T> reference;
- public AbstractBrokerServiceProxy(ServiceReference<T> ref, T delegate) {
+ public AbstractBrokerServiceProxy(final ServiceReference<T> ref, final T delegate) {
this.delegate = checkNotNull(delegate, "Delegate should not be null.");
this.reference = checkNotNull(ref, "Reference should not be null.");
}
return reference;
}
- private Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
+ private final Set<Registration<?>> registrations = Collections.synchronizedSet(new HashSet<Registration<?>>());
- protected <R extends Registration<?>> R addRegistration(R registration) {
+ protected <R extends Registration<?>> R addRegistration(final R registration) {
if (registration != null) {
registrations.add(registration);
}
package org.opendaylight.controller.sal.dom.broker.osgi;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker;
import org.opendaylight.controller.md.sal.dom.api.DOMDataChangeListener;
import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction;
import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMTransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.osgi.framework.ServiceReference;
return getDelegate().registerDataChangeListener(store, path, listener, triggeringScope);
}
+ @Override
+ public DOMTransactionChain createTransactionChain(final TransactionChainListener listener) {
+ return getDelegate().createTransactionChain(listener);
+ }
+
}
public DataModificationTransaction beginTransaction() {
return getDelegate().beginTransaction();
}
-
-
+
+
}
public void removeValidator(DataStoreIdentifier store, DataValidator validator) {
getDelegate().removeValidator(store, validator);
}
-
+
@Override
public ListenerRegistration<RegistrationListener<DataCommitHandlerRegistration<InstanceIdentifier, CompositeNode>>> registerCommitHandlerListener(
RegistrationListener<DataCommitHandlerRegistration<InstanceIdentifier, CompositeNode>> commitHandlerListener) {
import java.util.Set;
import org.opendaylight.controller.md.sal.common.api.routing.RouteChangeListener;
-import org.opendaylight.controller.sal.core.api.*;
+import org.opendaylight.controller.sal.core.api.Broker;
+import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
+import org.opendaylight.controller.sal.core.api.RpcImplementation;
+import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
+import org.opendaylight.controller.sal.core.api.RpcRegistrationListener;
+import org.opendaylight.controller.sal.core.api.RpcRoutingContext;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
-public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry>
- implements RpcProvisionRegistry {
+public class RpcProvisionRegistryProxy extends AbstractBrokerServiceProxy<RpcProvisionRegistry> implements RpcProvisionRegistry {
- public RpcProvisionRegistryProxy(ServiceReference<RpcProvisionRegistry> ref, RpcProvisionRegistry delegate) {
+ public RpcProvisionRegistryProxy(final ServiceReference<RpcProvisionRegistry> ref, final RpcProvisionRegistry delegate) {
super(ref, delegate);
}
@Override
- public Broker.RpcRegistration addRpcImplementation(QName rpcType, RpcImplementation implementation) throws IllegalArgumentException {
+ public Broker.RpcRegistration addRpcImplementation(final QName rpcType, final RpcImplementation implementation) throws IllegalArgumentException {
return getDelegate().addRpcImplementation(rpcType, implementation);
}
@Override
- public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(RpcRegistrationListener listener) {
+ public ListenerRegistration<RpcRegistrationListener> addRpcRegistrationListener(final RpcRegistrationListener listener) {
return getDelegate().addRpcRegistrationListener(listener);
}
@Override
- public Broker.RoutedRpcRegistration addRoutedRpcImplementation(QName rpcType, RpcImplementation implementation) {
+ public Broker.RoutedRpcRegistration addRoutedRpcImplementation(final QName rpcType, final RpcImplementation implementation) {
return getDelegate().addRoutedRpcImplementation(rpcType, implementation);
}
@Override
- public void setRoutedRpcDefaultDelegate(RoutedRpcDefaultImplementation defaultImplementation) {
+ public void setRoutedRpcDefaultDelegate(final RoutedRpcDefaultImplementation defaultImplementation) {
getDelegate().setRoutedRpcDefaultDelegate(defaultImplementation);
}
@Override
- public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(L listener) {
+ public <L extends RouteChangeListener<RpcRoutingContext, InstanceIdentifier>> ListenerRegistration<L> registerRouteChangeListener(final L listener) {
return getDelegate().registerRouteChangeListener(listener);
}
}
@Override
- public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(QName rpc, CompositeNode input) {
+ public ListenableFuture<RpcResult<CompositeNode>> invokeRpc(final QName rpc, final CompositeNode input) {
return getDelegate().invokeRpc(rpc, input);
}
}
/**
* Registers a notification listener for supplied notification type.
- *
+ *
* @param notification
* @param listener
*/
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.SimpleNode;
-import static com.google.common.base.Preconditions.*;
-
-public class YangDataUtils {
+public final class YangDataUtils {
- public YangDataUtils() {
- // TODO Auto-generated constructor stub
+ private YangDataUtils() {
+ throw new UnsupportedOperationException("Utility class");
}
-
-
- public static Map<Map<QName,Object>,CompositeNode> toIndexMap(List<CompositeNode> nodes,List<QName> keys) {
+ public static Map<Map<QName,Object>,CompositeNode> toIndexMap(final List<CompositeNode> nodes,final List<QName> keys) {
ConcurrentHashMap<Map<QName,Object>,CompositeNode> ret = new ConcurrentHashMap<>();
for(CompositeNode node : nodes) {
Map<QName, Object> key = getKeyMap(node,keys);
return ret;
}
-
-
- public static Map<QName,Object> getKeyMap(CompositeNode node, List<QName> keys) {
+ public static Map<QName,Object> getKeyMap(final CompositeNode node, final List<QName> keys) {
Map<QName,Object> map = new HashMap<>();
for(QName key : keys) {
SimpleNode<?> keyNode = node.getFirstSimpleByName(QName.create(node.getNodeType(), key.getLocalName()));
checkArgument(keyNode != null,"Node must contains all keys.");
Object value = keyNode.getValue();
map.put(key, value);
-
+
}
return map;
}
*/
package org.opendaylight.controller.sal.dom.broker.util;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
import java.util.Iterator;
import java.util.List;
import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
import org.opendaylight.yangtools.yang.model.api.UsesNode;
-import static com.google.common.base.Preconditions.*;
-
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
-public class YangSchemaUtils {
+public final class YangSchemaUtils {
private static final Function<PathArgument, QName> QNAME_FROM_PATH_ARGUMENT = new Function<PathArgument, QName>(){
-
+
@Override
- public QName apply(PathArgument input) {
+ public QName apply(final PathArgument input) {
if(input == null) {
return null;
}
}
};
- private YangSchemaUtils() {
+ private YangSchemaUtils() {
throw new UnsupportedOperationException("Utility class.");
}
-
-
- public static DataSchemaNode getSchemaNode(SchemaContext schema,InstanceIdentifier path) {
+
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final InstanceIdentifier path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
return getSchemaNode(schema, FluentIterable.from(path.getPath()).transform(QNAME_FROM_PATH_ARGUMENT));
}
-
- public static DataSchemaNode getSchemaNode(SchemaContext schema,Iterable<QName> path) {
+
+ public static DataSchemaNode getSchemaNode(final SchemaContext schema,final Iterable<QName> path) {
checkArgument(schema != null,"YANG Schema must not be null.");
checkArgument(path != null,"Path must not be null.");
if(!path.iterator().hasNext()){
return toRootDataNode(schema);
}
-
+
QName firstNode = path.iterator().next();
DataNodeContainer previous = schema.findModuleByNamespaceAndRevision(firstNode.getNamespace(),
firstNode.getRevision());
Iterator<QName> iterator = path.iterator();
-
+
while (iterator.hasNext()) {
checkArgument(previous!= null, "Supplied path does not resolve into valid schema node.");
QName arg = iterator.next();
return (DataSchemaNode) previous;
}
- private static DataSchemaNode searchInChoices(DataNodeContainer node, QName arg) {
+ private static DataSchemaNode searchInChoices(final DataNodeContainer node, final QName arg) {
Set<DataSchemaNode> children = node.getChildNodes();
for (DataSchemaNode child : children) {
if (child instanceof ChoiceNode) {
return null;
}
- private static DataSchemaNode searchInCases(ChoiceNode choiceNode, QName arg) {
+ private static DataSchemaNode searchInCases(final ChoiceNode choiceNode, final QName arg) {
Set<ChoiceCaseNode> cases = choiceNode.getCases();
for (ChoiceCaseNode caseNode : cases) {
DataSchemaNode node = caseNode.getDataChildByName(arg);
return null;
}
- private static ContainerSchemaNode toRootDataNode(SchemaContext schema) {
+ private static ContainerSchemaNode toRootDataNode(final SchemaContext schema) {
return new NetconfDataRootNode(schema);
}
private static final class NetconfDataRootNode implements ContainerSchemaNode {
-
- public NetconfDataRootNode(SchemaContext schema) {
+
+ public NetconfDataRootNode(final SchemaContext schema) {
// TODO Auto-generated constructor stub
}
-
+
@Override
public Set<TypeDefinition<?>> getTypeDefinitions() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public Set<DataSchemaNode> getChildNodes() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public Set<GroupingDefinition> getGroupings() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
- public DataSchemaNode getDataChildByName(QName name) {
+ public DataSchemaNode getDataChildByName(final QName name) {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
- public DataSchemaNode getDataChildByName(String name) {
+ public DataSchemaNode getDataChildByName(final String name) {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public Set<UsesNode> getUses() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public Set<AugmentationSchema> getAvailableAugmentations() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public boolean isAugmenting() {
// TODO Auto-generated method stub
return false;
}
-
+
@Override
public boolean isAddedByUses() {
// TODO Auto-generated method stub
return false;
}
-
+
@Override
public boolean isConfiguration() {
// TODO Auto-generated method stub
return false;
}
-
+
@Override
public ConstraintDefinition getConstraints() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public QName getQName() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public SchemaPath getPath() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public String getDescription() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public String getReference() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public Status getStatus() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
// TODO Auto-generated method stub
return null;
}
-
+
@Override
public boolean isPresenceContainer() {
// TODO Auto-generated method stub
return false;
}
-
+
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import org.opendaylight.controller.md.sal.common.api.data.AsyncTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
+
+import com.google.common.util.concurrent.SettableFuture;
+
+/**
+ * Simple implementation of {@link TransactionChainListener} for testing.
+ *
+ * This transaction chain listener contains no logic; it only updates its
+ * futures ({@link #getFailFuture()} and {@link #getSuccessFuture()}) when a
+ * transaction chain event is received.
+ *
+ */
+class BlockingTransactionChainListener implements TransactionChainListener {
+
+ private final SettableFuture<Throwable> failFuture = SettableFuture.create();
+ private final SettableFuture<Void> successFuture = SettableFuture.create();
+
+ @Override
+ public void onTransactionChainFailed(final TransactionChain<?, ?> chain, final AsyncTransaction<?, ?> transaction,
+ final Throwable cause) {
+ failFuture.set(cause);
+ }
+
+ @Override
+ public void onTransactionChainSuccessful(final TransactionChain<?, ?> chain) {
+ successFuture.set(null);
+ }
+
+ public SettableFuture<Throwable> getFailFuture() {
+ return failFuture;
+ }
+
+ public SettableFuture<Void> getSuccessFuture() {
+ return successFuture;
+ }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION;
+import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.OPERATIONAL;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
+import org.opendaylight.controller.md.sal.dom.api.DOMTransactionChain;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
+import org.opendaylight.controller.md.sal.dom.store.impl.TestModel;
+import org.opendaylight.controller.sal.core.spi.data.DOMStore;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+
+public class DOMTransactionChainTest {
+
+ private SchemaContext schemaContext;
+ private DOMDataBrokerImpl domBroker;
+
+ @Before
+ public void setupStore() {
+ InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor());
+ InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor());
+ schemaContext = TestModel.createTestContext();
+
+ operStore.onGlobalContextUpdated(schemaContext);
+ configStore.onGlobalContextUpdated(schemaContext);
+
+ ImmutableMap<LogicalDatastoreType, DOMStore> stores = ImmutableMap.<LogicalDatastoreType, DOMStore> builder() //
+ .put(CONFIGURATION, configStore) //
+ .put(OPERATIONAL, operStore) //
+ .build();
+
+ ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+ domBroker = new DOMDataBrokerImpl(stores, executor);
+ }
+
+ @Test
+ public void testTransactionChainNoConflict() throws InterruptedException, ExecutionException, TimeoutException {
+ BlockingTransactionChainListener listener = new BlockingTransactionChainListener();
+ DOMTransactionChain txChain = domBroker.createTransactionChain(listener);
+ assertNotNull(txChain);
+
+ /**
+         * We allocate a new read-write transaction and write /test
+ *
+ *
+ */
+ DOMDataReadWriteTransaction firstTx = allocateAndWrite(txChain);
+
+ /**
+ * First transaction is marked as ready, we are able to allocate chained
+ * transactions
+ */
+ ListenableFuture<RpcResult<TransactionStatus>> firstWriteTxFuture = firstTx.commit();
+
+ /**
+         * We allocate a chained transaction - a read-only transaction.
+ */
+ DOMDataReadTransaction secondReadTx = txChain.newReadOnlyTransaction();
+
+ /**
+ *
+ * We test if we are able to read data from tx, read should not fail
+ * since we are using chained transaction.
+ *
+ *
+ */
+ assertTestContainerExists(secondReadTx);
+
+ /**
+ *
+         * We allocate the next transaction, which is still based on the first
+         * one, but is read-write.
+ *
+ */
+ DOMDataReadWriteTransaction thirdDeleteTx = allocateAndDelete(txChain);
+
+ /**
+ * third transaction is sealed.
+ */
+ ListenableFuture<RpcResult<TransactionStatus>> thirdDeleteTxFuture = thirdDeleteTx.commit();
+
+ /**
+ * We commit first transaction
+ *
+ */
+ assertCommitSuccessful(firstWriteTxFuture);
+
+        // Allocates a store transaction
+ DOMDataReadTransaction storeReadTx = domBroker.newReadOnlyTransaction();
+ /**
+         * We verify the transaction is committed to the store; the container
+         * should exist in the datastore.
+ */
+ assertTestContainerExists(storeReadTx);
+ /**
+ * We commit third transaction
+ *
+ */
+ assertCommitSuccessful(thirdDeleteTxFuture);
+
+ /**
+ * We close transaction chain.
+ */
+ txChain.close();
+
+ listener.getSuccessFuture().get(1000, TimeUnit.MILLISECONDS);
+ }
+
+ @Test
+ public void testTransactionChainNotSealed() throws InterruptedException, ExecutionException, TimeoutException {
+ BlockingTransactionChainListener listener = new BlockingTransactionChainListener();
+ DOMTransactionChain txChain = domBroker.createTransactionChain(listener);
+ assertNotNull(txChain);
+
+ /**
+         * We allocate a new read-write transaction and write /test
+ *
+ *
+ */
+ allocateAndWrite(txChain);
+
+ /**
+         * We allocate a chained transaction - a read transaction; note the first
+         * one is still not committed to the datastore, so this allocation should fail with
+ * IllegalStateException.
+ */
+ try {
+ txChain.newReadOnlyTransaction();
+ fail("Allocation of secondReadTx should fail with IllegalStateException");
+ } catch (Exception e) {
+ assertTrue(e instanceof IllegalStateException);
+ }
+ }
+
+ private static DOMDataReadWriteTransaction allocateAndDelete(final DOMTransactionChain txChain)
+ throws InterruptedException, ExecutionException {
+ DOMDataReadWriteTransaction tx = txChain.newReadWriteTransaction();
+
+ /**
+         * We test the existence of /test in the third transaction; the container
+         * should still be visible from the first one (which is still uncommitted).
+ *
+ */
+ assertTestContainerExists(tx);
+
+ /**
+ * We delete node in third transaction
+ */
+ tx.delete(LogicalDatastoreType.OPERATIONAL, TestModel.TEST_PATH);
+ return tx;
+ }
+
+ private static DOMDataReadWriteTransaction allocateAndWrite(final DOMTransactionChain txChain)
+ throws InterruptedException, ExecutionException {
+ DOMDataReadWriteTransaction tx = txChain.newReadWriteTransaction();
+ assertTestContainerWrite(tx);
+ return tx;
+ }
+
+ private static void assertCommitSuccessful(final ListenableFuture<RpcResult<TransactionStatus>> future)
+ throws InterruptedException, ExecutionException {
+ RpcResult<TransactionStatus> rpcResult = future.get();
+ assertTrue(rpcResult.isSuccessful());
+ assertEquals(TransactionStatus.COMMITED, rpcResult.getResult());
+ }
+
+ private static void assertTestContainerExists(final DOMDataReadTransaction readTx) throws InterruptedException,
+ ExecutionException {
+ ListenableFuture<Optional<NormalizedNode<?, ?>>> readFuture = readTx.read(OPERATIONAL, TestModel.TEST_PATH);
+ Optional<NormalizedNode<?, ?>> readedData = readFuture.get();
+ assertTrue(readedData.isPresent());
+ }
+
+ private static void assertTestContainerWrite(final DOMDataReadWriteTransaction tx) throws InterruptedException,
+ ExecutionException {
+ tx.put(OPERATIONAL, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+ assertTestContainerExists(tx);
+ }
+}
DOMStoreReadWriteTransaction writeTx = domStore.newReadWriteTransaction();
assertNotNull(writeTx);
/**
- *
+ *
* Writes /test in writeTx
- *
+ *
*/
writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
/**
- *
+ *
* Reads /test from writeTx Read should return container.
- *
+ *
*/
ListenableFuture<Optional<NormalizedNode<?, ?>>> writeTxContainer = writeTx.read(TestModel.TEST_PATH);
assertTrue(writeTxContainer.get().isPresent());
/**
- *
+ *
* Reads /test from readTx Read should return Absent.
- *
+ *
*/
ListenableFuture<Optional<NormalizedNode<?, ?>>> readTxContainer = readTx.read(TestModel.TEST_PATH);
assertFalse(readTxContainer.get().isPresent());
DOMStoreReadWriteTransaction writeTx = domStore.newReadWriteTransaction();
assertNotNull(writeTx);
/**
- *
+ *
* Writes /test in writeTx
- *
+ *
*/
writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
/**
- *
+ *
* Reads /test from writeTx Read should return container.
- *
+ *
*/
ListenableFuture<Optional<NormalizedNode<?, ?>>> writeTxContainer = writeTx.read(TestModel.TEST_PATH);
assertTrue(writeTxContainer.get().isPresent());
/**
* We alocate new read-write transaction and write /test
- *
- *
+ *
+ *
*/
DOMStoreReadWriteTransaction firstTx = txChain.newReadWriteTransaction();
assertTestContainerWrite(firstTx);
DOMStoreReadTransaction secondReadTx = txChain.newReadOnlyTransaction();
/**
- *
+ *
* We test if we are able to read data from tx, read should not fail
* since we are using chained transaction.
- *
- *
+ *
+ *
*/
assertTestContainerExists(secondReadTx);
/**
- *
+ *
* We alocate next transaction, which is still based on first one, but
* is read-write.
- *
+ *
*/
DOMStoreReadWriteTransaction thirdDeleteTx = txChain.newReadWriteTransaction();
/**
* We test existence of /test in third transaction container should
* still be visible from first one (which is still uncommmited).
- *
- *
+ *
+ *
*/
assertTestContainerExists(thirdDeleteTx);
/**
* We commit first transaction
- *
+ *
*/
assertThreePhaseCommit(firstWriteTxCohort);
assertTestContainerExists(storeReadTx);
/**
* We commit third transaction
- *
+ *
*/
assertThreePhaseCommit(thirdDeleteTxCohort);
}
private static Optional<NormalizedNode<?, ?>> assertTestContainerWrite(final DOMStoreReadWriteTransaction writeTx)
throws InterruptedException, ExecutionException {
/**
- *
+ *
* Writes /test in writeTx
- *
+ *
*/
writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
}
/**
- *
+ *
* Reads /test from readTx Read should return container.
- *
+ *
*/
private static Optional<NormalizedNode<?, ?>> assertTestContainerExists(DOMStoreReadTransaction readTx)
throws InterruptedException, ExecutionException {
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* Read and write access to stored data is provided only via transactions
* created using {@link #newReadOnlyTransaction()},
* {@link #newWriteOnlyTransaction()} and {@link #newReadWriteTransaction()}, or
- * by creating {@link TransactionChain}.
+ * by creating {@link org.opendaylight.controller.md.sal.common.api.data.TransactionChain}.
*
*/
public interface DOMStore extends DOMStoreTransactionFactory {
/**
- * Registers {@link DataChangeListener} for Data Change callbacks which will
+ * Registers {@link org.opendaylight.controller.md.sal.common.api.data.DataChangeListener} for Data Change callbacks which will
* be triggered on the change of provided subpath. What constitutes a change
* depends on the @scope parameter.
*
*/
package org.opendaylight.controller.sal.core.spi.data;
-import java.util.concurrent.Future;
-
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* read
* @return Listenable Future which contains read result
* <ul>
- * <li>If data at supplied path exists the {@link Future#get()}
+ * <li>If data at supplied path exists the {@link java.util.concurrent.Future#get()}
* returns Optional object containing data
* <li>If data at supplied path does not exists the
- * {@link Future#get()} returns {@link Optional#absent()}.
+ * {@link java.util.concurrent.Future#get()} returns {@link Optional#absent()}.
* </ul>
*/
ListenableFuture<Optional<NormalizedNode<?,?>>> read(InstanceIdentifier path);
*/
package org.opendaylight.controller.sal.core.spi.data;
-import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.yangtools.concepts.Path;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
* specified path.
*
* If you need add or merge of current object with specified use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
* specified path.
*
* If you need add or merge of current object with specified use
- * {@link #merge(LogicalDatastoreType, Path, Object)}
+ * {@link #merge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.concepts.Path, Object)}
*
*
* @param path
*/
package org.opendaylight.controller.sal.connect.api;
-import org.opendaylight.controller.sal.connect.netconf.sal.NetconfDeviceRpc;
import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider;
import org.opendaylight.controller.netconf.client.NetconfClientDispatcher;
import org.opendaylight.controller.netconf.client.NetconfClientSession;
import org.opendaylight.controller.netconf.client.NetconfClientSessionListener;
-import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration;
import org.opendaylight.controller.netconf.client.conf.NetconfReconnectingClientConfiguration;
import org.opendaylight.controller.netconf.util.xml.XmlElement;
import org.opendaylight.controller.netconf.util.xml.XmlNetconfConstants;
import com.google.common.util.concurrent.Futures;
import java.util.Collections;
import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
import javax.annotation.Nullable;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import java.util.List;
import java.util.Set;
-import javax.annotation.concurrent.ThreadSafe;
-
import org.opendaylight.controller.sal.connect.api.SchemaContextProviderFactory;
import org.opendaylight.controller.sal.connect.util.RemoteDeviceId;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
*/
package org.opendaylight.controller.sal.connect.netconf.schema.mapping;
+import com.google.common.base.Optional;
import java.util.Collections;
import java.util.List;
import java.util.Set;
-
import javax.activation.UnsupportedDataTypeException;
-
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.sal.common.util.Rpcs;
import org.opendaylight.controller.sal.connect.api.MessageTransformer;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.impl.CompositeNodeTOImpl;
import org.opendaylight.yangtools.yang.data.impl.ImmutableCompositeNode;
+import org.opendaylight.yangtools.yang.data.impl.codec.xml.XmlCodecProvider;
import org.opendaylight.yangtools.yang.data.impl.codec.xml.XmlDocumentUtils;
import org.opendaylight.yangtools.yang.data.impl.util.CompositeNodeBuilder;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.NotificationDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import com.google.common.base.Optional;
-
public class NetconfMessageTransformer implements MessageTransformer<NetconfMessage> {
public static final String MESSAGE_ID_PREFIX = "m";
NetconfMessageTransformUtil.NETCONF_RPC_QNAME, NetconfMessageTransformUtil.flattenInput(node));
final Document w3cPayload;
try {
- w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, XmlDocumentUtils.defaultValueCodecProvider());
+ final XmlCodecProvider codecProvider = XmlDocumentUtils.defaultValueCodecProvider();
+ if(schemaContext.isPresent()) {
+ if (NetconfMessageTransformUtil.isDataEditOperation(rpc)) {
+ final DataNodeContainer schemaForEdit = NetconfMessageTransformUtil.createSchemaForEdit(schemaContext.get());
+ w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, schemaForEdit, codecProvider);
+ } else {
+ w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, schemaContext.get(), codecProvider);
+ }
+ } else {
+ w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, codecProvider);
+ }
} catch (final UnsupportedDataTypeException e) {
throw new IllegalArgumentException("Unable to create message", e);
}
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-
import java.util.Map;
+
import javax.annotation.Nullable;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.yangtools.yang.data.impl.CompositeNodeTOImpl;
import org.opendaylight.yangtools.yang.data.impl.ImmutableCompositeNode;
import org.opendaylight.yangtools.yang.data.impl.SimpleNodeTOImpl;
-import org.opendaylight.yangtools.yang.data.impl.codec.xml.XmlDocumentUtils;
import org.opendaylight.yangtools.yang.data.impl.util.CompositeNodeBuilder;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
public class NetconfMessageTransformUtil {
}
public static boolean isDataRetrievalOperation(final QName rpc) {
- return NETCONF_URI == rpc.getNamespace()
+ return NETCONF_URI.equals(rpc.getNamespace())
&& (rpc.getLocalName().equals(NETCONF_GET_CONFIG_QNAME.getLocalName()) || rpc.getLocalName().equals(
NETCONF_GET_QNAME.getLocalName()));
}
+ public static boolean isDataEditOperation(final QName rpc) {
+ return NETCONF_URI.equals(rpc.getNamespace())
+ && rpc.getLocalName().equals(NETCONF_EDIT_CONFIG_QNAME.getLocalName());
+ }
+
+ /**
+ * Creates artificial schema node for edit-config rpc. This artificial schema looks like:
+ * <pre>
+ * {@code
+ * rpc
+ * edit-config
+ * config
+ * // All schema nodes from remote schema
+ * config
+ * edit-config
+ * rpc
+ * }
+ * </pre>
+ *
+     * This makes the translation of the rpc edit-config request (especially the config node)
+ * to xml use schema which is crucial for some types of nodes e.g. identity-ref.
+ */
+ public static DataNodeContainer createSchemaForEdit(final SchemaContext schemaContext) {
+ final QName config = QName.create(NETCONF_EDIT_CONFIG_QNAME, "config");
+ final QName editConfig = QName.create(NETCONF_EDIT_CONFIG_QNAME, "edit-config");
+ final NodeContainerProxy configProxy = new NodeContainerProxy(config, schemaContext.getChildNodes());
+ final NodeContainerProxy editConfigProxy = new NodeContainerProxy(editConfig, Sets.<DataSchemaNode>newHashSet(configProxy));
+ return new NodeContainerProxy(NETCONF_RPC_QNAME, Sets.<DataSchemaNode>newHashSet(editConfigProxy));
+ }
+
public static CompositeNodeTOImpl wrap(final QName name, final Node<?> node) {
if (node != null) {
return new CompositeNodeTOImpl(name, null, Collections.<Node<?>> singletonList(node));
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.connect.netconf.util;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.model.api.AugmentationSchema;
+import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.GroupingDefinition;
+import org.opendaylight.yangtools.yang.model.api.SchemaPath;
+import org.opendaylight.yangtools.yang.model.api.Status;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.UsesNode;
+
+class NodeContainerProxy implements ContainerSchemaNode {
+
+ private final Map<QName, DataSchemaNode> childNodes;
+ private final QName qName;
+
+ public NodeContainerProxy(final QName qName, final Map<QName, DataSchemaNode> childNodes) {
+ this.childNodes = Preconditions.checkNotNull(childNodes, "childNodes");
+ this.qName = Preconditions.checkNotNull(qName, "qName");
+ }
+
+ public NodeContainerProxy(final QName qName, final Set<DataSchemaNode> childNodes) {
+ this(qName, asMap(childNodes));
+ }
+
+ private static Map<QName, DataSchemaNode> asMap(final Set<DataSchemaNode> childNodes) {
+ final Map<QName, DataSchemaNode> mapped = Maps.newHashMap();
+ for (final DataSchemaNode childNode : childNodes) {
+ mapped.put(childNode.getQName(), childNode);
+ }
+ return mapped;
+ }
+
+ @Override
+ public Set<TypeDefinition<?>> getTypeDefinitions() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public Set<DataSchemaNode> getChildNodes() {
+ return Sets.newHashSet(childNodes.values());
+ }
+
+ @Override
+ public Set<GroupingDefinition> getGroupings() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public DataSchemaNode getDataChildByName(final QName qName) {
+ return childNodes.get(qName);
+ }
+
+ @Override
+ public DataSchemaNode getDataChildByName(final String s) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Set<UsesNode> getUses() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public boolean isPresenceContainer() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Set<AugmentationSchema> getAvailableAugmentations() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean isAugmenting() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean isAddedByUses() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean isConfiguration() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public ConstraintDefinition getConstraints() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public QName getQName() {
+ return qName;
+ }
+
+ @Override
+ public SchemaPath getPath() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public String getDescription() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public String getReference() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Status getStatus() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<UnknownSchemaNode> getUnknownSchemaNodes() {
+ return Collections.emptyList();
+ }
+}
*/
package org.opendaylight.controller.config.yang.md.sal.remote.rpc;
-import org.opendaylight.controller.sal.connector.remoterpc.*;
+import org.opendaylight.controller.sal.connector.remoterpc.ClientImpl;
+import org.opendaylight.controller.sal.connector.remoterpc.RemoteRpcProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.RoutingTableProvider;
+import org.opendaylight.controller.sal.connector.remoterpc.ServerImpl;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry;
import org.osgi.framework.BundleContext;
*
*/
public final class ZeroMQServerModule
- extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
+        extends org.opendaylight.controller.config.yang.md.sal.remote.rpc.AbstractZeroMQServerModule {
- private static final Integer ZEROMQ_ROUTER_PORT = 5554;
- private BundleContext bundleContext;
+ private static final Integer ZEROMQ_ROUTER_PORT = 5554;
+ private BundleContext bundleContext;
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
- super(identifier, dependencyResolver);
- }
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ super(identifier, dependencyResolver);
+ }
- public ZeroMQServerModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- ZeroMQServerModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public ZeroMQServerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final ZeroMQServerModule oldModule, final java.lang.AutoCloseable oldInstance) {
- super(identifier, dependencyResolver, oldModule, oldInstance);
- }
+ super(identifier, dependencyResolver, oldModule, oldInstance);
+ }
- @Override
- protected void customValidation() {
- // Add custom validation for module attributes here.
- }
+ @Override
+ protected void customValidation() {
+ // Add custom validation for module attributes here.
+ }
- @Override
- public java.lang.AutoCloseable createInstance() {
+ @Override
+ public java.lang.AutoCloseable createInstance() {
- Broker broker = getDomBrokerDependency();
+ Broker broker = getDomBrokerDependency();
- final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
+ final int port = getPort() != null ? getPort() : ZEROMQ_ROUTER_PORT;
- ServerImpl serverImpl = new ServerImpl(port);
+ ServerImpl serverImpl = new ServerImpl(port);
- ClientImpl clientImpl = new ClientImpl();
+ ClientImpl clientImpl = new ClientImpl();
- RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
+ RoutingTableProvider provider = new RoutingTableProvider(bundleContext);//,serverImpl);
- RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
- facade.setRoutingTableProvider(provider);
- facade.setContext(bundleContext);
- facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
+ RemoteRpcProvider facade = new RemoteRpcProvider(serverImpl, clientImpl);
+ facade.setRoutingTableProvider(provider);
+ facade.setContext(bundleContext);
+ facade.setRpcProvisionRegistry((RpcProvisionRegistry) broker);
- broker.registerProvider(facade, bundleContext);
- return facade;
- }
+ broker.registerProvider(facade, bundleContext);
+ return facade;
+ }
- public void setBundleContext(BundleContext bundleContext) {
- this.bundleContext = bundleContext;
- }
+ public void setBundleContext(final BundleContext bundleContext) {
+ this.bundleContext = bundleContext;
+ }
}
module.setBundleContext(bundleContext);
return module;
}
-
+
@Override
public Module createModule(String instanceName, DependencyResolver dependencyResolver,
DynamicMBeanWithInstance old, BundleContext bundleContext) throws Exception {
import org.opendaylight.controller.sal.connector.remoterpc.dto.Message;
import org.opendaylight.controller.sal.connector.remoterpc.dto.RouteIdentifierImpl;
import org.opendaylight.controller.sal.connector.remoterpc.util.XmlUtils;
-import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcResult;
import com.google.common.util.concurrent.ListenableFuture;
/**
- * An implementation of {@link RpcImplementation} that makes
+ * An implementation of {@link org.opendaylight.controller.sal.core.api.RpcImplementation} that makes
* remote RPC calls
*/
public class ClientImpl implements RemoteRpcClient {
private Message _message;
private ZMQ.Socket _receiveSocket;
-
+
public MessageWrapper(Message message, ZMQ.Socket receiveSocket) {
this._message = message;
this._receiveSocket = receiveSocket;
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
- </parent>
- <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <packaging>bundle</packaging>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller.tests</groupId>
+ <artifactId>sal-remoterpc-connector-test-parent</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </parent>
+ <artifactId>sal-remoterpc-connector-test-consumer</artifactId>
+ <packaging>bundle</packaging>
+ <dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <configuration>
- <instructions>
- <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
- </instructions>
- </configuration>
- </plugin>
- </plugins>
- </build>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>containermanager</artifactId>
+ <version>0.5.2-SNAPSHOT</version>
+ </dependency>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal</artifactId>
+ <version>0.8.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-binding-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-util</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-api</artifactId>
+ <version>1.1-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-data-impl</artifactId>
+ <version>${yangtools.version}</version>
+ </dependency>
+ </dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-impl</artifactId>
- <version>${yangtools.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <configuration>
+ <instructions>
+ <Bundle-Activator>org.opendaylight.controller.sample.zeromq.consumer.ExampleConsumer</Bundle-Activator>
+ </instructions>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
import java.io.InputStream;
import java.net.URI;
import java.util.Hashtable;
-import java.util.concurrent.*;
+import java.util.concurrent.Future;
import org.opendaylight.controller.sal.core.api.AbstractConsumer;
import org.opendaylight.controller.sal.core.api.Broker.ConsumerSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.opendaylight.yangtools.yang.data.impl.XmlTreeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.CompositeNodeTOImpl;
import javax.xml.stream.XMLStreamException;
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
- <packaging>pom</packaging>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
- <scm>
- <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
- <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
- <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
- </scm>
+ <packaging>pom</packaging>
<modules>
<module>consumer-service</module>
<module>test-it</module>
<module>test-nb</module>
</modules>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL</url>
+ </scm>
</project>
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-binding-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>containermanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.5.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.8.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-common-util</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-remoterpc-connector</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
</dependencies>
import org.slf4j.LoggerFactory;
import java.net.URI;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
public class ExampleProvider extends AbstractProvider implements RpcImplementation {
}
return false;
}
-
+
// Adds a child SimpleNode containing the value "success" to the input CompositeNode
private CompositeNode addSuccessNode(CompositeNode input) {
List<Node<?>> list = new ArrayList<Node<?>>(input.getChildren());
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-it</artifactId>
<scm>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-provider</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.tests</groupId>
<artifactId>sal-remoterpc-connector-test-consumer</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-broker-impl</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.ops4j.pax.exam</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>ietf-netconf-monitoring</artifactId>
+ <version>0.2.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
import junit.framework.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.net.URI;
-import java.util.Hashtable;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.baseModelBundles;
import static org.opendaylight.controller.test.sal.binding.it.TestHelper.bindingAwareSalBundles;
-import static org.ops4j.pax.exam.CoreOptions.*;
+//import static org.ops4j.pax.exam.CoreOptions.*;
@RunWith(PaxExam.class)
public class RouterTest {
@Inject
@Filter(timeout=60*1000)
Broker broker;
-
+
private ZMQ.Context zmqCtx = ZMQ.context(1);
//private Server router;
//private ExampleProvider provider;
return msg;
}
-
+
private void printState(){
Bundle[] b = ctx.getBundles();
_logger.debug("\n\nNumber of bundles [{}]\n\n]", b.length);
<parent>
<artifactId>sal-remoterpc-connector-test-parent</artifactId>
<groupId>org.opendaylight.controller.tests</groupId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.1-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector-test-nb</artifactId>
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.InstanceIdentifier;
-import org.osgi.framework.*;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
* <ul>
* <li><b>/restconf</b> - {@link #getRoot()}
* <ul>
- * <li><b>/config</b> - {@link #readConfigurationData(String)}
+ * <li><b>/config</b> - {@link #readConfigurationData(String)}
* {@link #updateConfigurationData(String, CompositeNode)}
* {@link #createConfigurationData(CompositeNode)}
* {@link #createConfigurationData(String, CompositeNode)}
* {@link #deleteConfigurationData(String)}
- * <li><b>/operational</b> - {@link #readOperationalData(String)}
+ * <li><b>/operational</b> - {@link #readOperationalData(String)}
* <li>/modules - {@link #getModules()}
* <ul>
* <li>/module
Draft02.MediaTypes.DATA+JSON, Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public StructuredData invokeRpc(@Encoded @PathParam("identifier") String identifier, CompositeNode payload);
-
+
@POST
@Path("/operations/{identifier:.+}")
@Produces({Draft02.MediaTypes.OPERATION+JSON, Draft02.MediaTypes.OPERATION+XML,
Draft02.MediaTypes.DATA+JSON, Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public StructuredData invokeRpc(@Encoded @PathParam("identifier") String identifier, @DefaultValue("") String noPayload);
-
+
@GET
@Path("/config/{identifier:.+}")
- @Produces({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
+ @Produces({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public StructuredData readConfigurationData(@Encoded @PathParam("identifier") String identifier);
@GET
@Path("/operational/{identifier:.+}")
- @Produces({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
+ @Produces({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public StructuredData readOperationalData(@Encoded @PathParam("identifier") String identifier);
@PUT
@Path("/config/{identifier:.+}")
- @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
+ @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public Response updateConfigurationData(@Encoded @PathParam("identifier") String identifier, CompositeNode payload);
@POST
@Path("/config/{identifier:.+}")
- @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
+ @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public Response createConfigurationData(@Encoded @PathParam("identifier") String identifier, CompositeNode payload);
@POST
@Path("/config")
- @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
+ @Consumes({Draft02.MediaTypes.DATA+JSON,Draft02.MediaTypes.DATA+XML,
MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_XML})
public Response createConfigurationData(CompositeNode payload);
import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
public final class RestUtil {
-
+
public static final String SQUOTE = "'";
public static final String DQUOTE = "\"";
private static final Pattern PREDICATE_PATTERN = Pattern.compile("\\[(.*?)\\]");
IdentityValuesDTO identityValuesDTO = new IdentityValuesDTO(value);
for (int i = 1; i < xPathParts.length; i++) {
String xPathPartTrimmed = xPathParts[i].trim();
-
+
String xPathPartStr = getIdAndPrefixAsStr(xPathPartTrimmed);
IdentityValue identityValue = toIdentity(xPathPartStr, prefixMap);
if (identityValue == null) {
return null;
}
-
+
List<Predicate> predicates = toPredicates(xPathPartTrimmed, prefixMap);
if (predicates == null) {
return null;
}
identityValue.setPredicates(predicates);
-
+
identityValuesDTO.add(identityValue);
}
return identityValuesDTO.getValuesWithNamespaces().isEmpty() ? null : identityValuesDTO;
}
-
+
private static String getIdAndPrefixAsStr(String pathPart) {
int predicateStartIndex = pathPart.indexOf("[");
return predicateStartIndex == -1 ? pathPart : pathPart.substring(0, predicateStartIndex);
}
-
+
private static IdentityValue toIdentity(String xPathPart, PrefixesMaping prefixMap) {
String xPathPartTrimmed = xPathPart.trim();
if (xPathPartTrimmed.isEmpty()) {
package org.opendaylight.controller.sal.rest.impl;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERRORS_CONTAINER_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_APP_TAG_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_INFO_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_LIST_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_MESSAGE_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_TAG_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.ERROR_TYPE_QNAME;
+import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.NAMESPACE;
+
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-import static org.opendaylight.controller.sal.rest.api.Draft02.RestConfModule.*;
-
import org.opendaylight.controller.sal.restconf.impl.ControllerContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
import org.opendaylight.controller.sal.restconf.impl.RestconfError;
private HttpHeaders headers;
@Override
- public Response toResponse( RestconfDocumentedException exception ) {
+ public Response toResponse( final RestconfDocumentedException exception ) {
LOG.debug( "In toResponse: {}", exception.getMessage() );
// single space char in the entity.
return Response.status( exception.getStatus() )
- .type( MediaType.TEXT_PLAIN_TYPE )
- .entity( " " ).build();
+ .type( MediaType.TEXT_PLAIN_TYPE )
+ .entity( " " ).build();
}
int status = errors.iterator().next().getErrorTag().getStatusCode();
if( errorsSchemaNode == null ) {
return Response.status( status )
- .type( MediaType.TEXT_PLAIN_TYPE )
- .entity( exception.getMessage() ).build();
+ .type( MediaType.TEXT_PLAIN_TYPE )
+ .entity( exception.getMessage() ).build();
}
ImmutableList.Builder<Node<?>> errorNodes = ImmutableList.<Node<?>> builder();
}
ImmutableCompositeNode errorsNode =
- ImmutableCompositeNode.create( ERRORS_CONTAINER_QNAME, errorNodes.build() );
+ ImmutableCompositeNode.create( ERRORS_CONTAINER_QNAME, errorNodes.build() );
Object responseBody;
if( mediaType.getSubtype().endsWith( "json" ) ) {
return Response.status( status ).type( mediaType ).entity( responseBody ).build();
}
- private Object toJsonResponseBody( ImmutableCompositeNode errorsNode,
- DataNodeContainer errorsSchemaNode ) {
+ private Object toJsonResponseBody( final ImmutableCompositeNode errorsNode,
+ final DataNodeContainer errorsSchemaNode ) {
JsonMapper jsonMapper = new JsonMapper();
return responseBody;
}
- private Object toXMLResponseBody( ImmutableCompositeNode errorsNode,
- DataNodeContainer errorsSchemaNode ) {
+ private Object toXMLResponseBody( final ImmutableCompositeNode errorsNode,
+ final DataNodeContainer errorsSchemaNode ) {
XmlMapper xmlMapper = new XmlMapper();
return responseBody;
}
- private String documentToString( Document doc ) throws TransformerException, UnsupportedEncodingException {
+ private String documentToString( final Document doc ) throws TransformerException, UnsupportedEncodingException {
Transformer transformer = createTransformer();
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
}
private Transformer createTransformer() throws TransformerFactoryConfigurationError,
- TransformerConfigurationException {
+ TransformerConfigurationException {
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transformer = tf.newTransformer();
transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "no" );
return transformer;
}
- private Node<?> toDomNode( RestconfError error ) {
+ private Node<?> toDomNode( final RestconfError error ) {
CompositeNodeBuilder<ImmutableCompositeNode> builder = ImmutableCompositeNode.builder();
builder.setQName( ERROR_LIST_QNAME );
return builder.toInstance();
}
- private Node<?> parseErrorInfo( String errorInfo ) {
+ private Node<?> parseErrorInfo( final String errorInfo ) {
if( Strings.isNullOrEmpty( errorInfo ) ) {
return null;
}
String errorInfoWithRoot =
new StringBuilder( "<error-info xmlns=\"" ).append( NAMESPACE ).append( "\">" )
- .append( errorInfo ).append( "</error-info>" ).toString();
+ .append( errorInfo ).append( "</error-info>" ).toString();
Document doc = null;
try {
doc = factory.newDocumentBuilder().parse(
- new InputSource( new StringReader( errorInfoWithRoot ) ) );
+ new InputSource( new StringReader( errorInfoWithRoot ) ) );
}
catch( Exception e ) {
// TODO: what if the content is text that happens to contain invalid markup? Could
// wrap in CDATA and try again.
LOG.warn( "Error parsing restconf error-info, \"" + errorInfo + "\", as XML: " +
- e.toString() );
+ e.toString() );
return null;
}
return errorInfoNode;
}
- private void addLeaf( CompositeNodeBuilder<ImmutableCompositeNode> builder, QName qname,
- String value ) {
+ private void addLeaf( final CompositeNodeBuilder<ImmutableCompositeNode> builder, final QName qname,
+ final String value ) {
if( !Strings.isNullOrEmpty( value ) ) {
builder.addLeaf( qname, value );
}
*/
package org.opendaylight.controller.sal.restconf.impl;
-import com.google.common.base.Function;
-import com.google.common.base.Objects;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.BiMap;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.HashBiMap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;
import org.opendaylight.controller.sal.core.api.mount.MountService;
import org.opendaylight.controller.sal.rest.api.Draft02;
import org.opendaylight.controller.sal.rest.impl.RestUtil;
-import org.opendaylight.controller.sal.restconf.impl.InstanceIdWithSchemaNode;
-import org.opendaylight.controller.sal.restconf.impl.RestCodec;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
import org.opendaylight.yangtools.concepts.Codec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Function;
+import com.google.common.base.Objects;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.BiMap;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.HashBiMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
public class ControllerContext implements SchemaContextListener {
private final static Logger LOG = LoggerFactory.getLogger( ControllerContext.class );
}
private InstanceIdWithSchemaNode toIdentifier( final String restconfInstance,
- final boolean toMountPointIdentifier ) {
+ final boolean toMountPointIdentifier ) {
this.checkPreconditions();
Iterable<String> split = Splitter.on( "/" ).split( restconfInstance );
InstanceIdentifierBuilder builder = InstanceIdentifier.builder();
Module latestModule = this.getLatestModule( globalSchema, startModule );
InstanceIdWithSchemaNode iiWithSchemaNode = this.collectPathArguments( builder, pathArgs,
- latestModule, null, toMountPointIdentifier );
+ latestModule, null, toMountPointIdentifier );
if( iiWithSchemaNode == null ) {
throw new RestconfDocumentedException(
Predicate<Module> filter = new Predicate<Module>() {
@Override
- public boolean apply( Module m ) {
+ public boolean apply( final Module m ) {
return Objects.equal( m.getName(), moduleName );
}
};
final SchemaContext mountPointSchema = mountPoint.getSchemaContext();
Set<Module> moduleSchemas = mountPointSchema == null ? null :
- mountPointSchema.findModuleByNamespace( namespace );
+ mountPointSchema.findModuleByNamespace( namespace );
return moduleSchemas == null ? null : this.filterLatestModule( moduleSchemas );
}
public Module findModuleByNameAndRevision( final QName module ) {
this.checkPreconditions();
Preconditions.checkArgument( module != null && module.getLocalName() != null &&
- module.getRevision() != null );
+ module.getRevision() != null );
return globalSchema.findModuleByName( module.getLocalName(), module.getRevision() );
}
public Module findModuleByNameAndRevision( final MountInstance mountPoint, final QName module ) {
this.checkPreconditions();
Preconditions.checkArgument( module != null && module.getLocalName() != null &&
- module.getRevision() != null && mountPoint != null );
+ module.getRevision() != null && mountPoint != null );
SchemaContext schemaContext = mountPoint.getSchemaContext();
return schemaContext == null ? null :
- schemaContext.findModuleByName( module.getLocalName(), module.getRevision() );
+ schemaContext.findModuleByName( module.getLocalName(), module.getRevision() );
}
public DataNodeContainer getDataNodeContainerFor( final InstanceIdentifier path ) {
String module = this.uriToModuleName.get( qname.getNamespace() );
if( module == null ) {
final Module moduleSchema = globalSchema.findModuleByNamespaceAndRevision(
- qname.getNamespace(), qname.getRevision() );
+ qname.getNamespace(), qname.getRevision() );
if( moduleSchema == null ) {
return null;
}
SchemaContext schemaContext = mountPoint.getSchemaContext();
final Module moduleSchema = schemaContext.findModuleByNamespaceAndRevision(
- qname.getNamespace(), qname.getRevision() );
+ qname.getNamespace(), qname.getRevision() );
if( moduleSchema == null ) {
return null;
}
@Override
public boolean apply(final GroupingDefinition g) {
return Objects.equal(g.getQName().getLocalName(),
- Draft02.RestConfModule.ERRORS_GROUPING_SCHEMA_NODE);
+ Draft02.RestConfModule.ERRORS_GROUPING_SCHEMA_NODE);
}
};
List<DataSchemaNode> instanceDataChildrenByName =
this.findInstanceDataChildrenByName(restconfGrouping,
- Draft02.RestConfModule.ERRORS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.ERRORS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instanceDataChildrenByName, null);
}
- public DataSchemaNode getRestconfModuleRestConfSchemaNode( Module inRestconfModule,
- String schemaNodeName ) {
+ public DataSchemaNode getRestconfModuleRestConfSchemaNode( final Module inRestconfModule,
+ final String schemaNodeName ) {
Module restconfModule = inRestconfModule;
if( restconfModule == null ) {
restconfModule = getRestconfModule();
@Override
public boolean apply(final GroupingDefinition g) {
return Objects.equal(g.getQName().getLocalName(),
- Draft02.RestConfModule.RESTCONF_GROUPING_SCHEMA_NODE);
+ Draft02.RestConfModule.RESTCONF_GROUPING_SCHEMA_NODE);
}
};
List<DataSchemaNode> instanceDataChildrenByName =
this.findInstanceDataChildrenByName(restconfGrouping,
- Draft02.RestConfModule.RESTCONF_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.RESTCONF_CONTAINER_SCHEMA_NODE);
final DataSchemaNode restconfContainer = Iterables.getFirst(instanceDataChildrenByName, null);
if (Objects.equal(schemaNodeName, Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.OPERATIONS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
final DataSchemaNode modules = Iterables.getFirst(instances, null);
instances = this.findInstanceDataChildrenByName(((DataNodeContainer) modules),
- Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAM_LIST_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULES_CONTAINER_SCHEMA_NODE);
final DataSchemaNode modules = Iterables.getFirst(instances, null);
instances = this.findInstanceDataChildrenByName(((DataNodeContainer) modules),
- Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE);
+ Draft02.RestConfModule.MODULE_LIST_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
else if(Objects.equal(schemaNodeName, Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE)) {
List<DataSchemaNode> instances =
this.findInstanceDataChildrenByName(((DataNodeContainer) restconfContainer),
- Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
+ Draft02.RestConfModule.STREAMS_CONTAINER_SCHEMA_NODE);
return Iterables.getFirst(instances, null);
}
private String toUriString( final Object object ) throws UnsupportedEncodingException {
return object == null ? "" :
- URLEncoder.encode( object.toString(), ControllerContext.URI_ENCODING_CHAR_SET );
+ URLEncoder.encode( object.toString(), ControllerContext.URI_ENCODING_CHAR_SET );
}
private InstanceIdWithSchemaNode collectPathArguments( final InstanceIdentifierBuilder builder,
if( strings.isEmpty() ) {
return new InstanceIdWithSchemaNode( builder.toInstance(),
- ((DataSchemaNode) parentNode), mountPoint );
+ ((DataSchemaNode) parentNode), mountPoint );
}
String head = strings.iterator().next();
DataSchemaNode targetNode = null;
if( !Strings.isNullOrEmpty( moduleName ) ) {
if( Objects.equal( moduleName, ControllerContext.MOUNT_MODULE ) &&
- Objects.equal( nodeName, ControllerContext.MOUNT_NODE ) ) {
+ Objects.equal( nodeName, ControllerContext.MOUNT_NODE ) ) {
if( mountPoint != null ) {
throw new RestconfDocumentedException(
"Restconf supports just one mount point in URI.",
if( mount == null ) {
LOG.debug( "Instance identifier to missing mount point: {}", partialPath );
throw new RestconfDocumentedException(
- "Mount point does not exist.", ErrorType.PROTOCOL, ErrorTag.UNKNOWN_ELEMENT );
+ "Mount point does not exist.", ErrorType.PROTOCOL, ErrorTag.UNKNOWN_ELEMENT );
}
final SchemaContext mountPointSchema = mount.getSchemaContext();
final String moduleNameBehindMountPoint = toModuleName( strings.get( 1 ) );
if( moduleNameBehindMountPoint == null ) {
throw new RestconfDocumentedException(
- "First node after mount point in URI has to be in format \"moduleName:nodeName\"",
- ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ "First node after mount point in URI has to be in format \"moduleName:nodeName\"",
+ ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
final Module moduleBehindMountPoint = this.getLatestModule( mountPointSchema,
- moduleNameBehindMountPoint );
+ moduleNameBehindMountPoint );
if( moduleBehindMountPoint == null ) {
throw new RestconfDocumentedException(
"\"" +moduleName + "\" module does not exist in mount point.",
List<String> subList = strings.subList( 1, strings.size() );
return this.collectPathArguments( InstanceIdentifier.builder(), subList, moduleBehindMountPoint,
- mount, returnJustMountPoint );
+ mount, returnJustMountPoint );
}
Module module = null;
else {
SchemaContext schemaContext = mountPoint.getSchemaContext();
module = schemaContext == null ? null :
- this.getLatestModule( schemaContext, moduleName );
+ this.getLatestModule( schemaContext, moduleName );
if( module == null ) {
throw new RestconfDocumentedException(
"\"" + moduleName + "\" module does not exist in mount point.",
}
targetNode = this.findInstanceDataChildByNameAndNamespace(
- parentNode, nodeName, module.getNamespace() );;
+ parentNode, nodeName, module.getNamespace() );
if( targetNode == null ) {
throw new RestconfDocumentedException(
"URI has bad format. Possible reasons:\n" +
- " 1. \"" + head + "\" was not found in parent data node.\n" +
- " 2. \"" + head + "\" is behind mount point. Then it should be in format \"/" +
- MOUNT + "/" + head + "\".", ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ " 1. \"" + head + "\" was not found in parent data node.\n" +
+ " 2. \"" + head + "\" is behind mount point. Then it should be in format \"/" +
+ MOUNT + "/" + head + "\".", ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
} else {
final List<DataSchemaNode> potentialSchemaNodes =
- this.findInstanceDataChildrenByName( parentNode, nodeName );
+ this.findInstanceDataChildrenByName( parentNode, nodeName );
if( potentialSchemaNodes.size() > 1 ) {
final StringBuilder strBuilder = new StringBuilder();
for( final DataSchemaNode potentialNodeSchema : potentialSchemaNodes ) {
strBuilder.append( " " )
- .append( potentialNodeSchema.getQName().getNamespace() )
- .append( "\n" );
+ .append( potentialNodeSchema.getQName().getNamespace() )
+ .append( "\n" );
}
throw new RestconfDocumentedException(
}
this.addKeyValue( keyValues, listNode.getDataChildByName( key ),
- uriKeyValue, mountPoint );
+ uriKeyValue, mountPoint );
i++;
}
}
if( (targetNode instanceof DataNodeContainer) ) {
final List<String> remaining = strings.subList( consumed, strings.size() );
return this.collectPathArguments( builder, remaining,
- ((DataNodeContainer) targetNode), mountPoint, returnJustMountPoint );
+ ((DataNodeContainer) targetNode), mountPoint, returnJustMountPoint );
}
return new InstanceIdWithSchemaNode( builder.toInstance(), targetNode, mountPoint );
Predicate<DataSchemaNode> filter = new Predicate<DataSchemaNode>() {
@Override
- public boolean apply( DataSchemaNode node ) {
+ public boolean apply( final DataSchemaNode node ) {
return Objects.equal( node.getQName().getNamespace(), namespace );
}
};
}
public List<DataSchemaNode> findInstanceDataChildrenByName( final DataNodeContainer container,
- final String name ) {
+ final String name ) {
Preconditions.<DataNodeContainer> checkNotNull( container );
Preconditions.<String> checkNotNull( name );
Predicate<DataSchemaNode> filter = new Predicate<DataSchemaNode>() {
@Override
- public boolean apply( DataSchemaNode node ) {
+ public boolean apply( final DataSchemaNode node ) {
return Objects.equal( node.getQName().getLocalName(), name );
}
};
}
Iterable<ChoiceNode> choiceNodes = Iterables.<ChoiceNode> filter( container.getChildNodes(),
- ChoiceNode.class );
+ ChoiceNode.class );
final Function<ChoiceNode, Set<ChoiceCaseNode>> choiceFunction =
new Function<ChoiceNode, Set<ChoiceCaseNode>>() {
};
Iterable<Set<ChoiceCaseNode>> map = Iterables.<ChoiceNode, Set<ChoiceCaseNode>> transform(
- choiceNodes, choiceFunction );
+ choiceNodes, choiceFunction );
final Iterable<ChoiceCaseNode> allCases = Iterables.<ChoiceCaseNode> concat( map );
for( final ChoiceCaseNode caze : allCases ) {
public boolean isInstantiatedDataSchema( final DataSchemaNode node ) {
return node instanceof LeafSchemaNode || node instanceof LeafListSchemaNode ||
- node instanceof ContainerSchemaNode || node instanceof ListSchemaNode;
+ node instanceof ContainerSchemaNode || node instanceof ListSchemaNode;
}
private void addKeyValue( final HashMap<QName, Object> map, final DataSchemaNode node,
- final String uriValue, final MountInstance mountPoint ) {
+ final String uriValue, final MountInstance mountPoint ) {
Preconditions.<String> checkNotNull( uriValue );
Preconditions.checkArgument( (node instanceof LeafSchemaNode) );
if( decoded == null ) {
throw new RestconfDocumentedException(
- uriValue + " from URI can't be resolved. " + additionalInfo,
- ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
+ uriValue + " from URI can't be resolved. " + additionalInfo,
+ ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE );
}
map.put( node.getQName(), decoded );
};
Optional<QName> namespace = FluentIterable.from( sorted )
- .transform( transform )
- .firstMatch( findFirst );
+ .transform( transform )
+ .firstMatch( findFirst );
return namespace.isPresent() ? QName.create( namespace.get(), node ) : null;
}
}
private CharSequence convertToRestconfIdentifier( final PathArgument argument,
- final DataNodeContainer node ) {
+ final DataNodeContainer node ) {
if( argument instanceof NodeIdentifier && node instanceof ContainerSchemaNode ) {
return convertToRestconfIdentifier( (NodeIdentifier) argument, (ContainerSchemaNode) node );
}
}
else if( argument != null && node != null ) {
throw new IllegalArgumentException(
- "Conversion of generic path argument is not supported" );
+ "Conversion of generic path argument is not supported" );
}
else {
throw new IllegalArgumentException( "Unhandled parameter types: "
}
private CharSequence convertToRestconfIdentifier( final NodeIdentifier argument,
- final ContainerSchemaNode node ) {
+ final ContainerSchemaNode node ) {
StringBuilder builder = new StringBuilder();
builder.append( "/" );
QName nodeType = argument.getNodeType();
}
private CharSequence convertToRestconfIdentifier( final NodeIdentifierWithPredicates argument,
- final ListSchemaNode node ) {
+ final ListSchemaNode node ) {
QName nodeType = argument.getNodeType();
final CharSequence nodeIdentifier = this.toRestconfIdentifier( nodeType );
final Map<QName, Object> keyValues = argument.getKeyValues();
import com.google.common.base.Preconditions;
public final class EmptyNodeWrapper implements NodeWrapper<Node<?>>, Node<Void> {
-
+
private Node<?> unwrapped;
-
+
private String localName;
private URI namespace;
private QName name;
public boolean isComposite() {
return composite;
}
-
+
public void setComposite(boolean composite) {
this.composite = composite;
}
-
+
public EmptyNodeWrapper(URI namespace, String localName) {
this.localName = Preconditions.checkNotNull(localName);
this.namespace = namespace;
}
-
+
@Override
public void setQname(QName name) {
Preconditions.checkState(unwrapped == null, "Cannot change the object, due to data inconsistencies.");
this.name = name;
}
-
+
@Override
public QName getQname() {
return name;
}
-
+
@Override
public String getLocalName() {
if (unwrapped != null) {
}
return localName;
}
-
+
@Override
public URI getNamespace() {
if (unwrapped != null) {
elementData.add(new IdentityValue(namespace, value, prefix));
this.originValue = originValue;
}
-
+
public IdentityValuesDTO(String originValue) {
this.originValue = originValue;
}
-
+
public IdentityValuesDTO() {
originValue = null;
}
public void add(String namespace, String value, String prefix) {
elementData.add(new IdentityValue(namespace, value, prefix));
}
-
+
public void add(IdentityValue identityValue) {
elementData.add(identityValue);
}
-
+
public List<IdentityValue> getValuesWithNamespaces() {
return Collections.unmodifiableList(elementData);
}
-
+
@Override
public String toString() {
return elementData.toString();
}
-
+
public String getOriginValue() {
return originValue;
}
public void setPredicates(List<Predicate> predicates) {
this.predicates = predicates;
}
-
+
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
}
}
-
+
public static final class Predicate {
-
+
private final IdentityValue name;
private final String value;
-
+
public Predicate(IdentityValue name, String value) {
super();
this.name = name;
this.value = value;
}
-
+
public IdentityValue getName() {
return name;
}
-
+
public String getValue() {
return value;
}
}
return sb.toString();
}
-
+
public boolean isLeafList() {
return name == null ? true : false;
}
-
+
}
}
public interface NodeWrapper<T extends Node<?>> {
void setQname(QName name);
-
+
QName getQname();
-
+
T unwrap();
-
+
boolean isChangeAllowed();
-
+
URI getNamespace();
void setNamespace(URI namespace);
-
+
String getLocalName();
}
import org.slf4j.LoggerFactory;
public class RestCodec {
-
+
private static final Logger logger = LoggerFactory.getLogger(RestCodec.class);
private RestCodec() {
logger.info("Idenetityref will be translated as NULL for data - {}", String.valueOf(valueWithNamespace));
return null;
}
-
+
return QName.create(module.getNamespace(), module.getRevision(), valueWithNamespace.getValue());
}
}
}
}
-
+
return result.isEmpty() ? null : new InstanceIdentifier(result);
}
return null;
}
}
-
+
private static Module getModuleByNamespace(String namespace, MountInstance mountPoint) {
URI validNamespace = resolveValidNamespace(namespace, mountPoint);
}
return module;
}
-
+
private static URI resolveValidNamespace(String namespace, MountInstance mountPoint) {
URI validNamespace;
if (mountPoint != null) {
import com.google.common.base.Preconditions;
public final class SimpleNodeWrapper implements NodeWrapper<SimpleNode<?>>, SimpleNode<Object> {
-
+
private SimpleNode<Object> simpleNode;
-
+
private String localName;
private Object value;
private URI namespace;
this.localName = Preconditions.checkNotNull(localName);
this.value = value;
}
-
+
public SimpleNodeWrapper(URI namespace, String localName, Object value) {
this(localName, value);
this.namespace = namespace;
}
-
+
@Override
public void setQname(QName name) {
Preconditions.checkState(simpleNode == null, "Cannot change the object, due to data inconsistencies.");
this.name = name;
}
-
+
@Override
public QName getQname() {
return name;
}
-
+
@Override
public String getLocalName() {
if (simpleNode != null) {
}
return localName;
}
-
+
@Override
public URI getNamespace() {
if (simpleNode != null) {
name = new QName(namespace, localName);
}
simpleNode = NodeFactory.createImmutableSimpleNode(name, null, value);
-
+
value = null;
namespace = null;
localName = null;
public Object setValue(Object value) {
return unwrap().setValue(value);
}
-
+
}
import org.opendaylight.controller.sal.core.api.mount.MountInstance;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
public class StructuredData {
private final DataSchemaNode schema;
private final MountInstance mountPoint;
- public StructuredData(CompositeNode data, DataSchemaNode schema, MountInstance mountPoint) {
+ public StructuredData(final CompositeNode data, final DataSchemaNode schema, final MountInstance mountPoint) {
this.data = data;
this.schema = schema;
this.mountPoint = mountPoint;
*/
public class ListenerAdapter implements DataChangeListener {
- private static final Logger logger = LoggerFactory
- .getLogger(ListenerAdapter.class);
- private final XmlMapper xmlMapper = new XmlMapper();
- private final SimpleDateFormat rfc3339 = new SimpleDateFormat(
- "yyyy-MM-dd'T'hh:mm:ssZ");
-
- private final InstanceIdentifier path;
- private ListenerRegistration<DataChangeListener> registration;
- private final String streamName;
- private Set<Channel> subscribers = new ConcurrentSet<>();
- private final EventBus eventBus;
- private final EventBusChangeRecorder eventBusChangeRecorder;
-
- /**
- * Creates new {@link ListenerAdapter} listener specified by path and stream
- * name.
- *
- * @param path
- * Path to data in data store.
- * @param streamName
- * The name of the stream.
- */
- ListenerAdapter(InstanceIdentifier path, String streamName) {
- Preconditions.checkNotNull(path);
- Preconditions
- .checkArgument(streamName != null && !streamName.isEmpty());
- this.path = path;
- this.streamName = streamName;
- eventBus = new AsyncEventBus(Executors.newSingleThreadExecutor());
- eventBusChangeRecorder = new EventBusChangeRecorder();
- eventBus.register(eventBusChangeRecorder);
- }
-
- @Override
- public void onDataChanged(
- DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
- if (!change.getCreatedConfigurationData().isEmpty()
- || !change.getCreatedOperationalData().isEmpty()
- || !change.getUpdatedConfigurationData().isEmpty()
- || !change.getUpdatedOperationalData().isEmpty()
- || !change.getRemovedConfigurationData().isEmpty()
- || !change.getRemovedOperationalData().isEmpty()) {
- String xml = prepareXmlFrom(change);
- Event event = new Event(EventType.NOTIFY);
- event.setData(xml);
- eventBus.post(event);
- }
- }
-
- /**
- * Tracks events of data change by customer.
- */
- private final class EventBusChangeRecorder {
- @Subscribe
- public void recordCustomerChange(Event event) {
- if (event.getType() == EventType.REGISTER) {
- Channel subscriber = event.getSubscriber();
- if (!subscribers.contains(subscriber)) {
- subscribers.add(subscriber);
- }
- } else if (event.getType() == EventType.DEREGISTER) {
- subscribers.remove(event.getSubscriber());
- Notificator
- .removeListenerIfNoSubscriberExists(ListenerAdapter.this);
- } else if (event.getType() == EventType.NOTIFY) {
- for (Channel subscriber : subscribers) {
- if (subscriber.isActive()) {
- logger.debug("Data are sent to subscriber {}:",
- subscriber.remoteAddress());
- subscriber.writeAndFlush(new TextWebSocketFrame(event
- .getData()));
- } else {
- logger.debug(
- "Subscriber {} is removed - channel is not active yet.",
- subscriber.remoteAddress());
- subscribers.remove(subscriber);
- }
- }
- }
- }
- }
-
- /**
- * Represents event of specific {@link EventType} type, holds data and
- * {@link Channel} subscriber.
- */
- private final class Event {
- private final EventType type;
- private Channel subscriber;
- private String data;
-
- /**
- * Creates new event specified by {@link EventType} type.
- *
- * @param type
- * EventType
- */
- public Event(EventType type) {
- this.type = type;
- }
-
- /**
- * Gets the {@link Channel} subscriber.
- *
- * @return Channel
- */
- public Channel getSubscriber() {
- return subscriber;
- }
-
- /**
- * Sets subscriber for event.
- *
- * @param subscriber
- * Channel
- */
- public void setSubscriber(Channel subscriber) {
- this.subscriber = subscriber;
- }
-
- /**
- * Gets event data.
- *
- * @return String representation of event data.
- */
- public String getData() {
- return data;
- }
-
- /**
- * Sets event data.
- *
- * @param String
- * data.
- */
- public void setData(String data) {
- this.data = data;
- }
-
- /**
- * Gets event type.
- *
- * @return The type of the event.
- */
- public EventType getType() {
- return type;
- }
- }
-
- /**
- * Type of the event.
- */
- private enum EventType {
- REGISTER, DEREGISTER, NOTIFY;
- }
-
- /**
- * Prepare data in printable form and transform it to String.
- *
- * @param change
- * DataChangeEvent
- * @return Data in printable form.
- */
- private String prepareXmlFrom(
- DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
- Document doc = createDocument();
- Element notificationElement = doc.createElementNS(
- "urn:ietf:params:xml:ns:netconf:notification:1.0",
- "notification");
- doc.appendChild(notificationElement);
-
- Element eventTimeElement = doc.createElement("eventTime");
- eventTimeElement.setTextContent(toRFC3339(new Date()));
- notificationElement.appendChild(eventTimeElement);
-
- Element dataChangedNotificationEventElement = doc.createElementNS(
- "urn:opendaylight:params:xml:ns:yang:controller:md:sal:remote",
- "data-changed-notification");
- addValuesToDataChangedNotificationEventElement(doc,
- dataChangedNotificationEventElement, change);
- notificationElement.appendChild(dataChangedNotificationEventElement);
-
- try {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- TransformerFactory tf = TransformerFactory.newInstance();
- Transformer transformer = tf.newTransformer();
- transformer
- .setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
- transformer.setOutputProperty(OutputKeys.METHOD, "xml");
- transformer.setOutputProperty(OutputKeys.INDENT, "yes");
- transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
- transformer.setOutputProperty(
- "{http://xml.apache.org/xslt}indent-amount", "4");
- transformer.transform(new DOMSource(doc), new StreamResult(
- new OutputStreamWriter(out, "UTF-8")));
- byte[] charData = out.toByteArray();
- return new String(charData, "UTF-8");
- } catch (TransformerException | UnsupportedEncodingException e) {
- String msg = "Error during transformation of Document into String";
- logger.error(msg, e);
- return msg;
- }
- }
-
- /**
- * Formats data specified by RFC3339.
- *
- * @param d
- * Date
- * @return Data specified by RFC3339.
- */
- private String toRFC3339(Date d) {
- return rfc3339.format(d).replaceAll("(\\d\\d)(\\d\\d)$", "$1:$2");
- }
-
- /**
- * Creates {@link Document} document.
- *
- * @return {@link Document} document.
- */
- private Document createDocument() {
- DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
- Document doc = null;
- try {
- DocumentBuilder bob = dbf.newDocumentBuilder();
- doc = bob.newDocument();
- } catch (ParserConfigurationException e) {
- return null;
- }
- return doc;
- }
-
- /**
- * Adds values to data changed notification event element.
- *
- * @param doc
- * {@link Document}
- * @param dataChangedNotificationEventElement
- * {@link Element}
- * @param change
- * {@link DataChangeEvent}
- */
- private void addValuesToDataChangedNotificationEventElement(Document doc,
- Element dataChangedNotificationEventElement,
- DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
- addValuesFromDataToElement(doc, change.getCreatedConfigurationData(),
- dataChangedNotificationEventElement, Store.CONFIG,
- Operation.CREATED);
- addValuesFromDataToElement(doc, change.getCreatedOperationalData(),
- dataChangedNotificationEventElement, Store.OPERATION,
- Operation.CREATED);
- if (change.getCreatedConfigurationData().isEmpty()) {
- addValuesFromDataToElement(doc,
- change.getUpdatedConfigurationData(),
- dataChangedNotificationEventElement, Store.CONFIG,
- Operation.UPDATED);
- }
- if (change.getCreatedOperationalData().isEmpty()) {
- addValuesFromDataToElement(doc, change.getUpdatedOperationalData(),
- dataChangedNotificationEventElement, Store.OPERATION,
- Operation.UPDATED);
- }
- addValuesFromDataToElement(doc, change.getRemovedConfigurationData(),
- dataChangedNotificationEventElement, Store.CONFIG,
- Operation.DELETED);
- addValuesFromDataToElement(doc, change.getRemovedOperationalData(),
- dataChangedNotificationEventElement, Store.OPERATION,
- Operation.DELETED);
- }
-
- /**
- * Adds values from data to element.
- *
- * @param doc
- * {@link Document}
- * @param data
- * Set of {@link InstanceIdentifier}.
- * @param element
- * {@link Element}
- * @param store
- * {@link Store}
- * @param operation
- * {@link Operation}
- */
- private void addValuesFromDataToElement(Document doc,
- Set<InstanceIdentifier> data, Element element, Store store,
- Operation operation) {
- if (data == null || data.isEmpty()) {
- return;
- }
- for (InstanceIdentifier path : data) {
- Node node = createDataChangeEventElement(doc, path, null, store,
- operation);
- element.appendChild(node);
- }
- }
-
- /**
- * Adds values from data to element.
- *
- * @param doc
- * {@link Document}
- * @param data
- * Map of {@link InstanceIdentifier} and {@link CompositeNode}.
- * @param element
- * {@link Element}
- * @param store
- * {@link Store}
- * @param operation
- * {@link Operation}
- */
- private void addValuesFromDataToElement(Document doc,
- Map<InstanceIdentifier, CompositeNode> data, Element element,
- Store store, Operation operation) {
- if (data == null || data.isEmpty()) {
- return;
- }
- for (Entry<InstanceIdentifier, CompositeNode> entry : data.entrySet()) {
- Node node = createDataChangeEventElement(doc, entry.getKey(),
- entry.getValue(), store, operation);
- element.appendChild(node);
- }
- }
-
- /**
- * Creates changed event element from data.
- *
- * @param doc
- * {@link Document}
- * @param path
- * Path to data in data store.
- * @param data
- * {@link CompositeNode}
- * @param store
- * {@link Store}
- * @param operation
- * {@link Operation}
- * @return {@link Node} node represented by changed event element.
- */
- private Node createDataChangeEventElement(Document doc,
- InstanceIdentifier path, CompositeNode data, Store store,
- Operation operation) {
- Element dataChangeEventElement = doc.createElement("data-change-event");
-
- Element pathElement = doc.createElement("path");
- addPathAsValueToElement(path, pathElement);
- dataChangeEventElement.appendChild(pathElement);
-
- Element storeElement = doc.createElement("store");
- storeElement.setTextContent(store.value);
- dataChangeEventElement.appendChild(storeElement);
-
- Element operationElement = doc.createElement("operation");
- operationElement.setTextContent(operation.value);
- dataChangeEventElement.appendChild(operationElement);
-
- if (data != null) {
- Element dataElement = doc.createElement("data");
- Node dataAnyXml = translateToXml(path, data);
- Node adoptedNode = doc.adoptNode(dataAnyXml);
- dataElement.appendChild(adoptedNode);
- dataChangeEventElement.appendChild(dataElement);
- }
-
- return dataChangeEventElement;
- }
-
- /**
- * Translates {@link CompositeNode} data to XML format.
- *
- * @param path
- * Path to data in data store.
- * @param data
- * {@link CompositeNode}
- * @return Data in XML format.
- */
- private Node translateToXml(InstanceIdentifier path, CompositeNode data) {
- DataNodeContainer schemaNode = ControllerContext.getInstance()
- .getDataNodeContainerFor(path);
- if (schemaNode == null) {
- logger.info(
- "Path '{}' contains node with unsupported type (supported type is Container or List) or some node was not found.",
- path);
- return null;
- }
- try {
- Document xml = xmlMapper.write(data, schemaNode);
- return xml.getFirstChild();
- } catch (UnsupportedDataTypeException e) {
- logger.error(
- "Error occured during translation of notification to XML.",
- e);
- return null;
- }
- }
-
- /**
- * Adds path as value to element.
- *
- * @param path
- * Path to data in data store.
- * @param element
- * {@link Element}
- */
- private void addPathAsValueToElement(InstanceIdentifier path,
- Element element) {
- // Map< key = namespace, value = prefix>
- Map<String, String> prefixes = new HashMap<>();
- InstanceIdentifier instanceIdentifier = path;
- StringBuilder textContent = new StringBuilder();
- for (PathArgument pathArgument : instanceIdentifier.getPath()) {
- textContent.append("/");
- writeIdentifierWithNamespacePrefix(element, textContent,
- pathArgument.getNodeType(), prefixes);
- if (pathArgument instanceof NodeIdentifierWithPredicates) {
- Map<QName, Object> predicates = ((NodeIdentifierWithPredicates) pathArgument)
- .getKeyValues();
- for (QName keyValue : predicates.keySet()) {
- String predicateValue = String.valueOf(predicates
- .get(keyValue));
- textContent.append("[");
- writeIdentifierWithNamespacePrefix(element, textContent,
- keyValue, prefixes);
- textContent.append("='");
- textContent.append(predicateValue);
- textContent.append("'");
- textContent.append("]");
- }
- } else if (pathArgument instanceof NodeWithValue) {
- textContent.append("[.='");
- textContent.append(((NodeWithValue) pathArgument).getValue());
- textContent.append("'");
- textContent.append("]");
- }
- }
- element.setTextContent(textContent.toString());
- }
-
- /**
- * Writes identifier that consists of prefix and QName.
- *
- * @param element
- * {@link Element}
- * @param textContent
- * StringBuilder
- * @param qName
- * QName
- * @param prefixes
- * Map of namespaces and prefixes.
- */
- private static void writeIdentifierWithNamespacePrefix(Element element,
- StringBuilder textContent, QName qName, Map<String, String> prefixes) {
- String namespace = qName.getNamespace().toString();
- String prefix = prefixes.get(namespace);
- if (prefix == null) {
- prefix = qName.getPrefix();
- if (prefix == null || prefix.isEmpty()
- || prefixes.containsValue(prefix)) {
- prefix = generateNewPrefix(prefixes.values());
- }
- }
-
- element.setAttribute("xmlns:" + prefix, namespace);
- textContent.append(prefix);
- prefixes.put(namespace, prefix);
-
- textContent.append(":");
- textContent.append(qName.getLocalName());
- }
-
- /**
- * Generates new prefix which consists of four random characters <a-z>.
- *
- * @param prefixes
- * Collection of prefixes.
- * @return New prefix which consists of four random characters <a-z>.
- */
- private static String generateNewPrefix(Collection<String> prefixes) {
- StringBuilder result = null;
- Random random = new Random();
- do {
- result = new StringBuilder();
- for (int i = 0; i < 4; i++) {
- int randomNumber = 0x61 + (Math.abs(random.nextInt()) % 26);
- result.append(Character.toChars(randomNumber));
- }
- } while (prefixes.contains(result.toString()));
-
- return result.toString();
- }
-
- /**
- * Gets path pointed to data in data store.
- *
- * @return Path pointed to data in data store.
- */
- public InstanceIdentifier getPath() {
- return path;
- }
-
- /**
- * Sets {@link ListenerRegistration} registration.
- *
- * @param registration
- * ListenerRegistration<DataChangeListener>
- */
- public void setRegistration(
- ListenerRegistration<DataChangeListener> registration) {
- this.registration = registration;
- }
-
- /**
- * Gets the name of the stream.
- *
- * @return The name of the stream.
- */
- public String getStreamName() {
- return streamName;
- }
-
- /**
- * Removes all subscribers and unregisters event bus change recorder form
- * event bus.
- */
- public void close() throws Exception {
- subscribers = new ConcurrentSet<>();
- registration.close();
- registration = null;
- eventBus.unregister(eventBusChangeRecorder);
- }
-
- /**
- * Checks if {@link ListenerRegistration} registration exist.
- *
- * @return True if exist, false otherwise.
- */
- public boolean isListening() {
- return registration == null ? false : true;
- }
-
- /**
- * Creates event of type {@link EventType#REGISTER}, set {@link Channel}
- * subscriber to the event and post event into event bus.
- *
- * @param subscriber
- * Channel
- */
- public void addSubscriber(Channel subscriber) {
- if (!subscriber.isActive()) {
- logger.debug("Channel is not active between websocket server and subscriber {}"
- + subscriber.remoteAddress());
- }
- Event event = new Event(EventType.REGISTER);
- event.setSubscriber(subscriber);
- eventBus.post(event);
- }
-
- /**
- * Creates event of type {@link EventType#DEREGISTER}, sets {@link Channel}
- * subscriber to the event and posts event into event bus.
- *
- * @param subscriber
- */
- public void removeSubscriber(Channel subscriber) {
- logger.debug("Subscriber {} is removed.", subscriber.remoteAddress());
- Event event = new Event(EventType.DEREGISTER);
- event.setSubscriber(subscriber);
- eventBus.post(event);
- }
-
- /**
- * Checks if exists at least one {@link Channel} subscriber.
- *
- * @return True if exist at least one {@link Channel} subscriber, false
- * otherwise.
- */
- public boolean hasSubscribers() {
- return !subscribers.isEmpty();
- }
-
- /**
- * Consists of two types {@link Store#CONFIG} and {@link Store#OPERATION}.
- */
- private static enum Store {
- CONFIG("config"), OPERATION("operation");
-
- private final String value;
-
- private Store(String value) {
- this.value = value;
- }
- }
-
- /**
- * Consists of three types {@link Operation#CREATED},
- * {@link Operation#UPDATED} and {@link Operation#DELETED}.
- */
- private static enum Operation {
- CREATED("created"), UPDATED("updated"), DELETED("deleted");
-
- private final String value;
-
- private Operation(String value) {
- this.value = value;
- }
- }
+ private static final Logger logger = LoggerFactory
+ .getLogger(ListenerAdapter.class);
+ private final XmlMapper xmlMapper = new XmlMapper();
+ private final SimpleDateFormat rfc3339 = new SimpleDateFormat(
+ "yyyy-MM-dd'T'hh:mm:ssZ");
+
+ private final InstanceIdentifier path;
+ private ListenerRegistration<DataChangeListener> registration;
+ private final String streamName;
+ private Set<Channel> subscribers = new ConcurrentSet<>();
+ private final EventBus eventBus;
+ private final EventBusChangeRecorder eventBusChangeRecorder;
+
+ /**
+ * Creates new {@link ListenerAdapter} listener specified by path and stream
+ * name.
+ *
+ * @param path
+ * Path to data in data store.
+ * @param streamName
+ * The name of the stream.
+ */
+ ListenerAdapter(InstanceIdentifier path, String streamName) {
+ Preconditions.checkNotNull(path);
+ Preconditions
+ .checkArgument(streamName != null && !streamName.isEmpty());
+ this.path = path;
+ this.streamName = streamName;
+ eventBus = new AsyncEventBus(Executors.newSingleThreadExecutor());
+ eventBusChangeRecorder = new EventBusChangeRecorder();
+ eventBus.register(eventBusChangeRecorder);
+ }
+
+ @Override
+ public void onDataChanged(
+ DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
+ if (!change.getCreatedConfigurationData().isEmpty()
+ || !change.getCreatedOperationalData().isEmpty()
+ || !change.getUpdatedConfigurationData().isEmpty()
+ || !change.getUpdatedOperationalData().isEmpty()
+ || !change.getRemovedConfigurationData().isEmpty()
+ || !change.getRemovedOperationalData().isEmpty()) {
+ String xml = prepareXmlFrom(change);
+ Event event = new Event(EventType.NOTIFY);
+ event.setData(xml);
+ eventBus.post(event);
+ }
+ }
+
+ /**
+ * Tracks events of data change by customer.
+ */
+ private final class EventBusChangeRecorder {
+ @Subscribe
+ public void recordCustomerChange(Event event) {
+ if (event.getType() == EventType.REGISTER) {
+ Channel subscriber = event.getSubscriber();
+ if (!subscribers.contains(subscriber)) {
+ subscribers.add(subscriber);
+ }
+ } else if (event.getType() == EventType.DEREGISTER) {
+ subscribers.remove(event.getSubscriber());
+ Notificator
+ .removeListenerIfNoSubscriberExists(ListenerAdapter.this);
+ } else if (event.getType() == EventType.NOTIFY) {
+ for (Channel subscriber : subscribers) {
+ if (subscriber.isActive()) {
+ logger.debug("Data are sent to subscriber {}:",
+ subscriber.remoteAddress());
+ subscriber.writeAndFlush(new TextWebSocketFrame(event
+ .getData()));
+ } else {
+ logger.debug(
+ "Subscriber {} is removed - channel is not active yet.",
+ subscriber.remoteAddress());
+ subscribers.remove(subscriber);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Represents event of specific {@link EventType} type, holds data and
+ * {@link Channel} subscriber.
+ */
+ private final class Event {
+ private final EventType type;
+ private Channel subscriber;
+ private String data;
+
+ /**
+ * Creates new event specified by {@link EventType} type.
+ *
+ * @param type
+ * EventType
+ */
+ public Event(EventType type) {
+ this.type = type;
+ }
+
+ /**
+ * Gets the {@link Channel} subscriber.
+ *
+ * @return Channel
+ */
+ public Channel getSubscriber() {
+ return subscriber;
+ }
+
+ /**
+ * Sets subscriber for event.
+ *
+ * @param subscriber
+ * Channel
+ */
+ public void setSubscriber(Channel subscriber) {
+ this.subscriber = subscriber;
+ }
+
+ /**
+ * Gets event data.
+ *
+ * @return String representation of event data.
+ */
+ public String getData() {
+ return data;
+ }
+
+ /**
+ * Sets event data.
+ *
+     * @param data
+     *            String representation of event data.
+ */
+ public void setData(String data) {
+ this.data = data;
+ }
+
+ /**
+ * Gets event type.
+ *
+ * @return The type of the event.
+ */
+ public EventType getType() {
+ return type;
+ }
+ }
+
+ /**
+ * Type of the event.
+ */
+ private enum EventType {
+ REGISTER, DEREGISTER, NOTIFY;
+ }
+
+ /**
+ * Prepare data in printable form and transform it to String.
+ *
+ * @param change
+ * DataChangeEvent
+ * @return Data in printable form.
+ */
+ private String prepareXmlFrom(
+ DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
+ Document doc = createDocument();
+ Element notificationElement = doc.createElementNS(
+ "urn:ietf:params:xml:ns:netconf:notification:1.0",
+ "notification");
+ doc.appendChild(notificationElement);
+
+ Element eventTimeElement = doc.createElement("eventTime");
+ eventTimeElement.setTextContent(toRFC3339(new Date()));
+ notificationElement.appendChild(eventTimeElement);
+
+ Element dataChangedNotificationEventElement = doc.createElementNS(
+ "urn:opendaylight:params:xml:ns:yang:controller:md:sal:remote",
+ "data-changed-notification");
+ addValuesToDataChangedNotificationEventElement(doc,
+ dataChangedNotificationEventElement, change);
+ notificationElement.appendChild(dataChangedNotificationEventElement);
+
+ try {
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ TransformerFactory tf = TransformerFactory.newInstance();
+ Transformer transformer = tf.newTransformer();
+ transformer
+ .setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
+ transformer.setOutputProperty(OutputKeys.METHOD, "xml");
+ transformer.setOutputProperty(OutputKeys.INDENT, "yes");
+ transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
+ transformer.setOutputProperty(
+ "{http://xml.apache.org/xslt}indent-amount", "4");
+ transformer.transform(new DOMSource(doc), new StreamResult(
+ new OutputStreamWriter(out, "UTF-8")));
+ byte[] charData = out.toByteArray();
+ return new String(charData, "UTF-8");
+ } catch (TransformerException | UnsupportedEncodingException e) {
+ String msg = "Error during transformation of Document into String";
+ logger.error(msg, e);
+ return msg;
+ }
+ }
+
+ /**
+ * Formats data specified by RFC3339.
+ *
+ * @param d
+ * Date
+ * @return Data specified by RFC3339.
+ */
+ private String toRFC3339(Date d) {
+ return rfc3339.format(d).replaceAll("(\\d\\d)(\\d\\d)$", "$1:$2");
+ }
+
+ /**
+ * Creates {@link Document} document.
+ *
+ * @return {@link Document} document.
+ */
+ private Document createDocument() {
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ Document doc = null;
+ try {
+ DocumentBuilder bob = dbf.newDocumentBuilder();
+ doc = bob.newDocument();
+ } catch (ParserConfigurationException e) {
+ return null;
+ }
+ return doc;
+ }
+
+ /**
+ * Adds values to data changed notification event element.
+ *
+ * @param doc
+ * {@link Document}
+ * @param dataChangedNotificationEventElement
+ * {@link Element}
+ * @param change
+ * {@link DataChangeEvent}
+ */
+ private void addValuesToDataChangedNotificationEventElement(Document doc,
+ Element dataChangedNotificationEventElement,
+ DataChangeEvent<InstanceIdentifier, CompositeNode> change) {
+ addValuesFromDataToElement(doc, change.getCreatedConfigurationData(),
+ dataChangedNotificationEventElement, Store.CONFIG,
+ Operation.CREATED);
+ addValuesFromDataToElement(doc, change.getCreatedOperationalData(),
+ dataChangedNotificationEventElement, Store.OPERATION,
+ Operation.CREATED);
+ if (change.getCreatedConfigurationData().isEmpty()) {
+ addValuesFromDataToElement(doc,
+ change.getUpdatedConfigurationData(),
+ dataChangedNotificationEventElement, Store.CONFIG,
+ Operation.UPDATED);
+ }
+ if (change.getCreatedOperationalData().isEmpty()) {
+ addValuesFromDataToElement(doc, change.getUpdatedOperationalData(),
+ dataChangedNotificationEventElement, Store.OPERATION,
+ Operation.UPDATED);
+ }
+ addValuesFromDataToElement(doc, change.getRemovedConfigurationData(),
+ dataChangedNotificationEventElement, Store.CONFIG,
+ Operation.DELETED);
+ addValuesFromDataToElement(doc, change.getRemovedOperationalData(),
+ dataChangedNotificationEventElement, Store.OPERATION,
+ Operation.DELETED);
+ }
+
+ /**
+ * Adds values from data to element.
+ *
+ * @param doc
+ * {@link Document}
+ * @param data
+ * Set of {@link InstanceIdentifier}.
+ * @param element
+ * {@link Element}
+ * @param store
+ * {@link Store}
+ * @param operation
+ * {@link Operation}
+ */
+ private void addValuesFromDataToElement(Document doc,
+ Set<InstanceIdentifier> data, Element element, Store store,
+ Operation operation) {
+ if (data == null || data.isEmpty()) {
+ return;
+ }
+ for (InstanceIdentifier path : data) {
+ Node node = createDataChangeEventElement(doc, path, null, store,
+ operation);
+ element.appendChild(node);
+ }
+ }
+
+ /**
+ * Adds values from data to element.
+ *
+ * @param doc
+ * {@link Document}
+ * @param data
+ * Map of {@link InstanceIdentifier} and {@link CompositeNode}.
+ * @param element
+ * {@link Element}
+ * @param store
+ * {@link Store}
+ * @param operation
+ * {@link Operation}
+ */
+ private void addValuesFromDataToElement(Document doc,
+ Map<InstanceIdentifier, CompositeNode> data, Element element,
+ Store store, Operation operation) {
+ if (data == null || data.isEmpty()) {
+ return;
+ }
+ for (Entry<InstanceIdentifier, CompositeNode> entry : data.entrySet()) {
+ Node node = createDataChangeEventElement(doc, entry.getKey(),
+ entry.getValue(), store, operation);
+ element.appendChild(node);
+ }
+ }
+
+ /**
+ * Creates changed event element from data.
+ *
+ * @param doc
+ * {@link Document}
+ * @param path
+ * Path to data in data store.
+ * @param data
+ * {@link CompositeNode}
+ * @param store
+ * {@link Store}
+ * @param operation
+ * {@link Operation}
+ * @return {@link Node} node represented by changed event element.
+ */
+ private Node createDataChangeEventElement(Document doc,
+ InstanceIdentifier path, CompositeNode data, Store store,
+ Operation operation) {
+ Element dataChangeEventElement = doc.createElement("data-change-event");
+
+ Element pathElement = doc.createElement("path");
+ addPathAsValueToElement(path, pathElement);
+ dataChangeEventElement.appendChild(pathElement);
+
+ Element storeElement = doc.createElement("store");
+ storeElement.setTextContent(store.value);
+ dataChangeEventElement.appendChild(storeElement);
+
+ Element operationElement = doc.createElement("operation");
+ operationElement.setTextContent(operation.value);
+ dataChangeEventElement.appendChild(operationElement);
+
+ if (data != null) {
+ Element dataElement = doc.createElement("data");
+ Node dataAnyXml = translateToXml(path, data);
+ Node adoptedNode = doc.adoptNode(dataAnyXml);
+ dataElement.appendChild(adoptedNode);
+ dataChangeEventElement.appendChild(dataElement);
+ }
+
+ return dataChangeEventElement;
+ }
+
+ /**
+ * Translates {@link CompositeNode} data to XML format.
+ *
+ * @param path
+ * Path to data in data store.
+ * @param data
+ * {@link CompositeNode}
+ * @return Data in XML format.
+ */
+ private Node translateToXml(InstanceIdentifier path, CompositeNode data) {
+ DataNodeContainer schemaNode = ControllerContext.getInstance()
+ .getDataNodeContainerFor(path);
+ if (schemaNode == null) {
+ logger.info(
+ "Path '{}' contains node with unsupported type (supported type is Container or List) or some node was not found.",
+ path);
+ return null;
+ }
+ try {
+ Document xml = xmlMapper.write(data, schemaNode);
+ return xml.getFirstChild();
+ } catch (UnsupportedDataTypeException e) {
+ logger.error(
+ "Error occured during translation of notification to XML.",
+ e);
+ return null;
+ }
+ }
+
+ /**
+ * Adds path as value to element.
+ *
+ * @param path
+ * Path to data in data store.
+ * @param element
+ * {@link Element}
+ */
+ private void addPathAsValueToElement(InstanceIdentifier path,
+ Element element) {
+ // Map< key = namespace, value = prefix>
+ Map<String, String> prefixes = new HashMap<>();
+ InstanceIdentifier instanceIdentifier = path;
+ StringBuilder textContent = new StringBuilder();
+ for (PathArgument pathArgument : instanceIdentifier.getPath()) {
+ textContent.append("/");
+ writeIdentifierWithNamespacePrefix(element, textContent,
+ pathArgument.getNodeType(), prefixes);
+ if (pathArgument instanceof NodeIdentifierWithPredicates) {
+ Map<QName, Object> predicates = ((NodeIdentifierWithPredicates) pathArgument)
+ .getKeyValues();
+ for (QName keyValue : predicates.keySet()) {
+ String predicateValue = String.valueOf(predicates
+ .get(keyValue));
+ textContent.append("[");
+ writeIdentifierWithNamespacePrefix(element, textContent,
+ keyValue, prefixes);
+ textContent.append("='");
+ textContent.append(predicateValue);
+ textContent.append("'");
+ textContent.append("]");
+ }
+ } else if (pathArgument instanceof NodeWithValue) {
+ textContent.append("[.='");
+ textContent.append(((NodeWithValue) pathArgument).getValue());
+ textContent.append("'");
+ textContent.append("]");
+ }
+ }
+ element.setTextContent(textContent.toString());
+ }
+
+ /**
+ * Writes identifier that consists of prefix and QName.
+ *
+ * @param element
+ * {@link Element}
+ * @param textContent
+ * StringBuilder
+ * @param qName
+ * QName
+ * @param prefixes
+ * Map of namespaces and prefixes.
+ */
+ private static void writeIdentifierWithNamespacePrefix(Element element,
+ StringBuilder textContent, QName qName, Map<String, String> prefixes) {
+ String namespace = qName.getNamespace().toString();
+ String prefix = prefixes.get(namespace);
+ if (prefix == null) {
+ prefix = qName.getPrefix();
+ if (prefix == null || prefix.isEmpty()
+ || prefixes.containsValue(prefix)) {
+ prefix = generateNewPrefix(prefixes.values());
+ }
+ }
+
+ element.setAttribute("xmlns:" + prefix, namespace);
+ textContent.append(prefix);
+ prefixes.put(namespace, prefix);
+
+ textContent.append(":");
+ textContent.append(qName.getLocalName());
+ }
+
+ /**
+ * Generates new prefix which consists of four random characters <a-z>.
+ *
+ * @param prefixes
+ * Collection of prefixes.
+ * @return New prefix which consists of four random characters <a-z>.
+ */
+ private static String generateNewPrefix(Collection<String> prefixes) {
+ StringBuilder result = null;
+ Random random = new Random();
+ do {
+ result = new StringBuilder();
+ for (int i = 0; i < 4; i++) {
+ int randomNumber = 0x61 + (Math.abs(random.nextInt()) % 26);
+ result.append(Character.toChars(randomNumber));
+ }
+ } while (prefixes.contains(result.toString()));
+
+ return result.toString();
+ }
+
+ /**
+ * Gets path pointed to data in data store.
+ *
+ * @return Path pointed to data in data store.
+ */
+ public InstanceIdentifier getPath() {
+ return path;
+ }
+
+ /**
+ * Sets {@link ListenerRegistration} registration.
+ *
+     * @param registration
+     *            ListenerRegistration&lt;DataChangeListener&gt;
+ */
+ public void setRegistration(
+ ListenerRegistration<DataChangeListener> registration) {
+ this.registration = registration;
+ }
+
+ /**
+ * Gets the name of the stream.
+ *
+ * @return The name of the stream.
+ */
+ public String getStreamName() {
+ return streamName;
+ }
+
+ /**
+     * Removes all subscribers and unregisters event bus change recorder from
+     * event bus.
+ */
+ public void close() throws Exception {
+ subscribers = new ConcurrentSet<>();
+ registration.close();
+ registration = null;
+ eventBus.unregister(eventBusChangeRecorder);
+ }
+
+ /**
+     * Checks if {@link ListenerRegistration} registration exists.
+     *
+     * @return True if it exists, false otherwise.
+ */
+ public boolean isListening() {
+ return registration == null ? false : true;
+ }
+
+ /**
+ * Creates event of type {@link EventType#REGISTER}, set {@link Channel}
+ * subscriber to the event and post event into event bus.
+ *
+ * @param subscriber
+ * Channel
+ */
+ public void addSubscriber(Channel subscriber) {
+ if (!subscriber.isActive()) {
+ logger.debug("Channel is not active between websocket server and subscriber {}"
+ + subscriber.remoteAddress());
+ }
+ Event event = new Event(EventType.REGISTER);
+ event.setSubscriber(subscriber);
+ eventBus.post(event);
+ }
+
+ /**
+ * Creates event of type {@link EventType#DEREGISTER}, sets {@link Channel}
+ * subscriber to the event and posts event into event bus.
+ *
+ * @param subscriber
+ */
+ public void removeSubscriber(Channel subscriber) {
+ logger.debug("Subscriber {} is removed.", subscriber.remoteAddress());
+ Event event = new Event(EventType.DEREGISTER);
+ event.setSubscriber(subscriber);
+ eventBus.post(event);
+ }
+
+ /**
+ * Checks if exists at least one {@link Channel} subscriber.
+ *
+ * @return True if exist at least one {@link Channel} subscriber, false
+ * otherwise.
+ */
+ public boolean hasSubscribers() {
+ return !subscribers.isEmpty();
+ }
+
+ /**
+ * Consists of two types {@link Store#CONFIG} and {@link Store#OPERATION}.
+ */
+ private static enum Store {
+ CONFIG("config"), OPERATION("operation");
+
+ private final String value;
+
+ private Store(String value) {
+ this.value = value;
+ }
+ }
+
+ /**
+ * Consists of three types {@link Operation#CREATED},
+ * {@link Operation#UPDATED} and {@link Operation#DELETED}.
+ */
+ private static enum Operation {
+ CREATED("created"), UPDATED("updated"), DELETED("deleted");
+
+ private final String value;
+
+ private Operation(String value) {
+ this.value = value;
+ }
+ }
}
public class Notificator {
private static Map<String, ListenerAdapter> listenersByStreamName = new ConcurrentHashMap<>();
- private static Map<InstanceIdentifier, ListenerAdapter> listenersByInstanceIdentifier = new ConcurrentHashMap<>();
- private static final Lock lock = new ReentrantLock();
+ private static Map<InstanceIdentifier, ListenerAdapter> listenersByInstanceIdentifier = new ConcurrentHashMap<>();
+ private static final Lock lock = new ReentrantLock();
- private Notificator() {
- }
+ private Notificator() {
+ }
/**
* Returns list of all stream names
}
- /**
- * Gets {@link ListenerAdapter} specified by stream name.
- *
- * @param streamName
- * The name of the stream.
- * @return {@link ListenerAdapter} specified by stream name.
- */
- public static ListenerAdapter getListenerFor(String streamName) {
- return listenersByStreamName.get(streamName);
- }
-
- /**
- * Gets {@link ListenerAdapter} listener specified by
- * {@link InstanceIdentifier} path.
- *
- * @param path
- * Path to data in data repository.
- * @return ListenerAdapter
- */
- public static ListenerAdapter getListenerFor(InstanceIdentifier path) {
- return listenersByInstanceIdentifier.get(path);
- }
-
- /**
- * Checks if the listener specified by {@link InstanceIdentifier} path
- * exist.
- *
- * @param path
- * Path to data in data repository.
- * @return True if the listener exist, false otherwise.
- */
- public static boolean existListenerFor(InstanceIdentifier path) {
- return listenersByInstanceIdentifier.containsKey(path);
- }
-
- /**
- * Creates new {@link ListenerAdapter} listener from
- * {@link InstanceIdentifier} path and stream name.
- *
- * @param path
- * Path to data in data repository.
- * @param streamName
- * The name of the stream.
- * @return New {@link ListenerAdapter} listener from
- * {@link InstanceIdentifier} path and stream name.
- */
- public static ListenerAdapter createListener(InstanceIdentifier path,
- String streamName) {
- ListenerAdapter listener = new ListenerAdapter(path, streamName);
- try {
- lock.lock();
- listenersByInstanceIdentifier.put(path, listener);
- listenersByStreamName.put(streamName, listener);
- } finally {
- lock.unlock();
- }
- return listener;
- }
-
- /**
- * Looks for listener determined by {@link InstanceIdentifier} path and
- * removes it.
- *
- * @param path
- * InstanceIdentifier
- */
- public static void removeListener(InstanceIdentifier path) {
- ListenerAdapter listener = listenersByInstanceIdentifier.get(path);
- deleteListener(listener);
- }
-
- /**
- * Creates String representation of stream name from URI. Removes slash from
- * URI in start and end position.
- *
- * @param uri
- * URI for creation stream name.
- * @return String representation of stream name.
- */
- public static String createStreamNameFromUri(String uri) {
- if (uri == null) {
- return null;
- }
- String result = uri;
- if (result.startsWith("/")) {
- result = result.substring(1);
- }
- if (result.endsWith("/")) {
- result = result.substring(0, result.length());
- }
- return result;
- }
-
- /**
- * Removes all listeners.
- */
- public static void removeAllListeners() {
- for (ListenerAdapter listener : listenersByInstanceIdentifier.values()) {
- try {
- listener.close();
- } catch (Exception e) {
- }
- }
- try {
- lock.lock();
- listenersByStreamName = new ConcurrentHashMap<>();
- listenersByInstanceIdentifier = new ConcurrentHashMap<>();
- } finally {
- lock.unlock();
- }
- }
-
- /**
- * Checks if listener has at least one subscriber. In case it doesn't have any, delete
- * listener.
- *
- * @param listener
- * ListenerAdapter
- */
- public static void removeListenerIfNoSubscriberExists(
- ListenerAdapter listener) {
- if (!listener.hasSubscribers()) {
- deleteListener(listener);
- }
- }
-
- /**
- * Delete {@link ListenerAdapter} listener specified in parameter.
- *
- * @param listener
- * ListenerAdapter
- */
- private static void deleteListener(ListenerAdapter listener) {
- if (listener != null) {
- try {
- listener.close();
- } catch (Exception e) {
- }
- try {
- lock.lock();
- listenersByInstanceIdentifier.remove(listener.getPath());
- listenersByStreamName.remove(listener.getStreamName());
- } finally {
- lock.unlock();
- }
- }
- }
+ /**
+ * Gets {@link ListenerAdapter} specified by stream name.
+ *
+ * @param streamName
+ * The name of the stream.
+ * @return {@link ListenerAdapter} specified by stream name.
+ */
+ public static ListenerAdapter getListenerFor(String streamName) {
+ return listenersByStreamName.get(streamName);
+ }
+
+ /**
+ * Gets {@link ListenerAdapter} listener specified by
+ * {@link InstanceIdentifier} path.
+ *
+ * @param path
+ * Path to data in data repository.
+ * @return ListenerAdapter
+ */
+ public static ListenerAdapter getListenerFor(InstanceIdentifier path) {
+ return listenersByInstanceIdentifier.get(path);
+ }
+
+ /**
+ * Checks if the listener specified by {@link InstanceIdentifier} path
+ * exist.
+ *
+ * @param path
+ * Path to data in data repository.
+ * @return True if the listener exist, false otherwise.
+ */
+ public static boolean existListenerFor(InstanceIdentifier path) {
+ return listenersByInstanceIdentifier.containsKey(path);
+ }
+
+ /**
+ * Creates new {@link ListenerAdapter} listener from
+ * {@link InstanceIdentifier} path and stream name.
+ *
+ * @param path
+ * Path to data in data repository.
+ * @param streamName
+ * The name of the stream.
+ * @return New {@link ListenerAdapter} listener from
+ * {@link InstanceIdentifier} path and stream name.
+ */
+ public static ListenerAdapter createListener(InstanceIdentifier path,
+ String streamName) {
+ ListenerAdapter listener = new ListenerAdapter(path, streamName);
+ try {
+ lock.lock();
+ listenersByInstanceIdentifier.put(path, listener);
+ listenersByStreamName.put(streamName, listener);
+ } finally {
+ lock.unlock();
+ }
+ return listener;
+ }
+
+ /**
+ * Looks for listener determined by {@link InstanceIdentifier} path and
+ * removes it.
+ *
+ * @param path
+ * InstanceIdentifier
+ */
+ public static void removeListener(InstanceIdentifier path) {
+ ListenerAdapter listener = listenersByInstanceIdentifier.get(path);
+ deleteListener(listener);
+ }
+
+ /**
+ * Creates String representation of stream name from URI. Removes slash from
+ * URI in start and end position.
+ *
+ * @param uri
+ * URI for creation stream name.
+ * @return String representation of stream name.
+ */
+ public static String createStreamNameFromUri(String uri) {
+ if (uri == null) {
+ return null;
+ }
+ String result = uri;
+ if (result.startsWith("/")) {
+ result = result.substring(1);
+ }
+ if (result.endsWith("/")) {
+ result = result.substring(0, result.length());
+ }
+ return result;
+ }
+
+ /**
+ * Removes all listeners.
+ */
+ public static void removeAllListeners() {
+ for (ListenerAdapter listener : listenersByInstanceIdentifier.values()) {
+ try {
+ listener.close();
+ } catch (Exception e) {
+ }
+ }
+ try {
+ lock.lock();
+ listenersByStreamName = new ConcurrentHashMap<>();
+ listenersByInstanceIdentifier = new ConcurrentHashMap<>();
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Checks if listener has at least one subscriber. In case it doesn't have any, delete
+ * listener.
+ *
+ * @param listener
+ * ListenerAdapter
+ */
+ public static void removeListenerIfNoSubscriberExists(
+ ListenerAdapter listener) {
+ if (!listener.hasSubscribers()) {
+ deleteListener(listener);
+ }
+ }
+
+ /**
+ * Delete {@link ListenerAdapter} listener specified in parameter.
+ *
+ * @param listener
+ * ListenerAdapter
+ */
+ private static void deleteListener(ListenerAdapter listener) {
+ if (listener != null) {
+ try {
+ listener.close();
+ } catch (Exception e) {
+ }
+ try {
+ lock.lock();
+ listenersByInstanceIdentifier.remove(listener.getPath());
+ listenersByStreamName.remove(listener.getStreamName());
+ } finally {
+ lock.unlock();
+ }
+ }
+ }
}
*/
public class WebSocketServer implements Runnable {
- private static final Logger logger = LoggerFactory
- .getLogger(WebSocketServer.class);
-
- public static final int PORT = 8181;
- private EventLoopGroup bossGroup;
- private EventLoopGroup workerGroup;
-
- @Override
- public void run() {
- bossGroup = new NioEventLoopGroup();
- workerGroup = new NioEventLoopGroup();
- try {
- ServerBootstrap b = new ServerBootstrap();
- b.group(bossGroup, workerGroup)
- .channel(NioServerSocketChannel.class)
- .childHandler(new WebSocketServerInitializer());
-
- Channel ch = b.bind(PORT).sync().channel();
- logger.info("Web socket server started at port {}.", PORT);
-
- ch.closeFuture().sync();
- } catch (InterruptedException e) {
- // NOOP
- } finally {
- stop();
- }
- }
-
- /**
- * Stops the web socket server and removes all listeners.
- */
- private void stop() {
- Notificator.removeAllListeners();
- if (bossGroup != null) {
- bossGroup.shutdownGracefully();
- }
- if (workerGroup != null) {
- workerGroup.shutdownGracefully();
- }
- }
+ private static final Logger logger = LoggerFactory
+ .getLogger(WebSocketServer.class);
+
+ public static final int PORT = 8181;
+ private EventLoopGroup bossGroup;
+ private EventLoopGroup workerGroup;
+
+ @Override
+ public void run() {
+ bossGroup = new NioEventLoopGroup();
+ workerGroup = new NioEventLoopGroup();
+ try {
+ ServerBootstrap b = new ServerBootstrap();
+ b.group(bossGroup, workerGroup)
+ .channel(NioServerSocketChannel.class)
+ .childHandler(new WebSocketServerInitializer());
+
+ Channel ch = b.bind(PORT).sync().channel();
+ logger.info("Web socket server started at port {}.", PORT);
+
+ ch.closeFuture().sync();
+ } catch (InterruptedException e) {
+ // NOOP
+ } finally {
+ stop();
+ }
+ }
+
+ /**
+ * Stops the web socket server and removes all listeners.
+ */
+ private void stop() {
+ Notificator.removeAllListeners();
+ if (bossGroup != null) {
+ bossGroup.shutdownGracefully();
+ }
+ if (workerGroup != null) {
+ workerGroup.shutdownGracefully();
+ }
+ }
}
*/
public class WebSocketServerHandler extends SimpleChannelInboundHandler<Object> {
- private static final Logger logger = LoggerFactory
- .getLogger(WebSocketServerHandler.class);
-
- private WebSocketServerHandshaker handshaker;
-
- @Override
- protected void channelRead0(ChannelHandlerContext ctx, Object msg)
- throws Exception {
- if (msg instanceof FullHttpRequest) {
- handleHttpRequest(ctx, (FullHttpRequest) msg);
- } else if (msg instanceof WebSocketFrame) {
- handleWebSocketFrame(ctx, (WebSocketFrame) msg);
- }
- }
-
- /**
- * Checks if HTTP request method is GET and if is possible to decode HTTP
- * result of request.
- *
- * @param ctx
- * ChannelHandlerContext
- * @param req
- * FullHttpRequest
- */
- private void handleHttpRequest(ChannelHandlerContext ctx,
- FullHttpRequest req) throws Exception {
- // Handle a bad request.
- if (!req.getDecoderResult().isSuccess()) {
- sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
- BAD_REQUEST));
- return;
- }
-
- // Allow only GET methods.
- if (req.getMethod() != GET) {
- sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
- FORBIDDEN));
- return;
- }
-
- String streamName = Notificator.createStreamNameFromUri(req.getUri());
- ListenerAdapter listener = Notificator.getListenerFor(streamName);
- if (listener != null) {
- listener.addSubscriber(ctx.channel());
- logger.debug("Subscriber successfully registered.");
- } else {
- logger.error("Listener for stream with name '{}' was not found.",
- streamName);
- sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
- INTERNAL_SERVER_ERROR));
- }
-
- // Handshake
- WebSocketServerHandshakerFactory wsFactory = new WebSocketServerHandshakerFactory(
- getWebSocketLocation(req), null, false);
- handshaker = wsFactory.newHandshaker(req);
- if (handshaker == null) {
- WebSocketServerHandshakerFactory
- .sendUnsupportedWebSocketVersionResponse(ctx.channel());
- } else {
- handshaker.handshake(ctx.channel(), req);
- }
-
- }
-
- /**
- * Checks response status, send response and close connection if necessary
- *
- * @param ctx
- * ChannelHandlerContext
- * @param req
- * HttpRequest
- * @param res
- * FullHttpResponse
- */
- private static void sendHttpResponse(ChannelHandlerContext ctx,
- HttpRequest req, FullHttpResponse res) {
- // Generate an error page if response getStatus code is not OK (200).
- if (res.getStatus().code() != 200) {
- ByteBuf buf = Unpooled.copiedBuffer(res.getStatus().toString(),
- CharsetUtil.UTF_8);
- res.content().writeBytes(buf);
- buf.release();
- setContentLength(res, res.content().readableBytes());
- }
-
- // Send the response and close the connection if necessary.
- ChannelFuture f = ctx.channel().writeAndFlush(res);
- if (!isKeepAlive(req) || res.getStatus().code() != 200) {
- f.addListener(ChannelFutureListener.CLOSE);
- }
- }
-
- /**
- * Handles web socket frame.
- *
- * @param ctx
- * {@link ChannelHandlerContext}
- * @param frame
- * {@link WebSocketFrame}
- */
- private void handleWebSocketFrame(ChannelHandlerContext ctx,
- WebSocketFrame frame) throws IOException {
- if (frame instanceof CloseWebSocketFrame) {
- handshaker.close(ctx.channel(),
- (CloseWebSocketFrame) frame.retain());
- String streamName = Notificator
- .createStreamNameFromUri(((CloseWebSocketFrame) frame)
- .reasonText());
- ListenerAdapter listener = Notificator.getListenerFor(streamName);
- if (listener != null) {
- listener.removeSubscriber(ctx.channel());
- logger.debug("Subscriber successfully registered.");
- }
- Notificator.removeListenerIfNoSubscriberExists(listener);
- return;
- } else if (frame instanceof PingWebSocketFrame) {
- ctx.channel().write(
- new PongWebSocketFrame(frame.content().retain()));
- return;
- }
- }
-
- @Override
- public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
- throws Exception {
- if (cause instanceof java.nio.channels.ClosedChannelException == false) {
- // cause.printStackTrace();
- }
- ctx.close();
- }
-
- /**
- * Get web socket location from HTTP request.
- *
- * @param req
- * HTTP request from which the location will be returned
- * @return String representation of web socket location.
- */
- private static String getWebSocketLocation(HttpRequest req) {
- return "http://" + req.headers().get(HOST) + req.getUri();
- }
+ private static final Logger logger = LoggerFactory
+ .getLogger(WebSocketServerHandler.class);
+
+ private WebSocketServerHandshaker handshaker;
+
+ @Override
+ protected void channelRead0(ChannelHandlerContext ctx, Object msg)
+ throws Exception {
+ if (msg instanceof FullHttpRequest) {
+ handleHttpRequest(ctx, (FullHttpRequest) msg);
+ } else if (msg instanceof WebSocketFrame) {
+ handleWebSocketFrame(ctx, (WebSocketFrame) msg);
+ }
+ }
+
+ /**
+ * Checks if the HTTP request method is GET and if it is possible to decode the
+ * HTTP result of the request.
+ *
+ * @param ctx
+ * ChannelHandlerContext
+ * @param req
+ * FullHttpRequest
+ */
+ private void handleHttpRequest(ChannelHandlerContext ctx,
+ FullHttpRequest req) throws Exception {
+ // Handle a bad request.
+ if (!req.getDecoderResult().isSuccess()) {
+ sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
+ BAD_REQUEST));
+ return;
+ }
+
+ // Allow only GET methods.
+ if (req.getMethod() != GET) {
+ sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
+ FORBIDDEN));
+ return;
+ }
+
+ String streamName = Notificator.createStreamNameFromUri(req.getUri());
+ ListenerAdapter listener = Notificator.getListenerFor(streamName);
+ if (listener != null) {
+ listener.addSubscriber(ctx.channel());
+ logger.debug("Subscriber successfully registered.");
+ } else {
+ logger.error("Listener for stream with name '{}' was not found.",
+ streamName);
+ sendHttpResponse(ctx, req, new DefaultFullHttpResponse(HTTP_1_1,
+ INTERNAL_SERVER_ERROR));
+ }
+
+ // Handshake
+ WebSocketServerHandshakerFactory wsFactory = new WebSocketServerHandshakerFactory(
+ getWebSocketLocation(req), null, false);
+ handshaker = wsFactory.newHandshaker(req);
+ if (handshaker == null) {
+ WebSocketServerHandshakerFactory
+ .sendUnsupportedWebSocketVersionResponse(ctx.channel());
+ } else {
+ handshaker.handshake(ctx.channel(), req);
+ }
+
+ }
+
+ /**
+ * Checks the response status, sends the response, and closes the connection if necessary.
+ *
+ * @param ctx
+ * ChannelHandlerContext
+ * @param req
+ * HttpRequest
+ * @param res
+ * FullHttpResponse
+ */
+ private static void sendHttpResponse(ChannelHandlerContext ctx,
+ HttpRequest req, FullHttpResponse res) {
+ // Generate an error page if the response status code is not OK (200).
+ if (res.getStatus().code() != 200) {
+ ByteBuf buf = Unpooled.copiedBuffer(res.getStatus().toString(),
+ CharsetUtil.UTF_8);
+ res.content().writeBytes(buf);
+ buf.release();
+ setContentLength(res, res.content().readableBytes());
+ }
+
+ // Send the response and close the connection if necessary.
+ ChannelFuture f = ctx.channel().writeAndFlush(res);
+ if (!isKeepAlive(req) || res.getStatus().code() != 200) {
+ f.addListener(ChannelFutureListener.CLOSE);
+ }
+ }
+
+ /**
+ * Handles web socket frame.
+ *
+ * @param ctx
+ * {@link ChannelHandlerContext}
+ * @param frame
+ * {@link WebSocketFrame}
+ */
+ private void handleWebSocketFrame(ChannelHandlerContext ctx,
+ WebSocketFrame frame) throws IOException {
+ if (frame instanceof CloseWebSocketFrame) {
+ handshaker.close(ctx.channel(),
+ (CloseWebSocketFrame) frame.retain());
+ String streamName = Notificator
+ .createStreamNameFromUri(((CloseWebSocketFrame) frame)
+ .reasonText());
+ ListenerAdapter listener = Notificator.getListenerFor(streamName);
+ if (listener != null) {
+ listener.removeSubscriber(ctx.channel());
+ logger.debug("Subscriber successfully registered.");
+ }
+ Notificator.removeListenerIfNoSubscriberExists(listener);
+ return;
+ } else if (frame instanceof PingWebSocketFrame) {
+ ctx.channel().write(
+ new PongWebSocketFrame(frame.content().retain()));
+ return;
+ }
+ }
+
+ @Override
+ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
+ throws Exception {
+ if (cause instanceof java.nio.channels.ClosedChannelException == false) {
+ // cause.printStackTrace();
+ }
+ ctx.close();
+ }
+
+ /**
+ * Get web socket location from HTTP request.
+ *
+ * @param req
+ * HTTP request from which the location will be returned
+ * @return String representation of web socket location.
+ */
+ private static String getWebSocketLocation(HttpRequest req) {
+ return "http://" + req.headers().get(HOST) + req.getUri();
+ }
}
package org.opendaylight.controller.sal.streams.websockets;
-import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
/**
* {@link WebSocketServerInitializer} is used to setup the
- * {@link ChannelPipeline} of a {@link Channel}.
+ * {@link ChannelPipeline} of a {@link io.netty.channel.Channel}.
*/
-public class WebSocketServerInitializer extends
- ChannelInitializer<SocketChannel> {
+public class WebSocketServerInitializer extends ChannelInitializer<SocketChannel> {
- @Override
- protected void initChannel(SocketChannel ch) throws Exception {
- ChannelPipeline pipeline = ch.pipeline();
- pipeline.addLast("codec-http", new HttpServerCodec());
- pipeline.addLast("aggregator", new HttpObjectAggregator(65536));
- pipeline.addLast("handler", new WebSocketServerHandler());
- }
+ @Override
+ protected void initChannel(final SocketChannel ch) throws Exception {
+ ChannelPipeline pipeline = ch.pipeline();
+ pipeline.addLast("codec-http", new HttpServerCodec());
+ pipeline.addLast("aggregator", new HttpObjectAggregator(65536));
+ pipeline.addLast("handler", new WebSocketServerHandler());
+ }
}
<url-pattern>/*</url-pattern>
</servlet-mapping>
- <filter>
- <filter-name>CorsFilter</filter-name>
- <filter-class>org.apache.catalina.filters.CorsFilter</filter-class>
- <init-param>
- <param-name>cors.allowed.origins</param-name>
- <param-value>*</param-value>
- </init-param>
- <init-param>
- <param-name>cors.allowed.methods</param-name>
- <param-value>GET,POST,HEAD,OPTIONS,PUT,DELETE</param-value>
- </init-param>
- <init-param>
- <param-name>cors.allowed.headers</param-name>
- <param-value>Content-Type,X-Requested-With,accept,authorization,
- origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers</param-value>
- </init-param>
- <init-param>
- <param-name>cors.exposed.headers</param-name>
- <param-value>Access-Control-Allow-Origin,Access-Control-Allow-Credentials</param-value>
- </init-param>
- <init-param>
- <param-name>cors.support.credentials</param-name>
- <param-value>true</param-value>
- </init-param>
- <init-param>
- <param-name>cors.preflight.maxage</param-name>
- <param-value>10</param-value>
- </init-param>
- </filter>
- <filter-mapping>
- <filter-name>CorsFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
<security-constraint>
<web-resource-collection>
<web-resource-name>NB api</web-resource-name>
* Test when some data are in one case node and other in another. This isn't
* correct. Next Json validator should return error because nodes has to be
* from one case below concrete choice.
- *
+ *
*/
@Test
public void nodeSchemasOnVariousChoiceCasePathTest() {
* Additionally data are loadef from various choices. This isn't correct.
* Next Json validator should return error because nodes has to be from one
* case below concrete choice.
- *
+ *
*/
@Test
public void nodeSchemasOnVariousChoiceCasePathAndMultipleChoicesTest() {
}
/**
- *
+ *
*/
@Test
public void nodeSchemasInMultipleChoicesTest() {
assertEquals( "Json token type for key " + keyName, expToken, peek );
}
- verifier.verify( jReader, keyName );;
+ verifier.verify( jReader, keyName );
}
if( !expectedMap.isEmpty() ) {
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
- *
+ *
* All tests are commented now because leafref isn't supported now
- *
+ *
*/
public class CnSnToJsonLeafrefType extends YangAndXmlAndDataSchemaLoader {
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
- *
+ *
* CnSn = Composite node and Simple node data structure Class contains test of
* serializing simple nodes data values according data types from YANG schema to
* XML file
- *
+ *
*/
public class CnSnInstanceIdentifierToXmlTest extends YangAndXmlAndDataSchemaLoader {
-
+
@BeforeClass
public static void initialization() throws URISyntaxException {
dataLoad("/instanceidentifier/yang", 4, "instance-identifier-module", "cont");
// String output =
// String.format("<data>" +
-// "\n<cont_m1>" +
-// "\n\t<lf1_m1>" +
-// "\n\t\tlf1 m1 value" +
-// "\n\t</lf1_m1>" +
+// "\n<cont_m1>" +
+// "\n\t<lf1_m1>" +
+// "\n\t\tlf1 m1 value" +
+// "\n\t</lf1_m1>" +
// "\n</cont_m1>" +
// "\n<cont_m2>" +
// "\n\t<lf1_m2>" +
regex.append(".*<data.*");
regex.append(".*xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\"");
regex.append(".*>");
-
-
+
+
regex.append(".*<contB_m1.*\\/>");
regex.append(".*xmlns=\"module:one\"");
regex.append(".*>");
SimpleNodeWrapper lf1_m1 = new SimpleNodeWrapper(uriModule1, "lf1_m1", "lf1 m1 value");
cont_m1.addValue(lf1_m1);
CompositeNodeWrapper contB_m1 = new CompositeNodeWrapper(uriModule1, "contB_m1");
-
+
data.addValue(contB_m1);
data.addValue(cont_m1);
*/
package org.opendaylight.controller.sal.restconf.impl.test;
-import java.util.concurrent.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.yangtools.yang.common.RpcResult;
public class DummyFuture implements Future<RpcResult<TransactionStatus>> {
-
+
private final boolean cancel;
private final boolean isCancelled;
private final boolean isDone;
private final RpcResult<TransactionStatus> result;
-
+
public DummyFuture() {
cancel = false;
isCancelled = false;
isDone = false;
result = null;
}
-
- private DummyFuture(Builder builder) {
+
+ private DummyFuture(final Builder builder) {
cancel = builder.cancel;
isCancelled = builder.isCancelled;
isDone = builder.isDone;
result = builder.result;
}
-
+
public static Builder builder() {
return new DummyFuture.Builder();
}
@Override
- public boolean cancel(boolean mayInterruptIfRunning) {
+ public boolean cancel(final boolean mayInterruptIfRunning) {
return cancel;
}
}
@Override
- public RpcResult<TransactionStatus> get(long timeout, TimeUnit unit) throws InterruptedException,
- ExecutionException, TimeoutException {
+ public RpcResult<TransactionStatus> get(final long timeout, final TimeUnit unit) throws InterruptedException,
+ ExecutionException, TimeoutException {
return result;
}
-
+
public static class Builder {
-
+
private boolean cancel;
private boolean isCancelled;
private boolean isDone;
private RpcResult<TransactionStatus> result;
- public Builder cancel(boolean cancel) {
+ public Builder cancel(final boolean cancel) {
this.cancel = cancel;
return this;
}
-
- public Builder isCancelled(boolean isCancelled) {
+
+ public Builder isCancelled(final boolean isCancelled) {
this.isCancelled = isCancelled;
return this;
}
-
- public Builder isDone(boolean isDone) {
+
+ public Builder isDone(final boolean isDone) {
this.isDone = isDone;
return this;
}
-
- public Builder rpcResult(RpcResult<TransactionStatus> result) {
+
+ public Builder rpcResult(final RpcResult<TransactionStatus> result) {
this.result = result;
return this;
}
-
+
public Future<RpcResult<TransactionStatus>> build() {
return new DummyFuture(this);
}
import java.util.Collection;
-import org.opendaylight.yangtools.yang.common.*;
+import org.opendaylight.yangtools.yang.common.RpcError;
+import org.opendaylight.yangtools.yang.common.RpcResult;
public class DummyRpcResult<T> implements RpcResult<T> {
errors = null;
}
- private DummyRpcResult(Builder<T> builder) {
+ private DummyRpcResult(final Builder<T> builder) {
isSuccessful = builder.isSuccessful;
result = builder.result;
errors = builder.errors;
private T result;
private Collection<RpcError> errors;
- public Builder<T> isSuccessful(boolean isSuccessful) {
+ public Builder<T> isSuccessful(final boolean isSuccessful) {
this.isSuccessful = isSuccessful;
return this;
}
- public Builder<T> result(T result) {
+ public Builder<T> result(final T result) {
this.result = result;
return this;
}
- public Builder<T> errors(Collection<RpcError> errors) {
+ public Builder<T> errors(final Collection<RpcError> errors) {
this.errors = errors;
return this;
}
import java.util.List;
import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.SchemaPath;
+import org.opendaylight.yangtools.yang.model.api.Status;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.UnknownSchemaNode;
public class DummyType implements TypeDefinition<DummyType> {
QName dummyQName = TestUtils.buildQName("dummy type", "simple:uri", "2012-12-17");
*/
package org.opendaylight.controller.sal.restconf.impl.test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
import java.net.URI;
import java.net.URISyntaxException;
TestUtils.normalizeCompositeNode(prepareCnSn("normalize-node-module"), modules, schemaNodePath);
}
- private CompositeNode prepareCnSn(String namespace) {
+ private CompositeNode prepareCnSn(final String namespace) {
URI uri = null;
if (namespace != null) {
try {
when(brokerFacade.commitConfigurationDataDelete(any(InstanceIdentifier.class))).thenReturn(dummyFuture);
Response response = target(uri).request(MediaType.APPLICATION_XML).delete();
assertEquals(200, response.getStatus());
-
+
dummyFuture = createFuture(TransactionStatus.FAILED);
when(brokerFacade.commitConfigurationDataDelete(any(InstanceIdentifier.class))).thenReturn(dummyFuture);
response = target(uri).request(MediaType.APPLICATION_XML).delete();
assertEquals(500, response.getStatus());
}
-
+
private Future<RpcResult<TransactionStatus>> createFuture(TransactionStatus statusName) {
RpcResult<TransactionStatus> rpcResult = new DummyRpcResult.Builder<TransactionStatus>().result(statusName).build();
return DummyFuture.builder().rpcResult(rpcResult).build();
package org.opendaylight.controller.sal.restconf.impl.test;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.Map.Entry;
+import java.util.Set;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import org.opendaylight.controller.sal.restconf.impl.ControllerContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
import org.opendaylight.controller.sal.restconf.impl.RestconfError;
-import org.opendaylight.controller.sal.restconf.impl.StructuredData;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
+import org.opendaylight.controller.sal.restconf.impl.StructuredData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
Map<String, String> expErrorInfo;
- public ComplexErrorInfoVerifier( Map<String, String> expErrorInfo ) {
+ public ComplexErrorInfoVerifier( final Map<String, String> expErrorInfo ) {
this.expErrorInfo = expErrorInfo;
}
@Override
- public void verifyXML( Node errorInfoNode ) {
+ public void verifyXML( final Node errorInfoNode ) {
Map<String, String> mutableExpMap = Maps.newHashMap( expErrorInfo );
NodeList childNodes = errorInfoNode.getChildNodes();
if( child instanceof Element ) {
String expValue = mutableExpMap.remove( child.getNodeName() );
assertNotNull( "Found unexpected \"error-info\" child node: " +
- child.getNodeName(), expValue );
+ child.getNodeName(), expValue );
assertEquals( "Text content for \"error-info\" child node " +
- child.getNodeName(), expValue, child.getTextContent() );
+ child.getNodeName(), expValue, child.getTextContent() );
}
}
}
@Override
- public void verifyJson( JsonElement errorInfoElement ) {
+ public void verifyJson( final JsonElement errorInfoElement ) {
assertTrue( "\"error-info\" Json element is not an Object",
errorInfoElement.isJsonObject() );
for( Entry<String,String> actual: actualErrorInfo.entrySet() ) {
String expValue = mutableExpMap.remove( actual.getKey() );
assertNotNull( "Found unexpected \"error-info\" child node: " +
- actual.getKey(), expValue );
+ actual.getKey(), expValue );
assertEquals( "Text content for \"error-info\" child node " +
- actual.getKey(), expValue, actual.getValue() );
+ actual.getKey(), expValue, actual.getValue() );
}
if( !mutableExpMap.isEmpty() ) {
String expTextContent;
- public SimpleErrorInfoVerifier( String expErrorInfo ) {
+ public SimpleErrorInfoVerifier( final String expErrorInfo ) {
this.expTextContent = expErrorInfo;
}
- void verifyContent( String actualContent ) {
+ void verifyContent( final String actualContent ) {
assertNotNull( "Actual \"error-info\" text content is null", actualContent );
assertTrue( "", actualContent.contains( expTextContent ) );
}
@Override
- public void verifyXML( Node errorInfoNode ) {
+ public void verifyXML( final Node errorInfoNode ) {
verifyContent( errorInfoNode.getTextContent() );
}
@Override
- public void verifyJson( JsonElement errorInfoElement ) {
+ public void verifyJson( final JsonElement errorInfoElement ) {
verifyContent( errorInfoElement.getAsString() );
}
}
NamespaceContext nsContext = new NamespaceContext() {
@Override
- public Iterator getPrefixes( String namespaceURI ) {
+ public Iterator<?> getPrefixes( final String namespaceURI ) {
return null;
}
@Override
- public String getPrefix( String namespaceURI ) {
+ public String getPrefix( final String namespaceURI ) {
return null;
}
@Override
- public String getNamespaceURI( String prefix ) {
+ public String getNamespaceURI( final String prefix ) {
return "ietf-restconf".equals( prefix ) ? Draft02.RestConfModule.NAMESPACE : null;
}
};
protected Application configure() {
ResourceConfig resourceConfig = new ResourceConfig();
resourceConfig = resourceConfig.registerInstances( mockRestConf, StructuredDataToXmlProvider.INSTANCE,
- StructuredDataToJsonProvider.INSTANCE );
+ StructuredDataToJsonProvider.INSTANCE );
resourceConfig.registerClasses( RestconfDocumentedExceptionMapper.class );
return resourceConfig;
}
- void stageMockEx( RestconfDocumentedException ex ) {
+ void stageMockEx( final RestconfDocumentedException ex ) {
reset( mockRestConf );
when( mockRestConf.readOperationalData( any( String.class ) ) ).thenThrow( ex );
}
- void testJsonResponse( RestconfDocumentedException ex, Status expStatus, ErrorType expErrorType,
- ErrorTag expErrorTag, String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void testJsonResponse( final RestconfDocumentedException ex, final Status expStatus, final ErrorType expErrorType,
+ final ErrorTag expErrorTag, final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
stageMockEx( ex );
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_JSON, expStatus );
verifyJsonResponseBody( stream, expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
@Test
public void testToJsonResponseWithMessageOnly() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error" ), Status.INTERNAL_SERVER_ERROR,
- ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
// To test verification code
-// String json =
-// "{ errors: {" +
-// " error: [{" +
-// " error-tag : \"operation-failed\"" +
-// " ,error-type : \"application\"" +
-// " ,error-message : \"An error occurred\"" +
-// " ,error-info : {" +
-// " session-id: \"123\"" +
-// " ,address: \"1.2.3.4\"" +
-// " }" +
-// " }]" +
-// " }" +
-// "}";
-//
-// verifyJsonResponseBody( new java.io.StringBufferInputStream(json ), ErrorType.APPLICATION,
-// ErrorTag.OPERATION_FAILED, "An error occurred", null,
-// com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
+ // String json =
+ // "{ errors: {" +
+ // " error: [{" +
+ // " error-tag : \"operation-failed\"" +
+ // " ,error-type : \"application\"" +
+ // " ,error-message : \"An error occurred\"" +
+ // " ,error-info : {" +
+ // " session-id: \"123\"" +
+ // " ,address: \"1.2.3.4\"" +
+ // " }" +
+ // " }]" +
+ // " }" +
+ // "}";
+ //
+ // verifyJsonResponseBody( new java.io.StringBufferInputStream(json ), ErrorType.APPLICATION,
+ // ErrorTag.OPERATION_FAILED, "An error occurred", null,
+ // com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
}
@Test
public void testToJsonResponseWithInUseErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.IN_USE ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.IN_USE, "mock error", null, null );
+ ErrorTag.IN_USE ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.IN_USE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithInvalidValueErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.RPC,
- ErrorTag.INVALID_VALUE ),
- Status.BAD_REQUEST, ErrorType.RPC,
- ErrorTag.INVALID_VALUE, "mock error", null, null );
+ ErrorTag.INVALID_VALUE ),
+ Status.BAD_REQUEST, ErrorType.RPC,
+ ErrorTag.INVALID_VALUE, "mock error", null, null );
}
public void testToJsonResponseWithTooBigErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG ),
- Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG, "mock error", null, null );
+ ErrorTag.TOO_BIG ),
+ Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
+ ErrorTag.TOO_BIG, "mock error", null, null );
}
public void testToJsonResponseWithMissingAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.MISSING_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithBadAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.BAD_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownAttributeErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.UNKNOWN_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithBadElementErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT ),
- Status.BAD_REQUEST,
- ErrorType.PROTOCOL, ErrorTag.BAD_ELEMENT, "mock error", null, null );
+ ErrorTag.BAD_ELEMENT ),
+ Status.BAD_REQUEST,
+ ErrorType.PROTOCOL, ErrorTag.BAD_ELEMENT, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownElementErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
+ ErrorTag.UNKNOWN_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
}
@Test
public void testToJsonResponseWithUnknownNamespaceErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
+ ErrorTag.UNKNOWN_NAMESPACE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithMalformedMessageErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
+ ErrorTag.MALFORMED_MESSAGE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
}
@Test
public void testToJsonResponseWithAccessDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED ),
- Status.FORBIDDEN, ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED, "mock error", null, null );
+ ErrorTag.ACCESS_DENIED ),
+ Status.FORBIDDEN, ErrorType.PROTOCOL,
+ ErrorTag.ACCESS_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithLockDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED, "mock error", null, null );
+ ErrorTag.LOCK_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.LOCK_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithResourceDeniedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED, "mock error", null, null );
+ ErrorTag.RESOURCE_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.RESOURCE_DENIED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithRollbackFailedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
+ ErrorTag.ROLLBACK_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithDataExistsErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS, "mock error", null, null );
+ ErrorTag.DATA_EXISTS ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_EXISTS, "mock error", null, null );
}
@Test
public void testToJsonResponseWithDataMissingErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING, "mock error", null, null );
+ ErrorTag.DATA_MISSING ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_MISSING, "mock error", null, null );
}
@Test
public void testToJsonResponseWithOperationNotSupportedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED ),
- Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
+ ErrorTag.OPERATION_NOT_SUPPORTED ),
+ Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithOperationFailedErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorTag.OPERATION_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_FAILED, "mock error", null, null );
}
@Test
public void testToJsonResponseWithPartialOperationErrorTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
+ ErrorTag.PARTIAL_OPERATION ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
}
@Test
public void testToJsonResponseWithErrorAppTag() throws Exception {
testJsonResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag" ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag" ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
}
@Test
assertEquals( "\"error\" Json array element length", 2, arrayElement.size() );
verifyJsonErrorNode( arrayElement.get( 0 ), ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
- "mock error1", null, null );
+ "mock error1", null, null );
verifyJsonErrorNode( arrayElement.get( 1 ), ErrorType.RPC, ErrorTag.ROLLBACK_FAILED,
- "mock error2", null, null );
+ "mock error2", null, null );
}
@Test
String errorInfo = "<address>1.2.3.4</address> <session-id>123</session-id>";
testJsonResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag", errorInfo ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
- new ComplexErrorInfoVerifier( ImmutableMap.of(
- "session-id", "123", "address", "1.2.3.4" ) ) );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag", errorInfo ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
+ new ComplexErrorInfoVerifier( ImmutableMap.of(
+ "session-id", "123", "address", "1.2.3.4" ) ) );
}
@Test
Exception cause = new Exception( "mock exception cause" );
testJsonResponse( new RestconfDocumentedException( "mock error", cause ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
- ErrorTag.OPERATION_FAILED, "mock error", null,
- new SimpleErrorInfoVerifier( cause.getMessage() ) );
+ Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
+ ErrorTag.OPERATION_FAILED, "mock error", null,
+ new SimpleErrorInfoVerifier( cause.getMessage() ) );
}
- void testXMLResponse( RestconfDocumentedException ex, Status expStatus, ErrorType expErrorType,
- ErrorTag expErrorTag, String expErrorMessage,
- String expErrorAppTag, ErrorInfoVerifier errorInfoVerifier ) throws Exception
+ void testXMLResponse( final RestconfDocumentedException ex, final Status expStatus, final ErrorType expErrorType,
+ final ErrorTag expErrorTag, final String expErrorMessage,
+ final String expErrorAppTag, final ErrorInfoVerifier errorInfoVerifier ) throws Exception
{
stageMockEx( ex );
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_XML, expStatus );
verifyXMLResponseBody( stream, expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
@Test
public void testToXMLResponseWithMessageOnly() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error" ), Status.INTERNAL_SERVER_ERROR,
- ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error", null, null );
// To test verification code
-// String xml =
-// "<errors xmlns=\"urn:ietf:params:xml:ns:yang:ietf-restconf\">"+
-// " <error>" +
-// " <error-type>application</error-type>"+
-// " <error-tag>operation-failed</error-tag>"+
-// " <error-message>An error occurred</error-message>"+
-// " <error-info>" +
-// " <session-id>123</session-id>" +
-// " <address>1.2.3.4</address>" +
-// " </error-info>" +
-// " </error>" +
-// "</errors>";
-//
-// verifyXMLResponseBody( new java.io.StringBufferInputStream(xml), ErrorType.APPLICATION,
-// ErrorTag.OPERATION_FAILED, "An error occurred", null,
-// com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
+ // String xml =
+ // "<errors xmlns=\"urn:ietf:params:xml:ns:yang:ietf-restconf\">"+
+ // " <error>" +
+ // " <error-type>application</error-type>"+
+ // " <error-tag>operation-failed</error-tag>"+
+ // " <error-message>An error occurred</error-message>"+
+ // " <error-info>" +
+ // " <session-id>123</session-id>" +
+ // " <address>1.2.3.4</address>" +
+ // " </error-info>" +
+ // " </error>" +
+ // "</errors>";
+ //
+ // verifyXMLResponseBody( new java.io.StringBufferInputStream(xml), ErrorType.APPLICATION,
+ // ErrorTag.OPERATION_FAILED, "An error occurred", null,
+ // com.google.common.collect.ImmutableMap.of( "session-id", "123", "address", "1.2.3.4" ) );
}
@Test
public void testToXMLResponseWithInUseErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.IN_USE ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.IN_USE, "mock error", null, null );
+ ErrorTag.IN_USE ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.IN_USE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithInvalidValueErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.RPC,
- ErrorTag.INVALID_VALUE ),
- Status.BAD_REQUEST, ErrorType.RPC,
- ErrorTag.INVALID_VALUE, "mock error", null, null );
+ ErrorTag.INVALID_VALUE ),
+ Status.BAD_REQUEST, ErrorType.RPC,
+ ErrorTag.INVALID_VALUE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithTooBigErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG ),
- Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
- ErrorTag.TOO_BIG, "mock error", null, null );
+ ErrorTag.TOO_BIG ),
+ Status.REQUEST_ENTITY_TOO_LARGE, ErrorType.TRANSPORT,
+ ErrorTag.TOO_BIG, "mock error", null, null );
}
@Test
public void testToXMLResponseWithMissingAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.MISSING_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MISSING_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithBadAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.BAD_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownAttributeErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
+ ErrorTag.UNKNOWN_ATTRIBUTE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ATTRIBUTE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithBadElementErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.BAD_ELEMENT, "mock error", null, null );
+ ErrorTag.BAD_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.BAD_ELEMENT, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownElementErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
+ ErrorTag.UNKNOWN_ELEMENT ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_ELEMENT, "mock error", null, null );
}
@Test
public void testToXMLResponseWithUnknownNamespaceErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
+ ErrorTag.UNKNOWN_NAMESPACE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.UNKNOWN_NAMESPACE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithMalformedMessageErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE ),
- Status.BAD_REQUEST, ErrorType.PROTOCOL,
- ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
+ ErrorTag.MALFORMED_MESSAGE ),
+ Status.BAD_REQUEST, ErrorType.PROTOCOL,
+ ErrorTag.MALFORMED_MESSAGE, "mock error", null, null );
}
@Test
public void testToXMLResponseWithAccessDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED ),
- Status.FORBIDDEN, ErrorType.PROTOCOL,
- ErrorTag.ACCESS_DENIED, "mock error", null, null );
+ ErrorTag.ACCESS_DENIED ),
+ Status.FORBIDDEN, ErrorType.PROTOCOL,
+ ErrorTag.ACCESS_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithLockDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.LOCK_DENIED, "mock error", null, null );
+ ErrorTag.LOCK_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.LOCK_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithResourceDeniedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.RESOURCE_DENIED, "mock error", null, null );
+ ErrorTag.RESOURCE_DENIED ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.RESOURCE_DENIED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithRollbackFailedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
+ ErrorTag.ROLLBACK_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.ROLLBACK_FAILED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithDataExistsErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_EXISTS, "mock error", null, null );
+ ErrorTag.DATA_EXISTS ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_EXISTS, "mock error", null, null );
}
@Test
public void testToXMLResponseWithDataMissingErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING ),
- Status.CONFLICT, ErrorType.PROTOCOL,
- ErrorTag.DATA_MISSING, "mock error", null, null );
+ ErrorTag.DATA_MISSING ),
+ Status.CONFLICT, ErrorType.PROTOCOL,
+ ErrorTag.DATA_MISSING, "mock error", null, null );
}
@Test
public void testToXMLResponseWithOperationNotSupportedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED ),
- Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
+ ErrorTag.OPERATION_NOT_SUPPORTED ),
+ Status.NOT_IMPLEMENTED, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_NOT_SUPPORTED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithOperationFailedErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.OPERATION_FAILED, "mock error", null, null );
+ ErrorTag.OPERATION_FAILED ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.OPERATION_FAILED, "mock error", null, null );
}
@Test
public void testToXMLResponseWithPartialOperationErrorTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( "mock error", ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
- ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
+ ErrorTag.PARTIAL_OPERATION ),
+ Status.INTERNAL_SERVER_ERROR, ErrorType.PROTOCOL,
+ ErrorTag.PARTIAL_OPERATION, "mock error", null, null );
}
@Test
public void testToXMLResponseWithErrorAppTag() throws Exception {
testXMLResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag" ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag" ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag", null );
}
@Test
String errorInfo = "<address>1.2.3.4</address> <session-id>123</session-id>";
testXMLResponse( new RestconfDocumentedException( new RestconfError(
- ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
- "mock error", "mock-app-tag", errorInfo ) ),
- Status.BAD_REQUEST, ErrorType.APPLICATION,
- ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
- new ComplexErrorInfoVerifier( ImmutableMap.of(
- "session-id", "123", "address", "1.2.3.4" ) ) );
+ ErrorType.APPLICATION, ErrorTag.INVALID_VALUE,
+ "mock error", "mock-app-tag", errorInfo ) ),
+ Status.BAD_REQUEST, ErrorType.APPLICATION,
+ ErrorTag.INVALID_VALUE, "mock error", "mock-app-tag",
+ new ComplexErrorInfoVerifier( ImmutableMap.of(
+ "session-id", "123", "address", "1.2.3.4" ) ) );
}
@Test
Exception cause = new Exception( "mock exception cause" );
testXMLResponse( new RestconfDocumentedException( "mock error", cause ),
- Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
- ErrorTag.OPERATION_FAILED, "mock error", null,
- new SimpleErrorInfoVerifier( cause.getMessage() ) );
+ Status.INTERNAL_SERVER_ERROR, ErrorType.APPLICATION,
+ ErrorTag.OPERATION_FAILED, "mock error", null,
+ new SimpleErrorInfoVerifier( cause.getMessage() ) );
}
@Test
NodeList children = getXMLErrorList( doc, 2 );
verifyXMLErrorNode( children.item( 0 ), ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
- "mock error1", null, null );
+ "mock error1", null, null );
verifyXMLErrorNode( children.item( 1 ), ErrorType.RPC, ErrorTag.ROLLBACK_FAILED,
- "mock error2", null, null );
+ "mock error2", null, null );
}
@Test
stageMockEx( new RestconfDocumentedException( "mock error" ) );
Response resp = target("/operational/foo")
- .request().header( "Accept", MediaType.APPLICATION_JSON ).get();
+ .request().header( "Accept", MediaType.APPLICATION_JSON ).get();
InputStream stream = verifyResponse( resp, MediaType.APPLICATION_JSON,
- Status.INTERNAL_SERVER_ERROR );
+ Status.INTERNAL_SERVER_ERROR );
verifyJsonResponseBody( stream, ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED, "mock error",
- null, null );
+ null, null );
}
@Test
// The StructuredDataToJsonProvider should throw a RestconfDocumentedException with no data
when( mockRestConf.readOperationalData( any( String.class ) ) )
- .thenReturn( new StructuredData( null, null, null ) );
+ .thenReturn( new StructuredData( null, null, null ) );
Response resp = target("/operational/foo").request( MediaType.APPLICATION_JSON ).get();
verifyResponse( resp, MediaType.TEXT_PLAIN, Status.NOT_FOUND );
}
- InputStream verifyResponse( Response resp, String expMediaType, Status expStatus ) {
+ InputStream verifyResponse( final Response resp, final String expMediaType, final Status expStatus ) {
assertEquals( "getMediaType", MediaType.valueOf( expMediaType ), resp.getMediaType() );
assertEquals( "getStatus", expStatus.getStatusCode(), resp.getStatus() );
return stream;
}
- void verifyJsonResponseBody( InputStream stream, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void verifyJsonResponseBody( final InputStream stream, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
JsonArray arrayElement = parseJsonErrorArrayElement( stream );
assertEquals( "\"error\" Json array element length", 1, arrayElement.size() );
verifyJsonErrorNode( arrayElement.get( 0 ), expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
- private JsonArray parseJsonErrorArrayElement( InputStream stream ) throws IOException {
+ private JsonArray parseJsonErrorArrayElement( final InputStream stream ) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ByteStreams.copy( stream, bos );
try {
rootElement = parser.parse(
- new InputStreamReader( new ByteArrayInputStream( bos.toByteArray() ) ) );
+ new InputStreamReader( new ByteArrayInputStream( bos.toByteArray() ) ) );
}
catch( Exception e ) {
throw new IllegalArgumentException( "Invalid JSON response:\n" + bos.toString(), e );
JsonElement errorListElement = errorListEntrySet.iterator().next().getValue();
assertEquals( "\"errors\" child Json element name", "error",
- errorListEntrySet.iterator().next().getKey() );
+ errorListEntrySet.iterator().next().getKey() );
assertTrue( "\"error\" Json element is not an Array", errorListElement.isJsonArray() );
return errorListElement.getAsJsonArray();
}
- void verifyJsonErrorNode( JsonElement errorEntryElement, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) {
+ void verifyJsonErrorNode( final JsonElement errorEntryElement, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) {
JsonElement errorInfoElement = null;
Map<String, String> actualErrorInfo = null;
}
else {
assertTrue( "\"error\" leaf Json element " + leafName +
- " is not a Primitive", leafElement.isJsonPrimitive() );
+ " is not a Primitive", leafElement.isJsonPrimitive() );
leafMap.put( leafName, leafElement.getAsString() );
}
}
}
- void verifyOptionalJsonLeaf( String actualValue, String expValue, String tagName ) {
+ void verifyOptionalJsonLeaf( final String actualValue, final String expValue, final String tagName ) {
if( expValue != null ) {
assertEquals( tagName, expValue, actualValue );
}
}
}
- void verifyXMLResponseBody( InputStream stream, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier )
- throws Exception {
+ void verifyXMLResponseBody( final InputStream stream, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier )
+ throws Exception {
Document doc = parseXMLDocument( stream );
NodeList children = getXMLErrorList( doc, 1 );
verifyXMLErrorNode( children.item( 0 ), expErrorType, expErrorTag, expErrorMessage,
- expErrorAppTag, errorInfoVerifier );
+ expErrorAppTag, errorInfoVerifier );
}
- private Document parseXMLDocument( InputStream stream ) throws IOException {
+ private Document parseXMLDocument( final InputStream stream ) throws IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
factory.setCoalescing(true);
return doc;
}
- void verifyXMLErrorNode( Node errorNode, ErrorType expErrorType, ErrorTag expErrorTag,
- String expErrorMessage, String expErrorAppTag,
- ErrorInfoVerifier errorInfoVerifier ) throws Exception {
+ void verifyXMLErrorNode( final Node errorNode, final ErrorType expErrorType, final ErrorTag expErrorTag,
+ final String expErrorMessage, final String expErrorAppTag,
+ final ErrorInfoVerifier errorInfoVerifier ) throws Exception {
String errorType = (String)ERROR_TYPE.evaluate( errorNode, XPathConstants.STRING );
assertEquals( "error-type", expErrorType.getErrorTypeTag(), errorType );
}
}
- void verifyOptionalXMLLeaf( Node fromNode, XPathExpression xpath, String expValue,
- String tagName ) throws Exception {
+ void verifyOptionalXMLLeaf( final Node fromNode, final XPathExpression xpath, final String expValue,
+ final String tagName ) throws Exception {
if( expValue != null ) {
String actual = (String)xpath.evaluate( fromNode, XPathConstants.STRING );
assertEquals( tagName, expValue, actual );
}
else {
assertNull( "Found unexpected \"error\" leaf entry for: " + tagName,
- xpath.evaluate( fromNode, XPathConstants.NODE ) );
+ xpath.evaluate( fromNode, XPathConstants.NODE ) );
}
}
- NodeList getXMLErrorList( Node fromNode, int count ) throws Exception {
+ NodeList getXMLErrorList( final Node fromNode, final int count ) throws Exception {
NodeList errorList = (NodeList)ERROR_LIST.evaluate( fromNode, XPathConstants.NODESET );
assertNotNull( "Root errors node is empty", errorList );
assertEquals( "Root errors node child count", count, errorList.getLength() );
}
/**
- *
+ *
* Fill missing data (namespaces) and build correct data type in
* {@code compositeNode} according to {@code dataSchemaNode}. The method
* {@link RestconfImpl#createConfigurationData createConfigurationData} is
* Searches module with name {@code searchedModuleName} in {@code modules}.
* If module name isn't specified and module set has only one element then
* this element is returned.
- *
+ *
*/
public static Module resolveModule(String searchedModuleName, Set<Module> modules) {
assertNotNull("Modules can't be null.", modules);
*/
package org.opendaylight.controller.sal.restconf.impl.test.structures;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import org.junit.Test;
Lf lf2 = new Lf("name", "value");
Lf lf3 = new Lf("name1", "value");
Lf lf4 = new Lf("name", "value1");
-
+
assertTrue(lf1.equals(lf2));
assertFalse(lf1.equals(lf3));
assertFalse(lf1.equals(lf4));
*/
package org.opendaylight.controller.sal.restconf.impl.test.structures;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Set;
public class Lst extends YangElement {
- private Set<LstItem> lstItems;
+ private final Set<LstItem> lstItems;
- public Lst(String name) {
+ public Lst(final String name) {
super(name);
lstItems = new HashSet<>();
}
- public Lst addLstItem(LstItem lstItem) {
+ public Lst addLstItem(final LstItem lstItem) {
lstItem.setLstName(name);
while (this.lstItems.contains(lstItem)) {
lstItem.incNumOfEqualItems();
}
@Override
- public boolean equals(Object obj) {
+ public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
result = prime * result + ((lstItems == null) ? 0 : lstItems.hashCode());
return result;
}
-
}
loadAndNormalizeData("/common/augment/xml/dataa.xml", "/common/augment/yang", "main","cont");
loadAndNormalizeData("/common/augment/xml/datab.xml", "/common/augment/yang", "main","cont");
}
-
+
private void loadAndNormalizeData(String xmlPath, String yangPath, String topLevelElementName, String moduleName) {
CompositeNode compNode = TestUtils.readInputToCnSn(xmlPath, false,
XmlToCompositeNodeProvider.INSTANCE);
<cont>
- <lf1>lf1</lf1>
- <lf2>lf2</lf2>
- <cont1>
- <lf11>lf11</lf11>
- </cont1>
- <lst1>
- <lf11>lf1_1</lf11>
- </lst1>
- <lst1>
- <lf11>lf1_2</lf11>
- </lst1>
- <lflst1>lflst1_1</lflst1>
- <lflst1>lflst1_2</lflst1>
- <lflst1>lflst1_3</lflst1>
+ <lf1>lf1</lf1>
+ <lf2>lf2</lf2>
+ <cont1>
+ <lf11>lf11</lf11>
+ </cont1>
+ <lst1>
+ <lf11>lf1_1</lf11>
+ </lst1>
+ <lst1>
+ <lf11>lf1_2</lf11>
+ </lst1>
+ <lflst1>lflst1_1</lflst1>
+ <lflst1>lflst1_2</lflst1>
+ <lflst1>lflst1_3</lflst1>
</cont>
\ No newline at end of file
<cont>
- <e1>45</e1>
- <lf2b>lf2b val</lf2b>
+ <e1>45</e1>
+ <lf2b>lf2b val</lf2b>
</cont>
\ No newline at end of file
<cont>
- <cont1c>
- <lf11c>lf11c val</lf11c>
- </cont1c>
+ <cont1c>
+ <lf11c>lf11c val</lf11c>
+ </cont1c>
</cont>
\ No newline at end of file
<cont>
- <lflst1d>lflst1d_1 val</lflst1d>
- <lflst1d>lflst1d_2 val</lflst1d>
+ <lflst1d>lflst1d_1 val</lflst1d>
+ <lflst1d>lflst1d_2 val</lflst1d>
</cont>
\ No newline at end of file
<cont>
- <lst1b>
- <lf11b>lf11b_1 val</lf11b>
- </lst1b>
- <lst1b>
- <lf11b>lf11b_2 val</lf11b>
- </lst1b>
+ <lst1b>
+ <lf11b>lf11b_1 val</lf11b>
+ </lst1b>
+ <lst1b>
+ <lf11b>lf11b_2 val</lf11b>
+ </lst1b>
</cont>
\ No newline at end of file
<cont>
- <cont1c>
- <lf11c>lf11c val</lf11c>
- </cont1c>
- <lf2b>lf2b value</lf2b>
+ <cont1c>
+ <lf11c>lf11c val</lf11c>
+ </cont1c>
+ <lf2b>lf2b value</lf2b>
</cont>
\ No newline at end of file
<cont>
- <cont1c>
- <lf11c>lf11c val</lf11c>
- </cont1c>
- <lf2b>lf2b value</lf2b>
- <lf2a>lf2b value</lf2a>
+ <cont1c>
+ <lf11c>lf11c val</lf11c>
+ </cont1c>
+ <lf2b>lf2b value</lf2b>
+ <lf2a>lf2b value</lf2a>
</cont>
\ No newline at end of file
<cont>
- <lf1>lf1 val</lf1>
- <lf1a>121</lf1a>
- <lf1ab>lf1ab val</lf1ab>
+ <lf1>lf1 val</lf1>
+ <lf1a>121</lf1a>
+ <lf1ab>lf1ab val</lf1ab>
</cont>
\ No newline at end of file
<cont>
- <lf1aa>lf1aa val</lf1aa>
- <lf1>lf1 val</lf1>
- <lf1a>121</lf1a>
- <lf1aaa>lf1aaa val</lf1aaa>
+ <lf1aa>lf1aa val</lf1aa>
+ <lf1>lf1 val</lf1>
+ <lf1a>121</lf1a>
+ <lf1aaa>lf1aaa val</lf1aaa>
</cont>
\ No newline at end of file
<cont>
- <lf1aaa>lf1aaa value</lf1aaa>
- <lf2b>lf2b value</lf2b>
- <lst4a>
- <lf4ab>33</lf4ab>
- </lst4a>
- <lst4a>
- <lf4ab>33</lf4ab>
- </lst4a>
- <lst4a>
- <lf4ab>37</lf4ab>
- </lst4a>
+ <lf1aaa>lf1aaa value</lf1aaa>
+ <lf2b>lf2b value</lf2b>
+ <lst4a>
+ <lf4ab>33</lf4ab>
+ </lst4a>
+ <lst4a>
+ <lf4ab>33</lf4ab>
+ </lst4a>
+ <lst4a>
+ <lf4ab>37</lf4ab>
+ </lst4a>
</cont>
\ No newline at end of file
<cont>
- <lf1aa>lf1aa val</lf1aa>
- <lf1>lf1 val</lf1>
- <lf1a>121</lf1a>
- <lf1ab>lf1ab value</lf1ab>
+ <lf1aa>lf1aa val</lf1aa>
+ <lf1>lf1 val</lf1>
+ <lf1a>121</lf1a>
+ <lf1ab>lf1ab value</lf1ab>
</cont>
\ No newline at end of file
<cont>
- <cont1>
- <lf11>true</lf11>
- </cont1>
- <lf3>true</lf3>
+ <cont1>
+ <lf11>true</lf11>
+ </cont1>
+ <lf3>true</lf3>
</cont>
\ No newline at end of file
<cont>
- <lf7>200</lf7>
+ <lf7>200</lf7>
</cont>
\ No newline at end of file
<cont>
- <lf5>137</lf5>
+ <lf5>137</lf5>
</cont>
\ No newline at end of file
<cont>
- <lf6>44.33</lf6>
+ <lf6>44.33</lf6>
</cont>
\ No newline at end of file
<cont>
- <lflst1>345</lflst1>
- <lflst1>346</lflst1>
- <lflst1>347</lflst1>
+ <lflst1>345</lflst1>
+ <lflst1>346</lflst1>
+ <lflst1>347</lflst1>
</cont>
\ No newline at end of file
<cont>
- <lf1>121</lf1>
- <lf2>121</lf2>
+ <lf1>121</lf1>
+ <lf2>121</lf2>
</cont>
\ No newline at end of file
<cont>
- <lfnint8Min>-128</lfnint8Min>
- <lfnint8Max>127</lfnint8Max>
- <lfnint16Min>-32768</lfnint16Min>
- <lfnint16Max>32767</lfnint16Max>
- <lfnint32Min>-2147483648</lfnint32Min>
- <lfnint32Max>2147483647</lfnint32Max>
- <lfnint64Min>-9223372036854775808</lfnint64Min>
- <lfnint64Max>9223372036854775807</lfnint64Max>
- <lfnuint8Max>255</lfnuint8Max>
- <lfnuint16Max>65535</lfnuint16Max>
- <lfnuint32Max>4294967295</lfnuint32Max>
- <lfstr>lfstr</lfstr>
- <lfstr1></lfstr1>
- <lfbool1>true</lfbool1>
- <lfbool2>false</lfbool2>
- <lfbool3>bla</lfbool3>
- <lfdecimal1>43.32</lfdecimal1>
- <lfdecimal2>-0.43</lfdecimal2>
- <lfdecimal3>43</lfdecimal3>
- <lfdecimal4>43E3</lfdecimal4>
- <lfdecimal6>33.12345</lfdecimal6>
- <lfenum>enum3</lfenum>
- <lfbits>bit3 bit2</lfbits>
- <lfbinary>ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz</lfbinary>
- <lfempty />
- <lfunion1>324</lfunion1>
- <lfunion2>33.3</lfunion2>
- <lfunion3>55</lfunion3>
- <lfunion4>true</lfunion4>
- <lfunion5>true</lfunion5>
- <lfunion6>10</lfunion6>
- <lfunion7></lfunion7>
- <lfunion8></lfunion8>
- <lfunion9></lfunion9>
- <lfunion10>bt1</lfunion10>
- <lfunion11>33</lfunion11>
- <lfunion12>false</lfunion12>
- <lfunion13>b1</lfunion13>
- <lfunion14>zero</lfunion14>
- <identityref1 xmlns:x="simple:data:types">x:iden</identityref1>
+ <lfnint8Min>-128</lfnint8Min>
+ <lfnint8Max>127</lfnint8Max>
+ <lfnint16Min>-32768</lfnint16Min>
+ <lfnint16Max>32767</lfnint16Max>
+ <lfnint32Min>-2147483648</lfnint32Min>
+ <lfnint32Max>2147483647</lfnint32Max>
+ <lfnint64Min>-9223372036854775808</lfnint64Min>
+ <lfnint64Max>9223372036854775807</lfnint64Max>
+ <lfnuint8Max>255</lfnuint8Max>
+ <lfnuint16Max>65535</lfnuint16Max>
+ <lfnuint32Max>4294967295</lfnuint32Max>
+ <lfstr>lfstr</lfstr>
+ <lfstr1></lfstr1>
+ <lfbool1>true</lfbool1>
+ <lfbool2>false</lfbool2>
+ <lfbool3>bla</lfbool3>
+ <lfdecimal1>43.32</lfdecimal1>
+ <lfdecimal2>-0.43</lfdecimal2>
+ <lfdecimal3>43</lfdecimal3>
+ <lfdecimal4>43E3</lfdecimal4>
+ <lfdecimal6>33.12345</lfdecimal6>
+ <lfenum>enum3</lfenum>
+ <lfbits>bit3 bit2</lfbits>
+ <lfbinary>ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz</lfbinary>
+ <lfempty />
+ <lfunion1>324</lfunion1>
+ <lfunion2>33.3</lfunion2>
+ <lfunion3>55</lfunion3>
+ <lfunion4>true</lfunion4>
+ <lfunion5>true</lfunion5>
+ <lfunion6>10</lfunion6>
+ <lfunion7></lfunion7>
+ <lfunion8></lfunion8>
+ <lfunion9></lfunion9>
+ <lfunion10>bt1</lfunion10>
+ <lfunion11>33</lfunion11>
+ <lfunion12>false</lfunion12>
+ <lfunion13>b1</lfunion13>
+ <lfunion14>zero</lfunion14>
+ <identityref1 xmlns:x="simple:data:types">x:iden</identityref1>
</cont>
\ No newline at end of file
<cont1>
- <lf11>lf</lf11>
- <lflst11>56</lflst11>
- <lflst11>55</lflst11>
- <lflst11>57</lflst11>
- <lflst12>lflst12 str3</lflst12>
- <lst11>
- <lst112>
- <lf1121>lf1121 str22</lf1121>
- </lst112>
- <lf111>141</lf111>
- <lf112>lf112 str2</lf112>
- <lst111>
- <lf1111>55</lf1111>
- </lst111>
- <cont111>
- <lflst1111>4097</lflst1111>
- <lflst1111>2049</lflst1111>
- <lflst1111>1025</lflst1111>
- <lst1111>
- <lf1111A>lf1111A str22</lf1111A>
- <lf1111B>8</lf1111B>
- </lst1111>
- <lf1111>lf1111 str2</lf1111>
- <lst1111>
- <lf1111B>5</lf1111B>
- <lf1111A>lf1111A str21</lf1111A>
- </lst1111>
- </cont111>
- <lst111>
- <lf1111>56</lf1111>
- </lst111>
- <lst112>
- <lf1121>lf1121 str21</lf1121>
- </lst112>
- </lst11>
- <lflst12>lflst12 str1</lflst12>
- <lst11>
- <lf111>140</lf111>
- <lf112>lf112 str</lf112>
- <cont111>
- <lf1111>lf1111 str</lf1111>
- <lflst1111>2048</lflst1111>
- <lflst1111>1024</lflst1111>
- <lflst1111>4096</lflst1111>
- <lst1111>
- <lf1111A>lf1111A str11</lf1111A>
- <lf1111B>4</lf1111B>
- </lst1111>
- <lst1111>
- <lf1111A>lf1111A str12</lf1111A>
- <lf1111B>7</lf1111B>
- </lst1111>
- </cont111>
- <lst111>
- <lf1111>65</lf1111>
- </lst111>
- <lst112>
- <lf1121>lf1121 str11</lf1121>
- </lst112>
- </lst11>
- <lflst12>lflst12 str2</lflst12>
+ <lf11>lf</lf11>
+ <lflst11>56</lflst11>
+ <lflst11>55</lflst11>
+ <lflst11>57</lflst11>
+ <lflst12>lflst12 str3</lflst12>
+ <lst11>
+ <lst112>
+ <lf1121>lf1121 str22</lf1121>
+ </lst112>
+ <lf111>141</lf111>
+ <lf112>lf112 str2</lf112>
+ <lst111>
+ <lf1111>55</lf1111>
+ </lst111>
+ <cont111>
+ <lflst1111>4097</lflst1111>
+ <lflst1111>2049</lflst1111>
+ <lflst1111>1025</lflst1111>
+ <lst1111>
+ <lf1111A>lf1111A str22</lf1111A>
+ <lf1111B>8</lf1111B>
+ </lst1111>
+ <lf1111>lf1111 str2</lf1111>
+ <lst1111>
+ <lf1111B>5</lf1111B>
+ <lf1111A>lf1111A str21</lf1111A>
+ </lst1111>
+ </cont111>
+ <lst111>
+ <lf1111>56</lf1111>
+ </lst111>
+ <lst112>
+ <lf1121>lf1121 str21</lf1121>
+ </lst112>
+ </lst11>
+ <lflst12>lflst12 str1</lflst12>
+ <lst11>
+ <lf111>140</lf111>
+ <lf112>lf112 str</lf112>
+ <cont111>
+ <lf1111>lf1111 str</lf1111>
+ <lflst1111>2048</lflst1111>
+ <lflst1111>1024</lflst1111>
+ <lflst1111>4096</lflst1111>
+ <lst1111>
+ <lf1111A>lf1111A str11</lf1111A>
+ <lf1111B>4</lf1111B>
+ </lst1111>
+ <lst1111>
+ <lf1111A>lf1111A str12</lf1111A>
+ <lf1111B>7</lf1111B>
+ </lst1111>
+ </cont111>
+ <lst111>
+ <lf1111>65</lf1111>
+ </lst111>
+ <lst112>
+ <lf1121>lf1121 str11</lf1121>
+ </lst112>
+ </lst11>
+ <lflst12>lflst12 str2</lflst12>
</cont1>
<cont1>
- <lst11>
- <lf111>1</lf111>
- <lst111></lst111>
- <lst111></lst111>
- <lst111>
- <lf1111></lf1111>
- </lst111>
- <lst111>
- <lf1111>35</lf1111>
- </lst111>
- <cont111></cont111>
- </lst11>
- <lst11>
- <lf111>2</lf111>
- <cont111>
- <lf1111></lf1111>
- <lflst1111></lflst1111>
- <lflst1111>1024</lflst1111>
- <lflst1111>4096</lflst1111>
- <lst1111>
- <lf1111B>4</lf1111B>
- </lst1111>
- <lst1111>
- <lf1111A>lf1111A str12</lf1111A>
- </lst1111>
- </cont111>
- <lst112></lst112>
- </lst11>
- <lst11>
- <lf111>3</lf111>
- <cont111>
- <lf1111></lf1111>
- <lflst1111></lflst1111>
- <lflst1111></lflst1111>
- <lst1111></lst1111>
- <lst1111></lst1111>
- </cont111>
- </lst11>
+ <lst11>
+ <lf111>1</lf111>
+ <lst111></lst111>
+ <lst111></lst111>
+ <lst111>
+ <lf1111></lf1111>
+ </lst111>
+ <lst111>
+ <lf1111>35</lf1111>
+ </lst111>
+ <cont111></cont111>
+ </lst11>
+ <lst11>
+ <lf111>2</lf111>
+ <cont111>
+ <lf1111></lf1111>
+ <lflst1111></lflst1111>
+ <lflst1111>1024</lflst1111>
+ <lflst1111>4096</lflst1111>
+ <lst1111>
+ <lf1111B>4</lf1111B>
+ </lst1111>
+ <lst1111>
+ <lf1111A>lf1111A str12</lf1111A>
+ </lst1111>
+ </cont111>
+ <lst112></lst112>
+ </lst11>
+ <lst11>
+ <lf111>3</lf111>
+ <cont111>
+ <lf1111></lf1111>
+ <lflst1111></lflst1111>
+ <lflst1111></lflst1111>
+ <lst1111></lst1111>
+ <lst1111></lst1111>
+ </cont111>
+ </lst11>
</cont1>
module basic-module {
- namespace "basic:module";
-
- prefix "basmod";
-
- import referenced-module {prefix refmo; revision-date 2013-12-2;}
-
- revision 2013-12-2 {
- }
-
- container cont {
- container cont1 {
- leaf lf11 {
- type identityref {
- base "refmo:iden";
- }
- }
- }
- leaf lfStr {
- type string;
- }
- leaf lfInt8 {
- type int8;
- }
-
- leaf lfInt16 {
- type int16;
- }
-
- leaf lfInt32 {
- type int32;
- }
-
- leaf lfInt64 {
- type int64;
- }
-
- leaf lfUint8 {
- type uint8;
- }
-
- leaf lfUint16 {
- type uint16;
- }
-
- leaf lfUint32 {
- type uint32;
- }
-
- leaf lfUint64 {
- type uint64;
- }
-
- leaf lfBinary {
- type binary;
- }
-
- leaf lfBits {
- type bits {
- bit one;
- bit two;
- bit three;
- }
- }
-
- leaf lfEnumeration {
- type enumeration {
- enum enum1;
- enum enum2;
- enum enum3;
- }
- }
-
- leaf lfEmpty {
- type empty;
- }
-
- leaf lfBoolean {
- type boolean;
- }
-
- leaf lfUnion {
- type union {
- type int8;
- type string;
- type bits {
- bit first;
- bit second;
- }
- type boolean;
- }
- }
-
- leaf lfLfref {
- type leafref {
- path "/cont/lfBoolean";
- }
- }
-
- leaf lfInIdentifier {
- type instance-identifier;
- }
-
- }
-
-}
\ No newline at end of file
+ namespace "basic:module";
+
+ prefix "basmod";
+
+ import referenced-module {prefix refmo; revision-date 2013-12-2;}
+
+ revision 2013-12-2 {
+ }
+
+ container cont {
+ container cont1 {
+ leaf lf11 {
+ type identityref {
+ base "refmo:iden";
+ }
+ }
+ }
+ leaf lfStr {
+ type string;
+ }
+ leaf lfInt8 {
+ type int8;
+ }
+
+ leaf lfInt16 {
+ type int16;
+ }
+
+ leaf lfInt32 {
+ type int32;
+ }
+
+ leaf lfInt64 {
+ type int64;
+ }
+
+ leaf lfUint8 {
+ type uint8;
+ }
+
+ leaf lfUint16 {
+ type uint16;
+ }
+
+ leaf lfUint32 {
+ type uint32;
+ }
+
+ leaf lfUint64 {
+ type uint64;
+ }
+
+ leaf lfBinary {
+ type binary;
+ }
+
+ leaf lfBits {
+ type bits {
+ bit one;
+ bit two;
+ bit three;
+ }
+ }
+
+ leaf lfEnumeration {
+ type enumeration {
+ enum enum1;
+ enum enum2;
+ enum enum3;
+ }
+ }
+
+ leaf lfEmpty {
+ type empty;
+ }
+
+ leaf lfBoolean {
+ type boolean;
+ }
+
+ leaf lfUnion {
+ type union {
+ type int8;
+ type string;
+ type bits {
+ bit first;
+ bit second;
+ }
+ type boolean;
+ }
+ }
+
+ leaf lfLfref {
+ type leafref {
+ path "/cont/lfBoolean";
+ }
+ }
+
+ leaf lfInIdentifier {
+ type instance-identifier;
+ }
+
+ }
+
+}
<input xmlns="test:module">
- <cont>
- <cont1>
- <lf11>lf1 data</lf11>
- <lf12>lf2 data</lf12>
- </cont1>
- </cont>
+ <cont>
+ <cont1>
+ <lf11>lf1 data</lf11>
+ <lf12>lf2 data</lf12>
+ </cont1>
+ </cont>
</input>
\ No newline at end of file
<cont xmlns="instance:identifier:module">
<cont1>
- <lst11 xmlns="augment:module" xmlns:c="augment:augment:module">
- <keyvalue111>value1</keyvalue111>
- <keyvalue112>value2</keyvalue112>
- <lf111 xmlns="augment:augment:module" xmlns:a="instance:identifier:module" xmlns:b="augment:module" >/a:cont/a:cont1/b:lst11[b:keyvalue111="value1"][b:keyvalue112="value2"]/c:lf112</lf111>
- <lf112 xmlns="augment:augment:module">lf112 value</lf112>
- </lst11>
+ <lst11 xmlns="augment:module" xmlns:c="augment:augment:module">
+ <keyvalue111>value1</keyvalue111>
+ <keyvalue112>value2</keyvalue112>
+ <lf111 xmlns="augment:augment:module" xmlns:a="instance:identifier:module" xmlns:b="augment:module" >/a:cont/a:cont1/b:lst11[b:keyvalue111="value1"][b:keyvalue112="value2"]/c:lf112</lf111>
+ <lf112 xmlns="augment:augment:module">lf112 value</lf112>
+ </lst11>
</cont1>
</cont>
<interfaces xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces" >
- <interface>
- <name>eth0</name>
- <type>ethernetCsmacd</type>
- <enabled>false</enabled>
- <description>some interface</description>
- </interface>
+ <interface>
+ <name>eth0</name>
+ <type>ethernetCsmacd</type>
+ <enabled>false</enabled>
+ <description>some interface</description>
+ </interface>
</interfaces>
\ No newline at end of file
<cont>
- <lf1>str0</lf1>
- <lf2></lf2>
- <lf3/>
- <lflst1>121</lflst1>
- <lflst1>131</lflst1>
- <lflst1>str1</lflst1>
- <lst1>
- <lf11>str2</lf11>
- </lst1>
- <cont1>
- <lf11>100</lf11>
- </cont1>
+ <lf1>str0</lf1>
+ <lf2></lf2>
+ <lf3/>
+ <lflst1>121</lflst1>
+ <lflst1>131</lflst1>
+ <lflst1>str1</lflst1>
+ <lst1>
+ <lf11>str2</lf11>
+ </lst1>
+ <cont1>
+ <lf11>100</lf11>
+ </cont1>
</cont>
<cont>
- <lst1>
- <lf11>str0</lf11>
- <lflst11>121</lflst11>
- <lflst11>131</lflst11>
- <lflst11>str1</lflst11>
- <lst11>
- <lf111>str2</lf111>
- </lst11>
- <cont11>
- <lf111>100</lf111>
- </cont11>
- </lst1>
- <lst1>
- <lflst11>221</lflst11>
- <cont11>
- <lf111>100</lf111>
- </cont11>
- </lst1>
- <lf1>lf1</lf1>
+ <lst1>
+ <lf11>str0</lf11>
+ <lflst11>121</lflst11>
+ <lflst11>131</lflst11>
+ <lflst11>str1</lflst11>
+ <lst11>
+ <lf111>str2</lf111>
+ </lst11>
+ <cont11>
+ <lf111>100</lf111>
+ </cont11>
+ </lst1>
+ <lst1>
+ <lflst11>221</lflst11>
+ <cont11>
+ <lf111>100</lf111>
+ </cont11>
+ </lst1>
+ <lf1>lf1</lf1>
</cont>
<cont>
- <lf1></lf1>
- <lflst1></lflst1>
- <lflst1></lflst1>
- <lst1>
- <lf11></lf11>
- </lst1>
+ <lf1></lf1>
+ <lflst1></lflst1>
+ <lflst1></lflst1>
+ <lst1>
+ <lf11></lf11>
+ </lst1>
</cont>
<cont xmlns="general:module" xmlns:x="x:namespace" xmlns:y="y:namespace">
- <cont1 xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
- <lf11 xmlns="identityref:module" xmlns:c="c:namespace">iden</lf11>
+ <cont1 xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
+ <lf11 xmlns="identityref:module" xmlns:c="c:namespace">iden</lf11>
</cont1>
</cont>
\ No newline at end of file
<cont xmlns:x="x:namespace" xmlns:y="y:namespace">
- <cont1 xmlns="identityref:module" xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
- <lf11 xmlns:c="c:namespace">iden</lf11>
+ <cont1 xmlns="identityref:module" xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
+ <lf11 xmlns:c="c:namespace">iden</lf11>
</cont1>
</cont>
\ No newline at end of file
<cont xmlns="identityref:module" xmlns:x="x:namespace" xmlns:y="y:namespace">
- <cont1 xmlns:c="identity:module" xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
- <lf11>z:iden</lf11>
+ <cont1 xmlns:c="identity:module" xmlns:z="z:namespace" xmlns:a="a:namespace" xmlns:b="b:namespace">
+ <lf11>z:iden</lf11>
</cont1>
</cont>
\ No newline at end of file
<cont>
- <cont1>
- <lf11>x:iden</lf11>
+ <cont1>
+ <lf11>x:iden</lf11>
</cont1>
</cont>
\ No newline at end of file
<cont>
<cont1>
- <lf11>iden</lf11>
+ <lf11>iden</lf11>
</cont1>
</cont>
\ No newline at end of file
*/
package org.opendaylight.controller.sal.rest.doc;
+import java.util.Collection;
+import java.util.Collections;
+
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.impl.ApiDocGenerator;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Collection;
-import java.util.Collections;
-
-
-public class DocProvider implements BundleActivator,
- ServiceTrackerCustomizer<Broker, Broker>,
- Provider,
- AutoCloseable {
-
- private Logger _logger = LoggerFactory.getLogger(DocProvider.class);
-
- private ServiceTracker<Broker, Broker> brokerServiceTracker;
- private BundleContext bundleContext;
- private Broker.ProviderSession session;
-
- @Override
- public void close() throws Exception {
- stop(bundleContext);
- }
-
- @Override
- public void onSessionInitiated(Broker.ProviderSession providerSession) {
- SchemaService schemaService = providerSession.getService(SchemaService.class);
- ApiDocGenerator.getInstance().setSchemaService(schemaService);
-
- _logger.debug("Restconf API Explorer started");
-
- }
-
- @Override
- public Collection<ProviderFunctionality> getProviderFunctionality() {
- return Collections.emptySet();
- }
-
- @Override
- public void start(BundleContext context) throws Exception {
- bundleContext = context;
- brokerServiceTracker = new ServiceTracker(context, Broker.class, this);
- brokerServiceTracker.open();
- }
-
- @Override
- public void stop(BundleContext context) throws Exception {
- if (brokerServiceTracker != null)
- brokerServiceTracker.close();
-
- if (session != null)
- session.close();
- }
-
- @Override
- public Broker addingService(ServiceReference<Broker> reference) {
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- return broker;
- }
-
- @Override
- public void modifiedService(ServiceReference<Broker> reference, Broker service) {
- if (session != null)
- session.close();
-
- Broker broker = bundleContext.getService(reference);
- session = broker.registerProvider(this, bundleContext);
- }
-
- @Override
- public void removedService(ServiceReference<Broker> reference, Broker service) {
- bundleContext.ungetService(reference);
- }
+public class DocProvider implements BundleActivator, ServiceTrackerCustomizer<Broker, Broker>, Provider, AutoCloseable {
+
+ private static final Logger _logger = LoggerFactory.getLogger(DocProvider.class);
+
+ private ServiceTracker<Broker, Broker> brokerServiceTracker;
+ private BundleContext bundleContext;
+ private Broker.ProviderSession session;
+
+ @Override
+ public void close() throws Exception {
+ stop(bundleContext);
+ }
+
+ @Override
+ public void onSessionInitiated(final Broker.ProviderSession providerSession) {
+ SchemaService schemaService = providerSession.getService(SchemaService.class);
+ ApiDocGenerator.getInstance().setSchemaService(schemaService);
+
+ _logger.debug("Restconf API Explorer started");
+ }
+
+ @Override
+ public Collection<ProviderFunctionality> getProviderFunctionality() {
+ return Collections.emptySet();
+ }
+
+ @Override
+ public void start(final BundleContext context) throws Exception {
+ bundleContext = context;
+ brokerServiceTracker = new ServiceTracker<>(context, Broker.class, this);
+ brokerServiceTracker.open();
+ }
+
+ @Override
+ public void stop(final BundleContext context) throws Exception {
+ if (brokerServiceTracker != null) {
+ brokerServiceTracker.close();
+ }
+
+ if (session != null) {
+ session.close();
+ }
+ }
+
+ @Override
+ public Broker addingService(final ServiceReference<Broker> reference) {
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ return broker;
+ }
+
+ @Override
+ public void modifiedService(final ServiceReference<Broker> reference, final Broker service) {
+ if (session != null) {
+ session.close();
+ }
+
+ Broker broker = bundleContext.getService(reference);
+ session = broker.registerProvider(this, bundleContext);
+ }
+
+ @Override
+ public void removedService(final ServiceReference<Broker> reference, final Broker service) {
+ bundleContext.ungetService(reference);
+ }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
-import com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.net.URI;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.ws.rs.core.UriInfo;
+
import org.json.JSONException;
import org.json.JSONObject;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder;
-import org.opendaylight.controller.sal.rest.doc.swagger.*;
+import org.opendaylight.controller.sal.rest.doc.swagger.Api;
+import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
+import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
+import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
+import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
+import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.model.api.*;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-import java.net.URI;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.*;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
+import com.google.common.base.Preconditions;
/**
* This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation.
*/
public class ApiDocGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
-
- private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
- private ObjectMapper mapper = new ObjectMapper();
- private final ModelGenerator jsonConverter = new ModelGenerator();
-
- private SchemaService schemaService;
-
- private final String API_VERSION = "1.0.0";
- private final String SWAGGER_VERSION = "1.2";
- private final String RESTCONF_CONTEXT_ROOT = "restconf";
- private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
-
- //For now its {@link HashMap}. It will be changed to thread-safe Map when schema change listener is implemented.
- private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
-
- private ApiDocGenerator(){
- mapper.registerModule(new JsonOrgModule());
- mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
- }
-
- /**
- * Returns singleton instance
- * @return
- */
- public static ApiDocGenerator getInstance() {
- return INSTANCE;
- }
-
- /**
- *
- * @param schemaService
- */
- public void setSchemaService(SchemaService schemaService) {
- this.schemaService = schemaService;
- }
- /**
- *
- * @param uriInfo
- * @return list of modules converted to swagger compliant resource list.
- */
- public ResourceList getResourceListing(UriInfo uriInfo) {
-
- Preconditions.checkState(schemaService != null);
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
-
- Set<Module> modules = schemaContext.getModules();
-
- ResourceList resourceList = new ResourceList();
- resourceList.setApiVersion(API_VERSION);
- resourceList.setSwaggerVersion(SWAGGER_VERSION);
-
- List<Resource> resources = new ArrayList<>(modules.size());
- _logger.info("Modules found [{}]", modules.size());
-
- for (Module module : modules) {
- Resource resource = new Resource();
- String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
-
- _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
- ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
-
- if (doc != null) {
- URI uri = uriInfo.getRequestUriBuilder().
- path(generateCacheKey(module.getName(), revisionString)).
- build();
-
- resource.setPath(uri.toASCIIString());
- resources.add(resource);
- } else {
- _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
- }
- }
+ private static final Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
- resourceList.setApis(resources);
+ private static final ApiDocGenerator INSTANCE = new ApiDocGenerator();
+ private final ObjectMapper mapper = new ObjectMapper();
+ private final ModelGenerator jsonConverter = new ModelGenerator();
- return resourceList;
- }
+ private SchemaService schemaService;
- public ApiDeclaration getApiDeclaration(String module, String revision, UriInfo uriInfo) {
+ private static final String API_VERSION = "1.0.0";
+ private static final String SWAGGER_VERSION = "1.2";
+ private static final String RESTCONF_CONTEXT_ROOT = "restconf";
+ private final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
- //Lookup cache
- String cacheKey = generateCacheKey(module, revision);
+ //For now it's a {@link HashMap}. It will be changed to a thread-safe Map when the schema change listener is implemented.
+ private final Map<String, ApiDeclaration> MODULE_DOC_CACHE = new HashMap<String, ApiDeclaration>();
- if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
- _logger.debug("Serving from cache for {}", cacheKey);
- return MODULE_DOC_CACHE.get(cacheKey);
+ private ApiDocGenerator(){
+ mapper.registerModule(new JsonOrgModule());
+ mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
}
- Date rev = null;
- try {
- rev = SIMPLE_DATE_FORMAT.parse(revision);
- } catch (ParseException e) {
- throw new IllegalArgumentException(e);
+ /**
+ * Returns singleton instance
+ * @return the singleton {@code ApiDocGenerator} instance
+ */
+ public static ApiDocGenerator getInstance() {
+ return INSTANCE;
}
- SchemaContext schemaContext = schemaService.getGlobalContext();
- Preconditions.checkState(schemaContext != null);
+ /**
+ *
+ * @param schemaService service used to obtain the global {@code SchemaContext} when generating documentation
+ */
+ public void setSchemaService(final SchemaService schemaService) {
+ this.schemaService = schemaService;
+ }
+ /**
+ *
+ * @param uriInfo request URI info, used to build the per-module resource paths
+ * @return list of modules converted to swagger compliant resource list.
+ */
+ public ResourceList getResourceListing(final UriInfo uriInfo) {
+
+ Preconditions.checkState(schemaService != null);
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Set<Module> modules = schemaContext.getModules();
+
+ ResourceList resourceList = new ResourceList();
+ resourceList.setApiVersion(API_VERSION);
+ resourceList.setSwaggerVersion(SWAGGER_VERSION);
+
+ List<Resource> resources = new ArrayList<>(modules.size());
+ _logger.info("Modules found [{}]", modules.size());
+
+ for (Module module : modules) {
+ Resource resource = new Resource();
+ String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision());
+
+ _logger.debug("Working on [{},{}]...", module.getName(), revisionString);
+ ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo);
+
+ if (doc != null) {
+ URI uri = uriInfo.getRequestUriBuilder().
+ path(generateCacheKey(module.getName(), revisionString)).
+ build();
+
+ resource.setPath(uri.toASCIIString());
+ resources.add(resource);
+ } else {
+ _logger.debug("Could not generate doc for {},{}", module.getName(), revisionString);
+ }
+ }
+
+ resourceList.setApis(resources);
+
+ return resourceList;
+ }
+
+ public ApiDeclaration getApiDeclaration(final String module, final String revision, final UriInfo uriInfo) {
+
+ //Lookup cache
+ String cacheKey = generateCacheKey(module, revision);
- Module m = schemaContext.findModuleByName(module, rev);
- Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+ if (MODULE_DOC_CACHE.containsKey(cacheKey)) {
+ _logger.debug("Serving from cache for {}", cacheKey);
+ return MODULE_DOC_CACHE.get(cacheKey);
+ }
+
+ Date rev = null;
+ try {
+ rev = SIMPLE_DATE_FORMAT.parse(revision);
+ } catch (ParseException e) {
+ throw new IllegalArgumentException(e);
+ }
- String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
+ SchemaContext schemaContext = schemaService.getGlobalContext();
+ Preconditions.checkState(schemaContext != null);
+
+ Module m = schemaContext.findModuleByName(module, rev);
+ Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision);
+
+ String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme())
.append("://")
.append(uriInfo.getBaseUri().getHost())
.append(":")
.append(RESTCONF_CONTEXT_ROOT)
.toString();
- ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
- MODULE_DOC_CACHE.put(cacheKey, doc);
- return doc;
- }
+ ApiDeclaration doc = getSwaggerDocSpec(m, basePath);
+ MODULE_DOC_CACHE.put(cacheKey, doc);
+ return doc;
+ }
- public ApiDeclaration getSwaggerDocSpec(Module m, String basePath) {
- ApiDeclaration doc = new ApiDeclaration();
- doc.setApiVersion(API_VERSION);
- doc.setSwaggerVersion(SWAGGER_VERSION);
- doc.setBasePath(basePath);
- doc.setProduces(Arrays.asList("application/json", "application/xml"));
+ public ApiDeclaration getSwaggerDocSpec(final Module m, final String basePath) {
+ ApiDeclaration doc = new ApiDeclaration();
+ doc.setApiVersion(API_VERSION);
+ doc.setSwaggerVersion(SWAGGER_VERSION);
+ doc.setBasePath(basePath);
+ doc.setProduces(Arrays.asList("application/json", "application/xml"));
- List<Api> apis = new ArrayList<Api>();
+ List<Api> apis = new ArrayList<Api>();
- Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
- _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
- for (DataSchemaNode node : dataSchemaNodes) {
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ Set<DataSchemaNode> dataSchemaNodes = m.getChildNodes();
+ _logger.debug("child nodes size [{}]", dataSchemaNodes.size());
+ for (DataSchemaNode node : dataSchemaNodes) {
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
+ _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName());
- List<Parameter> pathParams = null;
- if (node.isConfiguration()) {
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/config/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, true);
+ List<Parameter> pathParams = null;
+ if (node.isConfiguration()) {
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/config/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, true);
+ }
+
+ pathParams = new ArrayList<Parameter>();
+ String resourcePath = "/operational/" + m.getName() + ":";
+ addApis(node, apis, resourcePath, pathParams, false);
+ }
}
- pathParams = new ArrayList<Parameter>();
- String resourcePath = "/operational/" + m.getName() + ":";
- addApis(node, apis, resourcePath, pathParams, false);
- }
- }
+ Set<RpcDefinition> rpcs = m.getRpcs();
+ for (RpcDefinition rpcDefinition : rpcs) {
+ String resourcePath = "/operations/" + m.getName() + ":";
+ addRpcs(rpcDefinition, apis, resourcePath);
- Set<RpcDefinition> rpcs = m.getRpcs();
- for (RpcDefinition rpcDefinition : rpcs) {
- String resourcePath = "/operations/" + m.getName() + ":";
- addRpcs(rpcDefinition, apis, resourcePath);
+ }
+ _logger.debug("Number of APIs found [{}]", apis.size());
+ doc.setApis(apis);
+ JSONObject models = null;
+
+ try {
+ models = jsonConverter.convertToJsonSchema(m);
+ doc.setModels(models);
+ _logger.debug(mapper.writeValueAsString(doc));
+ } catch (IOException | JSONException e) {
+ e.printStackTrace();
+ }
+ return doc;
}
- _logger.debug("Number of APIs found [{}]", apis.size());
- doc.setApis(apis);
- JSONObject models = null;
-
- try {
- models = jsonConverter.convertToJsonSchema(m);
- doc.setModels(models);
- _logger.debug(mapper.writeValueAsString(doc));
- } catch (IOException | JSONException e) {
- e.printStackTrace();
+
+ private String generateCacheKey(final Module m) {
+ return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
}
- return doc;
- }
-
- private String generateCacheKey(Module m) {
- return generateCacheKey(m.getName(), SIMPLE_DATE_FORMAT.format(m.getRevision()));
- }
-
- private String generateCacheKey(String module, String revision) {
- return module + "," + revision;
- }
-
- private void addApis(DataSchemaNode node,
- List<Api> apis,
- String parentPath,
- List<Parameter> parentPathParams,
- boolean addConfigApi) {
-
- Api api = new Api();
- List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
-
- String resourcePath = parentPath + createPath(node, pathParams) + "/";
- _logger.debug("Adding path: [{}]", resourcePath);
- api.setPath(resourcePath);
- api.setOperations(operations(node, pathParams, addConfigApi));
- apis.add(api);
- if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- DataNodeContainer schemaNode = (DataNodeContainer) node;
- Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
-
- for (DataSchemaNode childNode : dataSchemaNodes) {
- addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
- }
+ private String generateCacheKey(final String module, final String revision) {
+ return module + "," + revision;
}
- }
+ private void addApis(final DataSchemaNode node,
+ final List<Api> apis,
+ final String parentPath,
+ final List<Parameter> parentPathParams,
+ final boolean addConfigApi) {
+
+ Api api = new Api();
+ List<Parameter> pathParams = new ArrayList<Parameter>(parentPathParams);
+
+ String resourcePath = parentPath + createPath(node, pathParams) + "/";
+ _logger.debug("Adding path: [{}]", resourcePath);
+ api.setPath(resourcePath);
+ api.setOperations(operations(node, pathParams, addConfigApi));
+ apis.add(api);
+ if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
+ DataNodeContainer schemaNode = (DataNodeContainer) node;
+ Set<DataSchemaNode> dataSchemaNodes = schemaNode.getChildNodes();
+
+ for (DataSchemaNode childNode : dataSchemaNodes) {
+ addApis(childNode, apis, resourcePath, pathParams, addConfigApi);
+ }
+ }
- private void addRpcs(RpcDefinition rpcDefn, List<Api> apis, String parentPath) {
- Api rpc = new Api();
- String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
- rpc.setPath(resourcePath);
+ }
+
+ private void addRpcs(final RpcDefinition rpcDefn, final List<Api> apis, final String parentPath) {
+ Api rpc = new Api();
+ String resourcePath = parentPath + rpcDefn.getQName().getLocalName();
+ rpc.setPath(resourcePath);
- Operation operationSpec = new Operation();
- operationSpec.setMethod("POST");
- operationSpec.setNotes(rpcDefn.getDescription());
- operationSpec.setNickname(rpcDefn.getQName().getLocalName());
- rpc.setOperations(Arrays.asList(operationSpec));
+ Operation operationSpec = new Operation();
+ operationSpec.setMethod("POST");
+ operationSpec.setNotes(rpcDefn.getDescription());
+ operationSpec.setNickname(rpcDefn.getQName().getLocalName());
+ rpc.setOperations(Arrays.asList(operationSpec));
- apis.add(rpc);
- }
+ apis.add(rpc);
+ }
- /**
- * @param node
- * @param pathParams
- * @return
- */
- private List<Operation> operations(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig) {
- List<Operation> operations = new ArrayList<>();
+ /**
+ * @param node data schema node the operations are generated for
+ * @param pathParams path parameters collected from the enclosing resource path
+ * @return list of operations: GET always; POST, PUT and DELETE additionally when {@code isConfig} is true
+ */
+ private List<Operation> operations(final DataSchemaNode node, final List<Parameter> pathParams, final boolean isConfig) {
+ List<Operation> operations = new ArrayList<>();
- OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
- operations.add(getBuilder.pathParams(pathParams).build());
+ OperationBuilder.Get getBuilder = new OperationBuilder.Get(node);
+ operations.add(getBuilder.pathParams(pathParams).build());
- if (isConfig) {
- OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
- operations.add(postBuilder.pathParams(pathParams).build());
+ if (isConfig) {
+ OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
+ operations.add(postBuilder.pathParams(pathParams).build());
- OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
- operations.add(putBuilder.pathParams(pathParams).build());
+ OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
+ operations.add(putBuilder.pathParams(pathParams).build());
- OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
- operations.add(deleteBuilder.pathParams(pathParams).build());
+ OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
+ operations.add(deleteBuilder.pathParams(pathParams).build());
+ }
+ return operations;
}
- return operations;
- }
-
- private String createPath(final DataSchemaNode schemaNode, List<Parameter> pathParams) {
- ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
- StringBuilder path = new StringBuilder();
- QName _qName = schemaNode.getQName();
- String localName = _qName.getLocalName();
- path.append(localName);
-
- if ((schemaNode instanceof ListSchemaNode)) {
- final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
- for (final QName listKey : listKeys) {
- {
- DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
- pathListParams.add(((LeafSchemaNode) _dataChildByName));
-
- String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
- path.append(pathParamIdentifier);
-
- Parameter pathParam = new Parameter();
- pathParam.setName(listKey.getLocalName());
- pathParam.setDescription(_dataChildByName.getDescription());
- pathParam.setType("string");
- pathParam.setParamType("path");
-
- pathParams.add(pathParam);
+
+ private String createPath(final DataSchemaNode schemaNode, final List<Parameter> pathParams) {
+ ArrayList<LeafSchemaNode> pathListParams = new ArrayList<LeafSchemaNode>();
+ StringBuilder path = new StringBuilder();
+ QName _qName = schemaNode.getQName();
+ String localName = _qName.getLocalName();
+ path.append(localName);
+
+ if ((schemaNode instanceof ListSchemaNode)) {
+ final List<QName> listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition();
+ for (final QName listKey : listKeys) {
+ {
+ DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey);
+ pathListParams.add(((LeafSchemaNode) _dataChildByName));
+
+ String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}").toString();
+ path.append(pathParamIdentifier);
+
+ Parameter pathParam = new Parameter();
+ pathParam.setName(listKey.getLocalName());
+ pathParam.setDescription(_dataChildByName.getDescription());
+ pathParam.setType("string");
+ pathParam.setParamType("path");
+
+ pathParams.add(pathParam);
+ }
+ }
}
- }
+ return path.toString();
}
- return path.toString();
- }
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
-import org.opendaylight.yangtools.yang.model.api.*;
-import org.opendaylight.yangtools.yang.model.api.type.*;
+import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceCaseNode;
+import org.opendaylight.yangtools.yang.model.api.ChoiceNode;
+import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
+import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition.Bit;
import org.opendaylight.yangtools.yang.model.api.type.EnumTypeDefinition.EnumPair;
-import org.opendaylight.yangtools.yang.model.util.*;
+import org.opendaylight.yangtools.yang.model.api.type.IdentityrefTypeDefinition;
+import org.opendaylight.yangtools.yang.model.api.type.LengthConstraint;
+import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
+import org.opendaylight.yangtools.yang.model.util.BooleanType;
+import org.opendaylight.yangtools.yang.model.util.Decimal64;
+import org.opendaylight.yangtools.yang.model.util.EnumerationType;
+import org.opendaylight.yangtools.yang.model.util.ExtendedType;
+import org.opendaylight.yangtools.yang.model.util.Int16;
+import org.opendaylight.yangtools.yang.model.util.Int32;
+import org.opendaylight.yangtools.yang.model.util.Int64;
+import org.opendaylight.yangtools.yang.model.util.Int8;
+import org.opendaylight.yangtools.yang.model.util.StringType;
+import org.opendaylight.yangtools.yang.model.util.Uint16;
+import org.opendaylight.yangtools.yang.model.util.Uint32;
+import org.opendaylight.yangtools.yang.model.util.Uint64;
+import org.opendaylight.yangtools.yang.model.util.Uint8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.util.*;
-
/**
* Generates JSON Schema for data defined in Yang
*/
public class ModelGenerator {
- private static Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
-
- private static final String BASE_64 = "base64";
- private static final String BINARY_ENCODING_KEY = "binaryEncoding";
- private static final String MEDIA_KEY = "media";
- private static final String ONE_OF_KEY = "oneOf";
- private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
- private static final String MAX_ITEMS = "maxItems";
- private static final String MIN_ITEMS = "minItems";
- private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
- private static final String SCHEMA_KEY = "$schema";
- private static final String MAX_LENGTH_KEY = "maxLength";
- private static final String MIN_LENGTH_KEY = "minLength";
- private static final String REQUIRED_KEY = "required";
- private static final String REF_KEY = "$ref";
- private static final String ITEMS_KEY = "items";
- private static final String TYPE_KEY = "type";
- private static final String PROPERTIES_KEY = "properties";
- private static final String DESCRIPTION_KEY = "description";
- private static final String OBJECT_TYPE = "object";
- private static final String ARRAY_TYPE = "array";
- private static final String ENUM = "enum";
- private static final String INTEGER = "integer";
- private static final String NUMBER = "number";
- private static final String BOOLEAN = "boolean";
- private static final String STRING = "string";
-
- private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
-
- static {
- Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
- tempMap1.put(StringType.class , STRING);
- tempMap1.put(BooleanType.class , BOOLEAN);
- tempMap1.put(Int8.class , INTEGER);
- tempMap1.put(Int16.class , INTEGER);
- tempMap1.put(Int32.class , INTEGER);
- tempMap1.put(Int64.class , INTEGER);
- tempMap1.put(Uint16.class , INTEGER);
- tempMap1.put(Uint32.class , INTEGER);
- tempMap1.put(Uint64.class , INTEGER);
- tempMap1.put(Uint8.class , INTEGER);
- tempMap1.put(Decimal64.class , NUMBER);
- tempMap1.put(EnumerationType.class , ENUM);
- //TODO: Binary type
-
- YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
- }
-
- public ModelGenerator(){
- }
-
- public JSONObject convertToJsonSchema(Module module) throws IOException, JSONException {
- JSONObject models = new JSONObject();
- processContainers(module, models);
- processRPCs(module, models);
-
- return models;
- }
-
-
-
- private void processContainers(Module module, JSONObject models) throws IOException, JSONException {
-
- String moduleName = module.getName();
- Set<DataSchemaNode> childNodes = module.getChildNodes();
-
- for(DataSchemaNode childNode : childNodes){
- JSONObject moduleJSON=null;
- String filename = childNode.getQName().getLocalName();
- /*
- * For every container in the module
- */
- if(childNode instanceof ContainerSchemaNode) {
- moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
- }
-
- if(moduleJSON!=null) {
- _logger.debug("Adding model for [{}]", filename);
- moduleJSON.put("id", filename);
- models.put(filename, moduleJSON);
- }
+ private static final Logger _logger = LoggerFactory.getLogger(ModelGenerator.class);
+
+ private static final String BASE_64 = "base64";
+ private static final String BINARY_ENCODING_KEY = "binaryEncoding";
+ private static final String MEDIA_KEY = "media";
+ private static final String ONE_OF_KEY = "oneOf";
+ private static final String UNIQUE_ITEMS_KEY = "uniqueItems";
+ private static final String MAX_ITEMS = "maxItems";
+ private static final String MIN_ITEMS = "minItems";
+ private static final String SCHEMA_URL = "http://json-schema.org/draft-04/schema";
+ private static final String SCHEMA_KEY = "$schema";
+ private static final String MAX_LENGTH_KEY = "maxLength";
+ private static final String MIN_LENGTH_KEY = "minLength";
+ private static final String REQUIRED_KEY = "required";
+ private static final String REF_KEY = "$ref";
+ private static final String ITEMS_KEY = "items";
+ private static final String TYPE_KEY = "type";
+ private static final String PROPERTIES_KEY = "properties";
+ private static final String DESCRIPTION_KEY = "description";
+ private static final String OBJECT_TYPE = "object";
+ private static final String ARRAY_TYPE = "array";
+ private static final String ENUM = "enum";
+ private static final String INTEGER = "integer";
+ private static final String NUMBER = "number";
+ private static final String BOOLEAN = "boolean";
+ private static final String STRING = "string";
+
+ private static final Map<Class<? extends TypeDefinition<?>>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING;
+
+ static {
+ Map<Class<? extends TypeDefinition<?>>, String> tempMap1 = new HashMap<Class<? extends TypeDefinition<?>>, String>(10);
+ tempMap1.put(StringType.class , STRING);
+ tempMap1.put(BooleanType.class , BOOLEAN);
+ tempMap1.put(Int8.class , INTEGER);
+ tempMap1.put(Int16.class , INTEGER);
+ tempMap1.put(Int32.class , INTEGER);
+ tempMap1.put(Int64.class , INTEGER);
+ tempMap1.put(Uint16.class , INTEGER);
+ tempMap1.put(Uint32.class , INTEGER);
+ tempMap1.put(Uint64.class , INTEGER);
+ tempMap1.put(Uint8.class , INTEGER);
+ tempMap1.put(Decimal64.class , NUMBER);
+ tempMap1.put(EnumerationType.class , ENUM);
+ //TODO: Binary type
+
+ YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1);
+ }
+
+ public ModelGenerator(){
+ }
+
+ public JSONObject convertToJsonSchema(final Module module) throws IOException, JSONException {
+ JSONObject models = new JSONObject();
+ processContainers(module, models);
+ processRPCs(module, models);
+
+ return models;
+ }
+
+
+
+ private void processContainers(final Module module, final JSONObject models) throws IOException, JSONException {
+
+ String moduleName = module.getName();
+ Set<DataSchemaNode> childNodes = module.getChildNodes();
+
+ for(DataSchemaNode childNode : childNodes){
+ JSONObject moduleJSON=null;
+ String filename = childNode.getQName().getLocalName();
+ /*
+ * For every container in the module
+ */
+ if(childNode instanceof ContainerSchemaNode) {
+ moduleJSON = processContainer((ContainerSchemaNode)childNode, moduleName, true, models);
+ }
+
+ if(moduleJSON!=null) {
+ _logger.debug("Adding model for [{}]", filename);
+ moduleJSON.put("id", filename);
+ models.put(filename, moduleJSON);
+ }
+ }
+
+ }
+
+
+ /**
+ * Processes the RPCs of a module.
+ * Emits one model each, named &lt;rpcName&gt;-input.json
+ * and &lt;rpcName&gt;-output.json, for every RPC that contains
+ * input and output elements.
+ *
+ * @param module
+ * @throws JSONException
+ * @throws IOException
+ */
+ private void processRPCs(final Module module, final JSONObject models) throws JSONException, IOException {
+
+ Set<RpcDefinition> rpcs = module.getRpcs();
+ String moduleName = module.getName();
+ for(RpcDefinition rpc: rpcs) {
+
+ ContainerSchemaNode input = rpc.getInput();
+ if(input!=null) {
+ JSONObject inputJSON = processContainer(input, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-input";
+ inputJSON.put("id", filename);
+ //writeToFile(filename, inputJSON.toString(2), moduleName);
+ models.put(filename, inputJSON);
+ }
+
+ ContainerSchemaNode output = rpc.getOutput();
+ if(output!=null) {
+ JSONObject outputJSON = processContainer(output, moduleName, true, models);
+ String filename = rpc.getQName().getLocalName() + "-output";
+ outputJSON.put("id", filename);
+ models.put(filename, outputJSON);
+ }
+ }
+ }
+
+
+ /**
+ * Processes the container node and populates the moduleJSON
+ *
+ * @param container
+ * @param moduleName
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processContainer(final ContainerSchemaNode container, final String moduleName, final boolean addSchemaStmt, final JSONObject models) throws JSONException, IOException{
+ JSONObject moduleJSON = getSchemaTemplate();
+ if(addSchemaStmt) {
+ moduleJSON = getSchemaTemplate();
+ } else {
+ moduleJSON = new JSONObject();
+ }
+ moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
+
+ String containerDescription = container.getDescription();
+ moduleJSON.put(DESCRIPTION_KEY, containerDescription);
+
+ Set<DataSchemaNode> containerChildren = container.getChildNodes();
+ JSONObject properties = processChildren(containerChildren, moduleName, models);
+ moduleJSON.put(PROPERTIES_KEY, properties);
+ return moduleJSON;
+ }
+
+ /**
+ * Processes the nodes
+ * @param nodes
+ * @param moduleName
+ * @return
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processChildren(final Set<DataSchemaNode> nodes, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ JSONObject properties = new JSONObject();
+
+ for(DataSchemaNode node : nodes){
+ String name = node.getQName().getLocalName();
+ JSONObject property = null;
+ if(node instanceof LeafSchemaNode) {
+ property = processLeafNode((LeafSchemaNode)node);
+ } else if (node instanceof ListSchemaNode) {
+ property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
+
+ } else if (node instanceof LeafListSchemaNode) {
+ property = processLeafListNode((LeafListSchemaNode)node);
+
+ } else if (node instanceof ChoiceNode) {
+ property = processChoiceNode((ChoiceNode)node, moduleName, models);
+
+ } else if (node instanceof AnyXmlSchemaNode) {
+ property = processAnyXMLNode((AnyXmlSchemaNode)node);
+
+ } else if (node instanceof ContainerSchemaNode) {
+ property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
+
+ } else {
+ throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
+ }
+
+ property.putOpt(DESCRIPTION_KEY, node.getDescription());
+ properties.put(name, property);
+ }
+ return properties;
+ }
+
+ /**
+ *
+ * @param listNode
+ * @throws JSONException
+ */
+ private JSONObject processLeafListNode(final LeafListSchemaNode listNode) throws JSONException {
+ JSONObject props = new JSONObject();
+ props.put(TYPE_KEY, ARRAY_TYPE);
+
+ JSONObject itemsVal = new JSONObject();
+ processTypeDef(listNode.getType(), itemsVal);
+ props.put(ITEMS_KEY, itemsVal);
+
+ ConstraintDefinition constraints = listNode.getConstraints();
+ processConstraints(constraints, props);
+
+ return props;
}
- }
-
-
- /**
- * Process the RPCs for a Module
- * Spits out a file each of the name <rpcName>-input.json
- * and <rpcName>-output.json for each RPC that contains
- * input & output elements
- *
- * @param module
- * @throws JSONException
- * @throws IOException
- */
- private void processRPCs(Module module, JSONObject models) throws JSONException, IOException {
-
- Set<RpcDefinition> rpcs = module.getRpcs();
- String moduleName = module.getName();
- for(RpcDefinition rpc: rpcs) {
-
- ContainerSchemaNode input = rpc.getInput();
- if(input!=null) {
- JSONObject inputJSON = processContainer(input, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-input";
- inputJSON.put("id", filename);
- //writeToFile(filename, inputJSON.toString(2), moduleName);
- models.put(filename, inputJSON);
- }
-
- ContainerSchemaNode output = rpc.getOutput();
- if(output!=null) {
- JSONObject outputJSON = processContainer(output, moduleName, true, models);
- String filename = rpc.getQName().getLocalName() + "-output";
- outputJSON.put("id", filename);
- models.put(filename, outputJSON);
- }
+ /**
+ *
+ * @param choiceNode
+ * @param moduleName
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processChoiceNode(final ChoiceNode choiceNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<ChoiceCaseNode> cases = choiceNode.getCases();
+
+ JSONArray choiceProps = new JSONArray();
+ for(ChoiceCaseNode choiceCase: cases) {
+ String choiceName = choiceCase.getQName().getLocalName();
+ JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
+ JSONObject choiceObj = new JSONObject();
+ choiceObj.put(choiceName, choiceProp);
+ choiceObj.put(TYPE_KEY, OBJECT_TYPE);
+ choiceProps.put(choiceObj);
+ }
+
+ JSONObject oneOfProps = new JSONObject();
+ oneOfProps.put(ONE_OF_KEY, choiceProps);
+ oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+
+ return oneOfProps;
}
- }
-
-
- /**
- * Processes the container node and populates the moduleJSON
- *
- * @param container
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt, JSONObject models) throws JSONException, IOException{
- JSONObject moduleJSON = getSchemaTemplate();
- if(addSchemaStmt) {
- moduleJSON = getSchemaTemplate();
- } else {
- moduleJSON = new JSONObject();
+
+
+ /**
+ *
+ * @param constraints
+ * @param props
+ * @throws JSONException
+ */
+ private void processConstraints(final ConstraintDefinition constraints, final JSONObject props) throws JSONException {
+ boolean isMandatory = constraints.isMandatory();
+ props.put(REQUIRED_KEY, isMandatory);
+
+ Integer minElements = constraints.getMinElements();
+ Integer maxElements = constraints.getMaxElements();
+ if(minElements !=null) {
+ props.put(MIN_ITEMS, minElements);
+ }
+ if(maxElements !=null) {
+ props.put(MAX_ITEMS, maxElements);
+ }
}
- moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
-
- String containerDescription = container.getDescription();
- moduleJSON.put(DESCRIPTION_KEY, containerDescription);
-
- Set<DataSchemaNode> containerChildren = ((ContainerSchemaNode)container).getChildNodes();
- JSONObject properties = processChildren(containerChildren, moduleName, models);
- moduleJSON.put(PROPERTIES_KEY, properties);
- return moduleJSON;
- }
-
- /**
- * Processes the nodes
- * @param nodes
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChildren(Set<DataSchemaNode> nodes, String moduleName, JSONObject models) throws JSONException, IOException {
-
- JSONObject properties = new JSONObject();
-
- for(DataSchemaNode node : nodes){
- String name = node.getQName().getLocalName();
- JSONObject property = null;
- if(node instanceof LeafSchemaNode) {
- property = processLeafNode((LeafSchemaNode)node);
- } else if (node instanceof ListSchemaNode) {
- property = processListSchemaNode((ListSchemaNode)node, moduleName, models);
-
- } else if (node instanceof LeafListSchemaNode) {
- property = processLeafListNode((LeafListSchemaNode)node);
-
- } else if (node instanceof ChoiceNode) {
- property = processChoiceNode((ChoiceNode)node, moduleName, models);
-
- } else if (node instanceof AnyXmlSchemaNode) {
- property = processAnyXMLNode((AnyXmlSchemaNode)node);
-
- } else if (node instanceof ContainerSchemaNode) {
- property = processContainer((ContainerSchemaNode)node, moduleName, false, models);
-
- } else {
- throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass());
- }
-
- property.putOpt(DESCRIPTION_KEY, node.getDescription());
- properties.put(name, property);
+
+ /**
+ * Parses a ListSchema node.
+ *
+ * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
+ * must be in a separate JSON schema file. Hence, we have to write
+ * some properties to a new file, while continuing to process the rest.
+ *
+ * @param listNode
+ * @param moduleName
+ * @return
+ * @throws JSONException
+ * @throws IOException
+ */
+ private JSONObject processListSchemaNode(final ListSchemaNode listNode, final String moduleName, final JSONObject models) throws JSONException, IOException {
+
+ Set<DataSchemaNode> listChildren = listNode.getChildNodes();
+ String fileName = listNode.getQName().getLocalName();
+
+ JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
+ JSONObject childSchema = getSchemaTemplate();
+ childSchema.put(TYPE_KEY, OBJECT_TYPE);
+ childSchema.put(PROPERTIES_KEY, childSchemaProperties);
+
+ /*
+ * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
+ * must be in a separate JSON schema file. Hence, we have to write
+ * some properties to a new file, while continuing to process the rest.
+ */
+ //writeToFile(fileName, childSchema.toString(2), moduleName);
+ childSchema.put("id", fileName);
+ models.put(fileName, childSchema);
+
+
+ JSONObject listNodeProperties = new JSONObject();
+ listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
+
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY,fileName );
+ listNodeProperties.put(ITEMS_KEY, items);
+
+ return listNodeProperties;
+
}
- return properties;
- }
-
- /**
- *
- * @param listNode
- * @throws JSONException
- */
- private JSONObject processLeafListNode(LeafListSchemaNode listNode) throws JSONException {
- JSONObject props = new JSONObject();
- props.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject itemsVal = new JSONObject();
- processTypeDef(listNode.getType(), itemsVal);
- props.put(ITEMS_KEY, itemsVal);
-
- ConstraintDefinition constraints = listNode.getConstraints();
- processConstraints(constraints, props);
-
- return props;
- }
-
- /**
- *
- * @param choiceNode
- * @param moduleName
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processChoiceNode(ChoiceNode choiceNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<ChoiceCaseNode> cases = choiceNode.getCases();
-
- JSONArray choiceProps = new JSONArray();
- for(ChoiceCaseNode choiceCase: cases) {
- String choiceName = choiceCase.getQName().getLocalName();
- JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models);
- JSONObject choiceObj = new JSONObject();
- choiceObj.put(choiceName, choiceProp);
- choiceObj.put(TYPE_KEY, OBJECT_TYPE);
- choiceProps.put(choiceObj);
+
+ /**
+ *
+ * @param leafNode
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject processLeafNode(final LeafSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
+
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+
+ processConstraints(leafNode.getConstraints(), property);
+ processTypeDef(leafNode.getType(), property);
+
+ return property;
}
- JSONObject oneOfProps = new JSONObject();
- oneOfProps.put(ONE_OF_KEY, choiceProps);
- oneOfProps.put(TYPE_KEY, OBJECT_TYPE);
+ /**
+ *
+ * @param leafNode
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject processAnyXMLNode(final AnyXmlSchemaNode leafNode) throws JSONException {
+ JSONObject property = new JSONObject();
- return oneOfProps;
- }
+ String leafDescription = leafNode.getDescription();
+ property.put(DESCRIPTION_KEY, leafDescription);
+ processConstraints(leafNode.getConstraints(), property);
- /**
- *
- * @param constraints
- * @param props
- * @throws JSONException
- */
- private void processConstraints(ConstraintDefinition constraints, JSONObject props) throws JSONException {
- boolean isMandatory = constraints.isMandatory();
- props.put(REQUIRED_KEY, isMandatory);
+ return property;
+ }
- Integer minElements = constraints.getMinElements();
- Integer maxElements = constraints.getMaxElements();
- if(minElements !=null) {
- props.put(MIN_ITEMS, minElements);
+ /**
+ * Maps a YANG type definition onto the matching JSON-schema type keywords.
+ * @param leafTypeDef the YANG type of the leaf being processed
+ * @param property the JSON object to populate with type information
+ * @throws JSONException
+ */
+ private void processTypeDef(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+
+ if(leafTypeDef instanceof ExtendedType){
+ processExtendedType(leafTypeDef, property);
+ } else if (leafTypeDef instanceof EnumerationType) {
+ processEnumType((EnumerationType)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof BitsTypeDefinition) {
+ processBitsType((BitsTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof UnionTypeDefinition) {
+ processUnionType((UnionTypeDefinition)leafTypeDef, property);
+
+ } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
+ property.putOpt(TYPE_KEY, "object");
+ } else if (leafTypeDef instanceof BinaryTypeDefinition) {
+ processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
+ } else {
+ //System.out.println("In else: " + leafTypeDef.getClass());
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
+ if(jsonType==null) {
+ jsonType = "object";
+ }
+ property.putOpt(TYPE_KEY, jsonType);
+ }
}
- if(maxElements !=null) {
- props.put(MAX_ITEMS, maxElements);
+
+ /**
+ *
+ * @param leafTypeDef
+ * @param property
+ * @throws JSONException
+ */
+ private void processExtendedType(final TypeDefinition<?> leafTypeDef, final JSONObject property) throws JSONException {
+ Object leafBaseType = leafTypeDef.getBaseType();
+ if(leafBaseType instanceof ExtendedType){
+ //recursively process an extended type until we hit a base type
+ processExtendedType((TypeDefinition<?>)leafBaseType, property);
+ } else {
+ List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
+ for(LengthConstraint lengthConstraint: lengthConstraints) {
+ Number min = lengthConstraint.getMin();
+ Number max = lengthConstraint.getMax();
+ property.putOpt(MIN_LENGTH_KEY, min);
+ property.putOpt(MAX_LENGTH_KEY, max);
+ }
+ String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
+ property.putOpt(TYPE_KEY,jsonType );
+ }
+
}
- }
-
- /**
- * Parses a ListSchema node.
- *
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
- * must be in a separate JSON schema file. Hence, we have to write
- * some properties to a new file, while continuing to process the rest.
- *
- * @param listNode
- * @param moduleName
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models) throws JSONException, IOException {
-
- Set<DataSchemaNode> listChildren = listNode.getChildNodes();
- String fileName = listNode.getQName().getLocalName();
-
- JSONObject childSchemaProperties = processChildren(listChildren, moduleName, models);
- JSONObject childSchema = getSchemaTemplate();
- childSchema.put(TYPE_KEY, OBJECT_TYPE);
- childSchema.put(PROPERTIES_KEY, childSchemaProperties);
-
- /*
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties
- * must be in a separate JSON schema file. Hence, we have to write
- * some properties to a new file, while continuing to process the rest.
- */
- //writeToFile(fileName, childSchema.toString(2), moduleName);
- childSchema.put("id", fileName);
- models.put(fileName, childSchema);
-
-
- JSONObject listNodeProperties = new JSONObject();
- listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject items = new JSONObject();
- items.put(REF_KEY,fileName );
- listNodeProperties.put(ITEMS_KEY, items);
-
- return listNodeProperties;
-
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processLeafNode(LeafSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
- processTypeDef(leafNode.getType(), property);
-
- return property;
- }
-
- /**
- *
- * @param leafNode
- * @return
- * @throws JSONException
- */
- private JSONObject processAnyXMLNode(AnyXmlSchemaNode leafNode) throws JSONException {
- JSONObject property = new JSONObject();
-
- String leafDescription = leafNode.getDescription();
- property.put(DESCRIPTION_KEY, leafDescription);
-
- processConstraints(leafNode.getConstraints(), property);
-
- return property;
- }
-
- /**
- * @param property
- * @throws JSONException
- */
- private void processTypeDef(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
-
- if(leafTypeDef instanceof ExtendedType){
- processExtendedType(leafTypeDef, property);
- } else if (leafTypeDef instanceof EnumerationType) {
- processEnumType((EnumerationType)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof BitsTypeDefinition) {
- processBitsType((BitsTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof UnionTypeDefinition) {
- processUnionType((UnionTypeDefinition)leafTypeDef, property);
-
- } else if (leafTypeDef instanceof IdentityrefTypeDefinition) {
- property.putOpt(TYPE_KEY, "object");
- } else if (leafTypeDef instanceof BinaryTypeDefinition) {
- processBinaryType((BinaryTypeDefinition)leafTypeDef, property);
- } else {
- //System.out.println("In else: " + leafTypeDef.getClass());
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafTypeDef.getClass());
- if(jsonType==null) {
- jsonType = "object";
- }
- property.putOpt(TYPE_KEY, jsonType);
+
+ /**
+ * Maps a YANG binary type to a base64-encoded JSON string property.
+ * @param binaryType the YANG binary type definition
+ * @param property the JSON object to populate
+ * @throws JSONException
+ */
+ private void processBinaryType(final BinaryTypeDefinition binaryType, final JSONObject property) throws JSONException {
+ property.put(TYPE_KEY, STRING);
+ JSONObject media = new JSONObject();
+ media.put(BINARY_ENCODING_KEY, BASE_64);
+ property.put(MEDIA_KEY, media);
}
- }
-
- /**
- *
- * @param leafTypeDef
- * @param property
- * @throws JSONException
- */
- private void processExtendedType(TypeDefinition<?> leafTypeDef, JSONObject property) throws JSONException {
- Object leafBaseType = leafTypeDef.getBaseType();
- if(leafBaseType instanceof ExtendedType){
- //recursively process an extended type until we hit a base type
- processExtendedType((TypeDefinition<?>)leafBaseType, property);
- } else {
- List<LengthConstraint> lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints();
- for(LengthConstraint lengthConstraint: lengthConstraints) {
- Number min = lengthConstraint.getMin();
- Number max = lengthConstraint.getMax();
- property.putOpt(MIN_LENGTH_KEY, min);
- property.putOpt(MAX_LENGTH_KEY, max);
- }
- String jsonType = YANG_TYPE_TO_JSON_TYPE_MAPPING.get(leafBaseType.getClass());
- property.putOpt(TYPE_KEY,jsonType );
+
+ /**
+ *
+ * @param enumLeafType
+ * @param property
+ * @throws JSONException
+ */
+ private void processEnumType(final EnumerationType enumLeafType, final JSONObject property) throws JSONException {
+ List<EnumPair> enumPairs = enumLeafType.getValues();
+ List<String> enumNames = new ArrayList<String>();
+ for(EnumPair enumPair: enumPairs) {
+ enumNames.add(enumPair.getName());
+ }
+ property.putOpt(ENUM, new JSONArray(enumNames));
}
- }
-
- /*
- *
- */
- private void processBinaryType(BinaryTypeDefinition binaryType, JSONObject property) throws JSONException {
- property.put(TYPE_KEY, STRING);
- JSONObject media = new JSONObject();
- media.put(BINARY_ENCODING_KEY, BASE_64);
- property.put(MEDIA_KEY, media);
- }
-
- /**
- *
- * @param enumLeafType
- * @param property
- * @throws JSONException
- */
- private void processEnumType(EnumerationType enumLeafType, JSONObject property) throws JSONException {
- List<EnumPair> enumPairs = enumLeafType.getValues();
- List<String> enumNames = new ArrayList<String>();
- for(EnumPair enumPair: enumPairs) {
- enumNames.add(enumPair.getName());
+ /**
+ *
+ * @param bitsType
+ * @param property
+ * @throws JSONException
+ */
+ private void processBitsType(final BitsTypeDefinition bitsType, final JSONObject property) throws JSONException{
+ property.put(TYPE_KEY, ARRAY_TYPE);
+ property.put(MIN_ITEMS, 0);
+ property.put(UNIQUE_ITEMS_KEY, true);
+ JSONArray enumValues = new JSONArray();
+
+ List<Bit> bits = bitsType.getBits();
+ for(Bit bit: bits) {
+ enumValues.put(bit.getName());
+ }
+ JSONObject itemsValue = new JSONObject();
+ itemsValue.put(ENUM, enumValues);
+ property.put(ITEMS_KEY, itemsValue);
}
- property.putOpt(ENUM, new JSONArray(enumNames));
- }
-
- /**
- *
- * @param bitsType
- * @param property
- * @throws JSONException
- */
- private void processBitsType(BitsTypeDefinition bitsType, JSONObject property) throws JSONException{
- property.put(TYPE_KEY, ARRAY_TYPE);
- property.put(MIN_ITEMS, 0);
- property.put(UNIQUE_ITEMS_KEY, true);
- JSONArray enumValues = new JSONArray();
-
- List<Bit> bits = bitsType.getBits();
- for(Bit bit: bits) {
- enumValues.put(bit.getName());
+
+
+ /**
+ *
+ * @param unionType
+ * @param property
+ * @throws JSONException
+ */
+ private void processUnionType(final UnionTypeDefinition unionType, final JSONObject property) throws JSONException{
+
+ List<TypeDefinition<?>> unionTypes = unionType.getTypes();
+ JSONArray unionArray = new JSONArray();
+ for(TypeDefinition<?> typeDef: unionTypes) {
+ unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+ }
+ property.put(TYPE_KEY, unionArray);
}
- JSONObject itemsValue = new JSONObject();
- itemsValue.put(ENUM, enumValues);
- property.put(ITEMS_KEY, itemsValue);
- }
-
-
- /**
- *
- * @param unionType
- * @param property
- * @throws JSONException
- */
- private void processUnionType(UnionTypeDefinition unionType, JSONObject property) throws JSONException{
-
- List<TypeDefinition<?>> unionTypes = unionType.getTypes();
- JSONArray unionArray = new JSONArray();
- for(TypeDefinition<?> typeDef: unionTypes) {
- unionArray.put(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass()));
+
+
+ /**
+ * Helper method to generate a pre-filled
+ * JSON schema object.
+ * @return
+ * @throws JSONException
+ */
+ private JSONObject getSchemaTemplate() throws JSONException {
+ JSONObject schemaJSON = new JSONObject();
+ schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
+
+ return schemaJSON;
}
- property.put(TYPE_KEY, unionArray);
- }
-
-
- /**
- * Helper method to generate a pre-filled
- * JSON schema object.
- * @return
- * @throws JSONException
- */
- private JSONObject getSchemaTemplate() throws JSONException {
- JSONObject schemaJSON = new JSONObject();
- schemaJSON.put(SCHEMA_KEY, SCHEMA_URL);
-
- return schemaJSON;
- }
}
</init-param>
<init-param>
<param-name>cors.allowed.headers</param-name>
- <param-value>Content-Type,X-Requested-With,accept,authorization,
+ <param-value>Content-Type,X-Requested-With,accept,authorization,
origin,Origin,Access-Control-Request-Method,Access-Control-Request-Headers</param-value>
</init-param>
<init-param>
/**
-* Generated file
+ * Generated file
-* Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
-* Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
-* Generated at: Wed Feb 05 11:31:30 CET 2014
-*
-* Do not modify this file unless it is present under src/main directory
-*/
+ * Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
+ * Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
+ * Generated at: Wed Feb 05 11:31:30 CET 2014
+ *
+ * Do not modify this file unless it is present under src/main directory
+ */
package org.opendaylight.controller.config.yang.config.kitchen_service.impl;
-import org.opendaylight.controller.config.yang.config.kitchen_service.impl.AbstractKitchenServiceModule;
import org.opendaylight.controller.sample.kitchen.api.EggsType;
import org.opendaylight.controller.sample.kitchen.api.KitchenService;
import org.opendaylight.controller.sample.kitchen.impl.KitchenServiceImpl;
import org.slf4j.LoggerFactory;
/**
-*
-*/
+ *
+ */
public final class KitchenServiceModule extends AbstractKitchenServiceModule {
private static final Logger log = LoggerFactory.getLogger(KitchenServiceModule.class);
- public KitchenServiceModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ public KitchenServiceModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
super(identifier, dependencyResolver);
}
- public KitchenServiceModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier, org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- KitchenServiceModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public KitchenServiceModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final KitchenServiceModule oldModule, final java.lang.AutoCloseable oldInstance) {
super(identifier, dependencyResolver, oldModule, oldInstance);
}
}
@Override
- public boolean makeBreakfast( EggsType eggs, Class<? extends ToastType> toast, int toastDoneness ) {
+ public boolean makeBreakfast( final EggsType eggs, final Class<? extends ToastType> toast, final int toastDoneness ) {
return kitchenService.makeBreakfast( eggs, toast, toastDoneness );
}
}
/**
-* Generated file
+ * Generated file
-* Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
-* Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
-* Generated at: Wed Feb 05 11:31:30 CET 2014
-*
-* Do not modify this file unless it is present under src/main directory
-*/
+ * Generated from: yang module name: toaster-consumer-impl yang module local name: toaster-consumer-impl
+ * Generated by: org.opendaylight.controller.config.yangjmxgenerator.plugin.JMXGenerator
+ * Generated at: Wed Feb 05 11:31:30 CET 2014
+ *
+ * Do not modify this file unless it is present under src/main directory
+ */
package org.opendaylight.controller.config.yang.config.kitchen_service.impl;
-import org.opendaylight.controller.config.yang.config.kitchen_service.impl.AbstractKitchenServiceModuleFactory;
-
/**
-*
-*/
-public class KitchenServiceModuleFactory extends AbstractKitchenServiceModuleFactory
-{
-
+ *
+ */
+public class KitchenServiceModuleFactory extends AbstractKitchenServiceModuleFactory {
}
<type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-data-broker</type>
<name>ref_binding-data-broker</name>
</data-broker>
-
+
<notification-service>
<type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
binding:binding-notification-service
final OpendaylightGroupStatisticsService groupStatsService,
final OpendaylightMeterStatisticsService meterStatsService,
final OpendaylightPortStatisticsService portStatsService,
- final OpendaylightQueueStatisticsService queueStatsService,
+ final OpendaylightQueueStatisticsService queueStatsService,
final StatisticsRequestScheduler srScheduler) {
this.dps = Preconditions.checkNotNull(dps);
this.targetNodeKey = Preconditions.checkNotNull(nodeKey);
this.srScheduler.addRequestToSchedulerQueue(flowTableStats);
this.srScheduler.addRequestToSchedulerQueue(flowStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(nodeConnectorStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(groupStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(groupDescStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(meterStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(meterConfigStats);
-
+
this.srScheduler.addRequestToSchedulerQueue(queueStats);
}
-
+
public synchronized void start(final Timer timer) {
flowStats.start(dps);
groupDescStats.start(dps);
meterStats.close();
queueStats.close();
- //Clean up queued statistics request from scheduler queue
+ //Clean up queued statistics request from scheduler queue
srScheduler.removeRequestsFromSchedulerQueue(this.getNodeRef());
logger.debug("Statistics handler for {} shut down", targetNodeKey.getId());
private OpendaylightFlowTableStatisticsService flowTableStatsService;
private OpendaylightQueueStatisticsService queueStatsService;
-
+
private final StatisticsRequestScheduler srScheduler;
public StatisticsProvider(final DataProviderService dataService) {
flowTableStatsService = rpcRegistry.getRpcService(OpendaylightFlowTableStatisticsService.class);
queueStatsService = rpcRegistry.getRpcService(OpendaylightQueueStatisticsService.class);
this.srScheduler.start();
-
+
// Start receiving notifications
this.listenerRegistration = nps.registerNotificationListener(this.updateCommiter);
/**
* Main responsibility of the class is to check the MD-SAL data store read/write
- * transaction accumulation level and send statistics request if number of pending
+ * transaction accumulation level and send statistics request if number of pending
* read/write transactions are zero.
* @author avishnoi@in.ibm.com
*
private final Timer timer = new Timer("request-monitor", true);
// We need ordered retrieval, and O(1) contains operation
- private final Map<AbstractStatsTracker,Integer> requestQueue =
+ private final Map<AbstractStatsTracker,Integer> requestQueue =
Collections.synchronizedMap(new LinkedHashMap<AbstractStatsTracker,Integer>());
-
+
private Long PendingTransactions;
-
+
private long lastRequestTime = System.nanoTime();
-
+
private static final long REQUEST_MONITOR_INTERVAL = 1000;
-
+
private final TimerTask task = new TimerTask() {
@Override
public void run() {
public StatisticsRequestScheduler(){
PendingTransactions = (long) 0;
}
-
+
public void addRequestToSchedulerQueue(AbstractStatsTracker statsRequest){
requestQueue.put(statsRequest, null);
}
-
+
public void removeRequestsFromSchedulerQueue(NodeRef node){
AbstractStatsTracker stats = null;
synchronized(requestQueue){
}
@Override
public void onStatusUpdated(DataModificationTransaction transaction, TransactionStatus status) {
-
+
AbstractStatsTracker stats = null;
synchronized(PendingTransactions){
switch(status){
}
sendStatsRequest(stats);
}
-
+
private void sendStatsRequest(AbstractStatsTracker stats){
if(stats != null){
try{
LLDPDiscoveryListener(LLDPDiscoveryProvider manager) {
this.manager = manager;
}
-
+
public void onPacketReceived(PacketReceived lldp) {
NodeConnectorRef src = LLDPDiscoveryUtils.lldpToNodeConnectorRef(lldp.getPayload());
if(src != null) {
ldb.setDestination(lldp.getIngress());
ldb.setSource(new NodeConnectorRef(src));
LinkDiscovered ld = ldb.build();
-
+
manager.getNotificationService().publish(ld);
LLDPLinkAger.getInstance().put(ld);
}
}
-
+
}
public static LLDPLinkAger getInstance() {
return instance;
}
-
+
public void put(LinkDiscovered link) {
Date expires = new Date();
expires.setTime(expires.getTime() + LLDPDiscoveryUtils.LLDP_EXPIRATION_TIME);
linkToDate.put(link, expires);
}
-
+
public void close() {
timer.cancel();
}
-
+
private class LLDPAgingTask extends TimerTask {
@Override
}
}
}
-
+
}
-
+
}
}
public class LLDPDiscoveryUtils {
static Logger LOG = LoggerFactory.getLogger(LLDPDiscoveryUtils.class);
-
+
public static final Long LLDP_INTERVAL = (long) (1000*5); // Send LLDP every five seconds
public static final Long LLDP_EXPIRATION_TIME = LLDP_INTERVAL*3; // Let up to three intervals pass before we decide we are expired.
-
+
public static String macToString(byte[] mac) {
StringBuilder b = new StringBuilder();
for (int i = 0; i < mac.length; i++) {
return b.toString();
}
-
+
public static NodeConnectorRef lldpToNodeConnectorRef(byte[] payload) {
Ethernet ethPkt = new Ethernet();
try {
if (ethPkt.getPayload() instanceof LLDP) {
LLDP lldp = (LLDP) ethPkt.getPayload();
-
+
try {
NodeId srcNodeId = null;
NodeConnectorId srcNodeConnectorId = null;
import static org.opendaylight.md.controller.topology.manager.FlowCapableNodeMapping.toTopologyNode;
import static org.opendaylight.md.controller.topology.manager.FlowCapableNodeMapping.toTopologyNodeId;
-import java.util.concurrent.Future;
-
import org.opendaylight.controller.md.sal.binding.util.TypeSafeDataReader;
-import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeConnectorUpdated;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeUpdated;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.topology.discovery.rev130819.FlowTopologyDiscoveryListener;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeUpdated;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.OpendaylightInventoryListener;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TopologyId;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TpId;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyBuilder;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeKey;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPoint;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointKey;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.common.RpcResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.JdkFutureAdapters;
-
-class FlowCapableTopologyExporter implements //
- FlowTopologyDiscoveryListener, //
- OpendaylightInventoryListener //
-{
- protected final static Logger LOG = LoggerFactory.getLogger(FlowCapableTopologyExporter.class);
- public static TopologyKey topology = new TopologyKey(new TopologyId("flow:1"));
+import com.google.common.base.Preconditions;
- // FIXME: Flow capable topology exporter should use transaction chaining API
- private DataProviderService dataService;
+class FlowCapableTopologyExporter implements FlowTopologyDiscoveryListener, OpendaylightInventoryListener {
+ private final InstanceIdentifier<Topology> topology;
+ private final OperationProcessor processor;
- public DataProviderService getDataService() {
- return dataService;
- }
-
- public void setDataService(final DataProviderService dataService) {
- this.dataService = dataService;
- }
-
- private InstanceIdentifier<Topology> topologyPath;
-
- public void start() {
- TopologyBuilder tb = new TopologyBuilder();
- tb.setKey(topology);
- topologyPath = InstanceIdentifier.builder(NetworkTopology.class).child(Topology.class, topology).build();
- Topology top = tb.build();
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.putOperationalData(topologyPath, top);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ FlowCapableTopologyExporter(final OperationProcessor processor, final InstanceIdentifier<Topology> topology) {
+ this.processor = Preconditions.checkNotNull(processor);
+ this.topology = Preconditions.checkNotNull(topology);
}
@Override
- public synchronized void onNodeRemoved(final NodeRemoved notification) {
- NodeId nodeId = toTopologyNodeId(getNodeKey(notification.getNodeRef()).getId());
- InstanceIdentifier<Node> nodeInstance = toNodeIdentifier(notification.getNodeRef());
-
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.removeOperationalData(nodeInstance);
- removeAffectedLinks(tx, nodeId);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ public void onNodeRemoved(final NodeRemoved notification) {
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ NodeId nodeId = toTopologyNodeId(getNodeKey(notification.getNodeRef()).getId());
+ InstanceIdentifier<Node> nodeInstance = toNodeIdentifier(notification.getNodeRef());
+ transaction.removeOperationalData(nodeInstance);
+ removeAffectedLinks(transaction, nodeId);
+ }
+ });
}
@Override
- public synchronized void onNodeUpdated(final NodeUpdated notification) {
+ public void onNodeUpdated(final NodeUpdated notification) {
FlowCapableNodeUpdated fcnu = notification.getAugmentation(FlowCapableNodeUpdated.class);
if (fcnu != null) {
- Node node = toTopologyNode(toTopologyNodeId(notification.getId()), notification.getNodeRef());
- InstanceIdentifier<Node> path = getNodePath(toTopologyNodeId(notification.getId()));
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.putOperationalData(path, node);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ Node node = toTopologyNode(toTopologyNodeId(notification.getId()), notification.getNodeRef());
+ InstanceIdentifier<Node> path = getNodePath(toTopologyNodeId(notification.getId()));
+ transaction.putOperationalData(path, node);
+ }
+ });
}
}
@Override
- public synchronized void onNodeConnectorRemoved(final NodeConnectorRemoved notification) {
- InstanceIdentifier<TerminationPoint> tpInstance = toTerminationPointIdentifier(notification
- .getNodeConnectorRef());
- TpId tpId = toTerminationPointId(getNodeConnectorKey(notification.getNodeConnectorRef()).getId());
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.removeOperationalData(tpInstance);
- removeAffectedLinks(tx, tpId);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ public void onNodeConnectorRemoved(final NodeConnectorRemoved notification) {
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ InstanceIdentifier<TerminationPoint> tpInstance = toTerminationPointIdentifier(notification
+ .getNodeConnectorRef());
+ TpId tpId = toTerminationPointId(getNodeConnectorKey(notification.getNodeConnectorRef()).getId());
+ transaction.removeOperationalData(tpInstance);
+ removeAffectedLinks(transaction, tpId);
+ }
+ });
}
@Override
- public synchronized void onNodeConnectorUpdated(final NodeConnectorUpdated notification) {
- FlowCapableNodeConnectorUpdated fcncu = notification.getAugmentation(FlowCapableNodeConnectorUpdated.class);
+ public void onNodeConnectorUpdated(final NodeConnectorUpdated notification) {
+ final FlowCapableNodeConnectorUpdated fcncu = notification.getAugmentation(FlowCapableNodeConnectorUpdated.class);
if (fcncu != null) {
- NodeId nodeId = toTopologyNodeId(getNodeKey(notification.getNodeConnectorRef()).getId());
- TerminationPoint point = toTerminationPoint(toTerminationPointId(notification.getId()),
- notification.getNodeConnectorRef());
- InstanceIdentifier<TerminationPoint> path = tpPath(nodeId, point.getKey().getTpId());
-
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.putOperationalData(path, point);
- if ((fcncu.getState() != null && fcncu.getState().isLinkDown())
- || (fcncu.getConfiguration() != null && fcncu.getConfiguration().isPORTDOWN())) {
- removeAffectedLinks(tx, point.getTpId());
- }
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ NodeId nodeId = toTopologyNodeId(getNodeKey(notification.getNodeConnectorRef()).getId());
+ TerminationPoint point = toTerminationPoint(toTerminationPointId(notification.getId()),
+ notification.getNodeConnectorRef());
+ InstanceIdentifier<TerminationPoint> path = tpPath(nodeId, point.getKey().getTpId());
+
+ transaction.putOperationalData(path, point);
+ if ((fcncu.getState() != null && fcncu.getState().isLinkDown())
+ || (fcncu.getConfiguration() != null && fcncu.getConfiguration().isPORTDOWN())) {
+ removeAffectedLinks(transaction, point.getTpId());
+ }
+ }
+ });
}
}
@Override
- public synchronized void onLinkDiscovered(final LinkDiscovered notification) {
- Link link = toTopologyLink(notification);
- InstanceIdentifier<Link> path = linkPath(link);
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.putOperationalData(path, link);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
-
+ public void onLinkDiscovered(final LinkDiscovered notification) {
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ Link link = toTopologyLink(notification);
+ InstanceIdentifier<Link> path = linkPath(link);
+ transaction.putOperationalData(path, link);
+ }
+ });
}
@Override
- public synchronized void onLinkOverutilized(final LinkOverutilized notification) {
+ public void onLinkOverutilized(final LinkOverutilized notification) {
// NOOP
}
@Override
- public synchronized void onLinkRemoved(final LinkRemoved notification) {
- InstanceIdentifier<Link> path = linkPath(toTopologyLink(notification));
- DataModificationTransaction tx = dataService.beginTransaction();
- tx.removeOperationalData(path);
- listenOnTransactionState(tx.getIdentifier(),tx.commit());
+ public void onLinkRemoved(final LinkRemoved notification) {
+ processor.enqueueOperation(new TopologyOperation() {
+ @Override
+ public void applyOperation(final DataModificationTransaction transaction) {
+ transaction.removeOperationalData(linkPath(toTopologyLink(notification)));
+ }
+ });
}
@Override
- public synchronized void onLinkUtilizationNormal(final LinkUtilizationNormal notification) {
+ public void onLinkUtilizationNormal(final LinkUtilizationNormal notification) {
// NOOP
}
- private static InstanceIdentifier<Node> toNodeIdentifier(final NodeRef ref) {
+ private InstanceIdentifier<Node> toNodeIdentifier(final NodeRef ref) {
org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey invNodeKey = getNodeKey(ref);
-
NodeKey nodeKey = new NodeKey(toTopologyNodeId(invNodeKey.getId()));
- return InstanceIdentifier.builder(NetworkTopology.class).child(Topology.class, topology)
- .child(Node.class, nodeKey).build();
+ return topology.child(Node.class, nodeKey);
}
private InstanceIdentifier<TerminationPoint> toTerminationPointIdentifier(final NodeConnectorRef ref) {
private void removeAffectedLinks(final DataModificationTransaction transaction, final NodeId id) {
TypeSafeDataReader reader = TypeSafeDataReader.forReader(transaction);
-
- Topology topologyData = reader.readOperationalData(topologyPath);
- if (topologyData == null) {
- return;
- }
- for (Link link : topologyData.getLink()) {
- if (id.equals(link.getSource().getSourceNode()) || id.equals(link.getDestination().getDestNode())) {
- InstanceIdentifier<Link> path = topologyPath.child(Link.class, link.getKey());
- transaction.removeOperationalData(path);
+ Topology topologyData = reader.readOperationalData(topology);
+ if (topologyData != null) {
+ for (Link link : topologyData.getLink()) {
+ if (id.equals(link.getSource().getSourceNode()) || id.equals(link.getDestination().getDestNode())) {
+ transaction.removeOperationalData(linkPath(link));
+ }
}
}
}
private void removeAffectedLinks(final DataModificationTransaction transaction, final TpId id) {
TypeSafeDataReader reader = TypeSafeDataReader.forReader(transaction);
- Topology topologyData = reader.readOperationalData(topologyPath);
- if (topologyData == null) {
- return;
- }
- for (Link link : topologyData.getLink()) {
- if (id.equals(link.getSource().getSourceTp()) || id.equals(link.getDestination().getDestTp())) {
- InstanceIdentifier<Link> path = topologyPath.child(Link.class, link.getKey());
- transaction.removeOperationalData(path);
+ Topology topologyData = reader.readOperationalData(topology);
+ if (topologyData != null) {
+ for (Link link : topologyData.getLink()) {
+ if (id.equals(link.getSource().getSourceTp()) || id.equals(link.getDestination().getDestTp())) {
+ transaction.removeOperationalData(linkPath(link));
+ }
}
}
}
private InstanceIdentifier<Node> getNodePath(final NodeId nodeId) {
- NodeKey nodeKey = new NodeKey(nodeId);
- return InstanceIdentifier.builder(NetworkTopology.class).child(Topology.class, topology)
- .child(Node.class, nodeKey).build();
+ return topology.child(Node.class, new NodeKey(nodeId));
}
private InstanceIdentifier<TerminationPoint> tpPath(final NodeId nodeId, final TpId tpId) {
NodeKey nodeKey = new NodeKey(nodeId);
TerminationPointKey tpKey = new TerminationPointKey(tpId);
- return InstanceIdentifier.builder(NetworkTopology.class).child(Topology.class, topology)
- .child(Node.class, nodeKey).child(TerminationPoint.class, tpKey).build();
+ return topology.child(Node.class, nodeKey).child(TerminationPoint.class, tpKey);
}
private InstanceIdentifier<Link> linkPath(final Link link) {
- InstanceIdentifier<Link> linkInstanceId = InstanceIdentifier.builder(NetworkTopology.class)
- .child(Topology.class, topology).child(Link.class, link.getKey()).build();
- return linkInstanceId;
- }
-
- /**
- * @param txId transaction identificator
- * @param future transaction result
- */
- private static void listenOnTransactionState(final Object txId, Future<RpcResult<TransactionStatus>> future) {
- Futures.addCallback(JdkFutureAdapters.listenInPoolThread(future),new FutureCallback<RpcResult<TransactionStatus>>() {
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error("Topology export failed for Tx:{}", txId, t);
-
- }
-
- @Override
- public void onSuccess(RpcResult<TransactionStatus> result) {
- if(!result.isSuccessful()) {
- LOG.error("Topology export failed for Tx:{}", txId);
- }
- }
- });
+ return topology.child(Link.class, link.getKey());
}
}
*/
package org.opendaylight.md.controller.topology.manager;
+import java.util.concurrent.ExecutionException;
+
import org.opendaylight.controller.sal.binding.api.AbstractBindingAwareProvider;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ProviderContext;
import org.opendaylight.controller.sal.binding.api.NotificationProviderService;
+import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TopologyId;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyBuilder;
+import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
import org.opendaylight.yangtools.concepts.Registration;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.NotificationListener;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
public class FlowCapableTopologyProvider extends AbstractBindingAwareProvider implements AutoCloseable {
private final static Logger LOG = LoggerFactory.getLogger(FlowCapableTopologyProvider.class);
+ private Registration<NotificationListener> listenerRegistration;
+ private Thread thread;
- private DataProviderService dataService;
-
- public DataProviderService getDataService() {
- return this.dataService;
- }
+ /**
+ * Gets called on start of a bundle.
+ *
+ * @param session
+ */
+ @Override
+ public synchronized void onSessionInitiated(final ProviderContext session) {
+ final DataProviderService dataService = session.getSALService(DataProviderService.class);
+ final NotificationProviderService notificationService = session.getSALService(NotificationProviderService.class);
- public void setDataService(final DataProviderService dataService) {
- this.dataService = dataService;
- }
+ final String name = "flow:1";
+ final TopologyKey key = new TopologyKey(new TopologyId(name));
+ final InstanceIdentifier<Topology> path = InstanceIdentifier
+ .builder(NetworkTopology.class)
+ .child(Topology.class, key)
+ .build();
- private NotificationProviderService notificationService;
+ final OperationProcessor processor = new OperationProcessor(dataService);
+ final FlowCapableTopologyExporter listener = new FlowCapableTopologyExporter(processor, path);
+ this.listenerRegistration = notificationService.registerNotificationListener(listener);
- public NotificationProviderService getNotificationService() {
- return this.notificationService;
- }
+ final DataModificationTransaction tx = dataService.beginTransaction();
+ tx.putOperationalData(path, new TopologyBuilder().setKey(key).build());
+ try {
+ tx.commit().get();
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.warn("Initial topology export failed, continuing anyway", e);
+ }
- public void setNotificationService(final NotificationProviderService notificationService) {
- this.notificationService = notificationService;
+ thread = new Thread(processor);
+ thread.setDaemon(true);
+ thread.setName("FlowCapableTopologyExporter-" + name);
+ thread.start();
}
- private final FlowCapableTopologyExporter exporter = new FlowCapableTopologyExporter();
- private Registration<NotificationListener> listenerRegistration;
-
@Override
- public void close() {
-
- FlowCapableTopologyProvider.LOG.info("FlowCapableTopologyProvider stopped.");
- dataService = null;
- notificationService = null;
+ public synchronized void close() throws InterruptedException {
+ LOG.info("FlowCapableTopologyProvider stopped.");
if (this.listenerRegistration != null) {
try {
this.listenerRegistration.close();
} catch (Exception e) {
- throw new IllegalStateException("Exception during close of listener registration.",e);
+ LOG.error("Failed to close listener registration", e);
}
+ listenerRegistration = null;
+ }
+ if (thread != null) {
+ thread.interrupt();
+ thread.join();
+ thread = null;
}
- }
-
- /**
- * Gets called on start of a bundle.
- *
- * @param session
- */
- @Override
- public void onSessionInitiated(final ProviderContext session) {
- dataService = session.getSALService(DataProviderService.class);
- notificationService = session.getSALService(NotificationProviderService.class);
- this.exporter.setDataService(dataService);
- this.exporter.start();
- this.listenerRegistration = notificationService.registerNotificationListener(this.exporter);
- ;
}
/**
*/
@Override
public void stopImpl(final BundleContext context) {
- this.close();
+ try {
+ this.close();
+ } catch (InterruptedException e) {
+ LOG.error("Failed to stop provider", e);
+ }
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.md.controller.topology.manager;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
+import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
+import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.yangtools.yang.common.RpcResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Single-threaded consumer of {@link TopologyOperation}s. Operations are taken
+ * from a bounded queue and batched (up to {@link #MAX_TRANSACTION_OPERATIONS}
+ * per batch) into a single data store transaction, which is committed
+ * synchronously before the next batch is started.
+ */
+final class OperationProcessor implements Runnable {
+    private static final Logger LOG = LoggerFactory.getLogger(OperationProcessor.class);
+    // Maximum number of operations merged into a single transaction
+    private static final int MAX_TRANSACTION_OPERATIONS = 100;
+    // Queue bound; producers block in enqueueOperation() once it is full
+    private static final int OPERATION_QUEUE_DEPTH = 500;
+
+    private final BlockingQueue<TopologyOperation> queue = new LinkedBlockingQueue<>(OPERATION_QUEUE_DEPTH);
+    // FIXME: Flow capable topology exporter should use transaction chaining API
+    private final DataProviderService dataService;
+
+    OperationProcessor(final DataProviderService dataService) {
+        this.dataService = Preconditions.checkNotNull(dataService);
+    }
+
+    /**
+     * Enqueue an operation for execution on the processor thread. Blocks
+     * while the queue is at capacity.
+     *
+     * @param task operation to execute
+     */
+    void enqueueOperation(final TopologyOperation task) {
+        try {
+            queue.put(task);
+        } catch (InterruptedException e) {
+            // Restore the interrupt status so the caller can observe it
+            Thread.currentThread().interrupt();
+            LOG.warn("Interrupted while submitting task {}", task, e);
+        }
+    }
+
+    @Override
+    public void run() {
+        try {
+            for (;;) {
+                // Block until at least one operation is available
+                TopologyOperation op = queue.take();
+
+                LOG.debug("New operations available, starting transaction");
+                final DataModificationTransaction tx = dataService.beginTransaction();
+
+                int ops = 0;
+                do {
+                    op.applyOperation(tx);
+
+                    ops++;
+                    if (ops < MAX_TRANSACTION_OPERATIONS) {
+                        // Opportunistically drain more queued work into this transaction
+                        op = queue.poll();
+                    } else {
+                        op = null;
+                    }
+                } while (op != null);
+
+                LOG.debug("Processed {} operations, submitting transaction", ops);
+
+                try {
+                    final RpcResult<TransactionStatus> s = tx.commit().get();
+                    if (!s.isSuccessful()) {
+                        LOG.error("Topology export failed for Tx:{}", tx.getIdentifier());
+                    }
+                } catch (ExecutionException e) {
+                    LOG.error("Topology export transaction {} failed", tx.getIdentifier(), e.getCause());
+                }
+            }
+        } catch (InterruptedException e) {
+            // Re-assert the interrupt status for any outer observer of this thread
+            Thread.currentThread().interrupt();
+            LOG.info("Interrupted processing, terminating", e);
+        }
+
+        // Drain all events, making sure any blocked threads are unblocked
+        while (!queue.isEmpty()) {
+            queue.poll();
+        }
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.md.controller.topology.manager;
+
+import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
+
+/**
+ * Internal interface for submitted operations. Implementations of this
+ * interface are enqueued and batched into data store transactions.
+ * Several operations may be applied to the same transaction before it
+ * is committed.
+ */
+interface TopologyOperation {
+ /**
+ * Execute the operation on top of the transaction.
+ *
+ * @param transaction Datastore transaction to which the operation's changes are applied
+ */
+ void applyOperation(DataModificationTransaction transaction);
+}
\ No newline at end of file
@Override
public Node connect (String type, String connectionIdentifier, Map<ConnectionConstants, String> params) {
IPluginInConnectionService s = pluginService.get(type);
- if (s != null) return s.connect(connectionIdentifier, params);
+ if (s != null) {
+ return s.connect(connectionIdentifier, params);
+ }
return null;
}
for (String pluginType : this.pluginService.keySet()) {
IPluginInConnectionService s = pluginService.get(pluginType);
Node node = s.connect(connectionIdentifier, params);
- if (node != null) return node;
+ if (node != null) {
+ return node;
+ }
}
}
return null;
<module>opendaylight/hosttracker_new/implementation</module>
<module>opendaylight/containermanager/api</module>
<module>opendaylight/containermanager/implementation</module>
+ <module>opendaylight/containermanager/shell</module>
<module>opendaylight/appauth</module>
<module>opendaylight/switchmanager/api</module>
<module>opendaylight/switchmanager/implementation</module>
<module>opendaylight/commons/opendaylight</module>
<module>opendaylight/commons/parent</module>
<module>opendaylight/commons/logback_settings</module>
+ <module>opendaylight/commons/filter-valve</module>
<!-- Karaf Distribution -->
<module>features/base</module>
+ <module>features/controller</module>
<module>opendaylight/dummy-console</module>
+ <module>opendaylight/karaf-branding</module>
<module>opendaylight/distribution/opendaylight-karaf</module>
</modules>
<scm>