<parent>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>commons.opendaylight</artifactId>\r
- <version>1.4.2-SNAPSHOT</version>\r
+ <version>1.5.0-SNAPSHOT</version>\r
<relativePath>../../opendaylight/commons/opendaylight</relativePath>\r
</parent>\r
<artifactId>features-adsal-compatibility</artifactId>\r
<packaging>jar</packaging>\r
<properties>\r
<features.file>features.xml</features.file>\r
- <feature.test.version>0.6.2-SNAPSHOT</feature.test.version>\r
+ <feature.test.version>0.7.0-SNAPSHOT</feature.test.version>\r
</properties>\r
<dependencies>\r
<!--\r
<dependency>\r
<groupId>org.opendaylight.yangtools</groupId>\r
<artifactId>features-yangtools</artifactId>\r
- <version>0.6.2-SNAPSHOT</version>\r
+ <version>0.7.0-SNAPSHOT</version>\r
<classifier>features</classifier>\r
<type>xml</type>\r
</dependency>\r
<dependency>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>features-mdsal</artifactId>\r
- <version>1.1-SNAPSHOT</version>\r
+ <version>1.2.0-SNAPSHOT</version>\r
<classifier>features</classifier>\r
<type>xml</type>\r
</dependency>\r
<dependency>\r
<groupId>org.opendaylight.openflowplugin</groupId>\r
<artifactId>features-openflowplugin</artifactId>\r
- <version>0.0.3-SNAPSHOT</version>\r
+ <version>0.1.0-SNAPSHOT</version>\r
<classifier>features</classifier>\r
<type>xml</type>\r
</dependency>\r
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-adsal</artifactId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-empty</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<type>zip</type>
</dependency>
<!-- Bundle dependencies -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-akka</artifactId>
<features.file>features.xml</features.file>
<!-- Optional TODO: Move these properties to your parent pom and possibly
DependencyManagement section of your parent pom -->
- <branding.version>1.0.0-SNAPSHOT</branding.version>
- <karaf.resources.version>1.4.2-SNAPSHOT</karaf.resources.version>
+ <branding.version>1.1.0-SNAPSHOT</branding.version>
+ <karaf.resources.version>1.5.0-SNAPSHOT</karaf.resources.version>
<karaf.version>3.0.1</karaf.version>
- <feature.test.version>0.6.2-SNAPSHOT</feature.test.version>
- <karaf.empty.version>1.4.2-SNAPSHOT</karaf.empty.version>
+ <feature.test.version>0.7.0-SNAPSHOT</feature.test.version>
+ <karaf.empty.version>1.5.0-SNAPSHOT</karaf.empty.version>
<surefire.version>2.16</surefire.version>
</properties>
<dependencies>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.openflowplugin</groupId>
<artifactId>features-openflowplugin</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
Necessary TODO: Add repo entries for the repositories of features you refer to
in this feature file but do not define here.
Examples:
- <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.6.2-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.controller/features-mdsal/1.1-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.0.3-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.7.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.controller/features-mdsal/1.2.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.1.0-SNAPSHOT/xml/features</repository>
-->
<feature name='odl-akka-all' version='${project.version}' description='OpenDaylight :: Akka :: All'>
<!--
* Basic MD-SAL Provider
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider '>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<feature version='${project.version}'>odl-controller-model</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
* Basic MD-SAL Model feature
<feature name='odl-controller-model' version='${project.version}' description='OpenDaylight :: controller :: Model'>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-binding</feature>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-models</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-binding</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-models</feature>
<bundle>mvn:org.opendaylight.controller/controller-model/${project.version}</bundle>
... whatever other bundles you need
</feature>
* Config Subsystem example - the config file is your config subsystem configuration
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
<configfile finalname="etc/opendaylight/karaf/80-controller.xml">mvn:org.opendaylight.controller/controller-config/${project.version}/xml/config</configfile>
... whatever other bundles you need
* Basic MD-SAL Provider that uses openflowplugin-flow-services (which brings along odl-mdsal-broker)
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='0.0.3-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
+ <feature version='0.1.0-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
</feature>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-base</artifactId>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-test</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
</dependency>
<!-- dependency for opendaylight-karaf-empty for use by testing -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-empty</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<type>zip</type>
</dependency>
</dependencies>
<feature>odl-base-jackson</feature>
<feature>odl-base-spring-security</feature>
</feature>
- <feature name="odl-base-dummy-console" description="Temporary Dummy Console" version="1.1.0-SNAPSHOT">
- <bundle>mvn:org.opendaylight.controller/dummy-console/1.1.0-SNAPSHOT</bundle>
+ <feature name="odl-base-dummy-console" description="Temporary Dummy Console" version="1.2.0-SNAPSHOT">
+ <bundle>mvn:org.opendaylight.controller/dummy-console/1.2.0-SNAPSHOT</bundle>
</feature>
<feature name="odl-base-felix-dm" description="Felix Dependency Manager" version="${felix.dependencymanager.version}">
<bundle>mvn:org.osgi/org.osgi.compendium/${osgi.compendium.version}</bundle>
<bundle>wrap:mvn:io.netty/netty-common/${netty.version}</bundle>
<bundle>wrap:mvn:io.netty/netty-handler/${netty.version}</bundle>
<bundle>wrap:mvn:io.netty/netty-codec-http/${netty.version}</bundle>
- <bundle>mvn:org.opendaylight.controller.thirdparty/ganymed/1.1-SNAPSHOT</bundle>
+ <bundle>mvn:org.opendaylight.controller.thirdparty/ganymed/1.2.0-SNAPSHOT</bundle>
</feature>
<feature name="odl-base-jersey" description="Jersey" version="${jersey.version}">
<feature>odl-base-gemini-web</feature>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../../opendaylight/config/</relativePath>
</parent>
<artifactId>features-config-netty</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../../opendaylight/config/</relativePath>
</parent>
<artifactId>features-config-persister</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../../opendaylight/config/</relativePath>
</parent>
<artifactId>features-config</artifactId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-empty</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<type>zip</type>
</dependency>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>controller-features</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>extras-features</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../../opendaylight/md-sal</relativePath>
</parent>
<artifactId>features-flow</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../../opendaylight/md-sal</relativePath>
</parent>
<artifactId>features-mdsal</artifactId>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-test</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
</dependency>
</dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../../opendaylight/md-sal</relativePath>
</parent>
<!--
-->
<artifactId>features-netconf-connector</artifactId>
<!-- Optional TODO: Uncomment version if you are not using a parent pom.xml
- <version>1.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
-->
<packaging>jar</packaging>
<properties>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.openflowplugin</groupId>
<artifactId>features-openflowplugin</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
Necessary TODO: Add repo entries for the repositories of features you refer to
in this feature file but do not define here.
Examples:
- <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.6.2-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.controller/features-mdsal/1.1-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.0.3-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.7.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.controller/features-mdsal/1.2.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.1.0-SNAPSHOT/xml/features</repository>
-->
<repository>mvn:org.opendaylight.controller/features-mdsal/${mdsal.version}/xml/features</repository>
<repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
* Basic MD-SAL Provider
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider '>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<feature version='${project.version}'>odl-controller-model</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
* Basic MD-SAL Model feature
<feature name='odl-controller-model' version='${project.version}' description='OpenDaylight :: controller :: Model'>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-binding</feature>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-models</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-binding</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-models</feature>
<bundle>mvn:org.opendaylight.controller/controller-model/${project.version}</bundle>
... whatever other bundles you need
</feature>
* Config Subsystem example - the config file is your config subsystem configuration
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
<configfile finalname="etc/opendaylight/karaf/80-controller.xml">mvn:org.opendaylight.controller/controller-config/${project.version}/xml/config</configfile>
... whatever other bundles you need
* Basic MD-SAL Provider that uses openflowplugin-flow-services (which brings along odl-mdsal-broker)
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='0.0.3-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
+ <feature version='0.1.0-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
</feature>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../../opendaylight/netconf</relativePath>
</parent>
<artifactId>features-netconf</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-nsf</artifactId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-empty</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<type>zip</type>
</dependency>
<!-- Feature Dependencies -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-controller</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>features-protocol-framework</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../../opendaylight/md-sal</relativePath>
</parent>
<!--
<artifactId>features-restconf</artifactId>
<groupId>org.opendaylight.controller</groupId>
<!-- Optional TODO: Uncomment version if you are not using a parent pom.xml
- <version>1.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
-->
<packaging>jar</packaging>
<properties>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.openflowplugin</groupId>
<artifactId>features-openflowplugin</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
Necessary TODO: Add repo entries for the repositories of features you refer to
in this feature file but do not define here.
Examples:
- <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.6.2-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.controller/features-mdsal/1.1-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.0.3-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.7.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.controller/features-mdsal/1.2.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.1.0-SNAPSHOT/xml/features</repository>
-->
<repository>mvn:org.opendaylight.controller/features-mdsal/${mdsal.version}/xml/features</repository>
<repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
* Basic MD-SAL Provider
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider '>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<feature version='${project.version}'>odl-controller-model</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
* Basic MD-SAL Model feature
<feature name='odl-controller-model' version='${project.version}' description='OpenDaylight :: controller :: Model'>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-binding</feature>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-models</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-binding</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-models</feature>
<bundle>mvn:org.opendaylight.controller/controller-model/${project.version}</bundle>
... whatever other bundles you need
</feature>
* Config Subsystem example - the config file is your config subsystem configuration
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
<configfile finalname="etc/opendaylight/karaf/80-controller.xml">mvn:org.opendaylight.controller/controller-config/${project.version}/xml/config</configfile>
... whatever other bundles you need
* Basic MD-SAL Provider that uses openflowplugin-flow-services (which brings along odl-mdsal-broker)
<feature name='odl-controller-provider' version='${project.version}' description='OpenDaylight :: controller :: Provider'>
- <feature version='0.0.3-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
+ <feature version='0.1.0-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
<bundle>mvn:org.opendaylight.controller/controller-provider/${project.version}</bundle>
... whatever other bundles you need
</feature>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>itests-controller</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>base-features-it</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../opendaylight/commons/opendaylight</relativePath>
</parent>
<artifactId>itests-controller</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>appauth</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<groupId>org.opendaylight.controller.archetypes</groupId>
<artifactId>odl-model-project</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>maven-archetype</packaging>
<properties>
<nexusproxy>http://nexus.opendaylight.org/content</nexusproxy>\r
<nexus.repository.release>opendaylight.release</nexus.repository.release>
<nexus.repository.snapshot>opendaylight.snapshot</nexus.repository.snapshot>
- <yang.version>0.6.2-SNAPSHOT</yang.version>\r
- <yang.codegen.version>0.6.2-SNAPSHOT</yang.codegen.version>\r
+ <yang.version>0.7.0-SNAPSHOT</yang.version>\r
+ <yang.codegen.version>0.7.0-SNAPSHOT</yang.codegen.version>\r
<bundle.plugin.version>2.3.7</bundle.plugin.version>\r
</properties>\r
<scm>\r
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-configfile-archetype</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>maven-archetype</packaging>
<parent>
<groupId>org.opendaylight.controller.archetypes</groupId>
<artifactId>archetypes-parent</artifactId>
- <version>0.1.1-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-distro-archetype</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>maven-archetype</packaging>
<name>distribution-karaf-archetype</name>
<properties>
<!-- Optional TODO: Move these properties to your parent pom and possibly
DependencyManagement section of your parent pom -->
- <branding.version>1.0.0-SNAPSHOT</branding.version>
- <karaf.resources.version>1.4.2-SNAPSHOT</karaf.resources.version>
+ <branding.version>1.1.0-SNAPSHOT</branding.version>
+ <karaf.resources.version>1.5.0-SNAPSHOT</karaf.resources.version>
<karaf.version>3.0.1</karaf.version>
</properties>
<dependency>
<groupId>org.opendaylight.openflowplugin</groupId>
<artifactId>features-openflowplugin</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
<scope>runtime</scope>
<parent>
<groupId>org.opendaylight.controller.archetypes</groupId>
<artifactId>archetypes-parent</artifactId>
- <version>0.1.1-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>opendaylight-karaf-features-archetype</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>maven-archetype</packaging>
<name>opendaylight-karaf-features-archetype</name>
<features.file>features.xml</features.file>
<!-- Optional TODO: Move these properties to your parent pom and possibly
DependencyManagement section of your parent pom -->
- <branding.version>1.0.0-SNAPSHOT</branding.version>
- <karaf.resources.version>1.4.2-SNAPSHOT</karaf.resources.version>
+ <branding.version>1.1.0-SNAPSHOT</branding.version>
+ <karaf.resources.version>1.5.0-SNAPSHOT</karaf.resources.version>
<karaf.version>3.0.1</karaf.version>
- <feature.test.version>0.6.2-SNAPSHOT</feature.test.version>
- <karaf.empty.version>1.4.2-SNAPSHOT</karaf.empty.version>
+ <feature.test.version>0.7.0-SNAPSHOT</feature.test.version>
+ <karaf.empty.version>1.5.0-SNAPSHOT</karaf.empty.version>
<surefire.version>2.16</surefire.version>
</properties>
<dependencies>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
<dependency>
<groupId>org.opendaylight.openflowplugin</groupId>
<artifactId>features-openflowplugin</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<classifier>features</classifier>
<type>xml</type>
</dependency>
Necessary TODO: Add repo entries for the repositories of features you refer to
in this feature file but do not define here.
Examples:
- <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.6.2-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.controller/features-mdsal/1.1-SNAPSHOT/xml/features</repository>
- <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.0.3-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.yangtools/features-yangtools/0.7.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.controller/features-mdsal/1.2.0-SNAPSHOT/xml/features</repository>
+ <repository>mvn:org.opendaylight.openflowplugin/features-openflowplugin/0.1.0-SNAPSHOT/xml/features</repository>
-->
<feature name='odl-${repoName}-all' version='${symbol_dollar}{project.version}' description='OpenDaylight :: ${repoName} :: All'>
<!--
* Basic MD-SAL Provider
<feature name='odl-${repoName}-provider' version='${symbol_dollar}{project.version}' description='OpenDaylight :: ${repoName} :: Provider '>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<feature version='${symbol_dollar}{project.version}'>odl-${repoName}-model</feature>
<bundle>mvn:${groupId}/${repoName}-provider/${symbol_dollar}{project.version}</bundle>
... whatever other bundles you need
* Basic MD-SAL Model feature
<feature name='odl-${repoName}-model' version='${symbol_dollar}{project.version}' description='OpenDaylight :: ${repoName} :: Model'>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-binding</feature>
- <feature version='0.6.2-SNAPSHOT'>odl-yangtools-models</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-binding</feature>
+ <feature version='0.7.0-SNAPSHOT'>odl-yangtools-models</feature>
<bundle>mvn:${groupId}/${repoName}-model/${symbol_dollar}{project.version}</bundle>
... whatever other bundles you need
</feature>
* Config Subsystem example - the config file is your config subsystem configuration
<feature name='odl-${repoName}-provider' version='${symbol_dollar}{project.version}' description='OpenDaylight :: ${repoName} :: Provider'>
- <feature version='1.1-SNAPSHOT'>odl-mdsal-broker</feature>
+ <feature version='1.2.0-SNAPSHOT'>odl-mdsal-broker</feature>
<bundle>mvn:${groupId}/${repoName}-provider/${symbol_dollar}{project.version}</bundle>
<configfile finalname="etc/opendaylight/karaf/80-${repoName}.xml">mvn:${groupId}/${repoName}-config/${symbol_dollar}{project.version}/xml/config</configfile>
... whatever other bundles you need
* Basic MD-SAL Provider that uses openflowplugin-flow-services (which brings along odl-mdsal-broker)
<feature name='odl-${repoName}-provider' version='${symbol_dollar}{project.version}' description='OpenDaylight :: ${repoName} :: Provider'>
- <feature version='0.0.3-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
+ <feature version='0.1.0-SNAPSHOT'>odl-openflowplugin-flow-services</feature>
<bundle>mvn:${groupId}/${repoName}-provider/${symbol_dollar}{project.version}</bundle>
... whatever other bundles you need
</feature>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<scm>
</scm>
<groupId>org.opendaylight.controller.archetypes</groupId>
<artifactId>archetypes-parent</artifactId>
- <version>0.1.1-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<distributionManagement>
<!-- OpenDayLight Released artifact -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>arphandler</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>clustering.services.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<properties>
<sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>clustering.services</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>clustering.services-implementation</artifactId>
- <version>0.4.3-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<!-- Sonar properties using jacoco to retrieve integration test results -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>clustering.stub</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>clustering.test</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
<artifactId>checkstyle</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<scm>
<connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
<developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>concepts</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>checkstyle</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
</plugin>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../opendaylight</relativePath>
</parent>
<artifactId>filter-valve</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>commons.httpclient</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>checkstyle</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
</plugin>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>pom</packaging>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>checkstyle</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
</plugin>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../opendaylight</relativePath>
</parent>
<artifactId>liblldp</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.9.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.parent</artifactId>
- <version>1.0.2-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<relativePath>../parent</relativePath>
</parent>
<artifactId>commons.logback_settings</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<scm>
<connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
<developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
<parent>
<groupId>org.opendaylight.odlparent</groupId>
<artifactId>odlparent</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath></relativePath>
</parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<packaging>pom</packaging>
<prerequisites>
<maven>3.0</maven>
<akka.version>2.3.4</akka.version>
<aopalliance.version>1.0.0</aopalliance.version>
- <appauth.version>0.4.2-SNAPSHOT</appauth.version>
- <archetype-app-northbound>0.0.1-SNAPSHOT</archetype-app-northbound>
+ <appauth.version>0.5.0-SNAPSHOT</appauth.version>
+ <archetype-app-northbound>0.1.0-SNAPSHOT</archetype-app-northbound>
<aries.util.version>1.1.0</aries.util.version>
- <arphandler.version>0.5.2-SNAPSHOT</arphandler.version>
+ <arphandler.version>0.6.0-SNAPSHOT</arphandler.version>
<!-- Controller Modules Versions -->
<asm.version>4.1</asm.version>
<!-- Plugin Versions -->
<bouncycastle.version>1.50</bouncycastle.version>
<bundle.plugin.version>2.4.0</bundle.plugin.version>
- <bundlescanner.api.version>0.4.2-SNAPSHOT</bundlescanner.api.version>
- <bundlescanner.implementation.version>0.4.2-SNAPSHOT</bundlescanner.implementation.version>
- <bundlescanner.version>0.4.2-SNAPSHOT</bundlescanner.version>
+ <bundlescanner.api.version>0.5.0-SNAPSHOT</bundlescanner.api.version>
+ <bundlescanner.implementation.version>0.5.0-SNAPSHOT</bundlescanner.implementation.version>
+ <bundlescanner.version>0.5.0-SNAPSHOT</bundlescanner.version>
<checkstyle.version>2.12</checkstyle.version>
- <clustering.services.version>0.5.1-SNAPSHOT</clustering.services.version>
- <clustering.services_implementation.version>0.4.3-SNAPSHOT</clustering.services_implementation.version>
- <clustering.stub.version>0.4.2-SNAPSHOT</clustering.stub.version>
- <clustering.test.version>0.4.2-SNAPSHOT</clustering.test.version>
- <commmons.northbound.version>0.4.2-SNAPSHOT</commmons.northbound.version>
+ <clustering.services.version>0.6.0-SNAPSHOT</clustering.services.version>
+ <clustering.services_implementation.version>0.5.0-SNAPSHOT</clustering.services_implementation.version>
+ <clustering.stub.version>0.5.0-SNAPSHOT</clustering.stub.version>
+ <clustering.test.version>0.5.0-SNAPSHOT</clustering.test.version>
+ <commmons.northbound.version>0.5.0-SNAPSHOT</commmons.northbound.version>
<!-- Third Party Versions -->
<codahale.metrics.version>3.0.1</codahale.metrics.version>
<commons.tomcat.api>7.0.53.v201406060720</commons.tomcat.api>
<commons.tomcat.util>7.0.53.v201406070630</commons.tomcat.util>
- <commons.checkstyle.version>0.0.3-SNAPSHOT</commons.checkstyle.version>
+ <commons.checkstyle.version>0.1.0-SNAPSHOT</commons.checkstyle.version>
<commons.fileupload.version>1.2.2</commons.fileupload.version>
- <commons.httpclient.version>0.1.2-SNAPSHOT</commons.httpclient.version>
+ <commons.httpclient.version>0.2.0-SNAPSHOT</commons.httpclient.version>
<commons.io.version>2.4</commons.io.version>
<commons.lang3.version>3.1</commons.lang3.version>
- <commons.logback_settings.version>0.0.2-SNAPSHOT</commons.logback_settings.version>
+ <commons.logback_settings.version>0.1.0-SNAPSHOT</commons.logback_settings.version>
<commons.net.version>3.0.1</commons.net.version>
- <commons.opendaylight.commons.httpclient>0.1.2-SNAPSHOT</commons.opendaylight.commons.httpclient>
- <commons.opendaylight.concepts.version>0.5.2-SNAPSHOT</commons.opendaylight.concepts.version>
- <commons.opendaylight.version>1.4.2-SNAPSHOT</commons.opendaylight.version>
- <commons.parent.version>1.0.2-SNAPSHOT</commons.parent.version>
+ <commons.opendaylight.commons.httpclient>0.2.0-SNAPSHOT</commons.opendaylight.commons.httpclient>
+ <commons.opendaylight.concepts.version>0.6.0-SNAPSHOT</commons.opendaylight.concepts.version>
+ <commons.opendaylight.version>1.5.0-SNAPSHOT</commons.opendaylight.version>
+ <commons.parent.version>1.1.0-SNAPSHOT</commons.parent.version>
<compiler.version>2.3.2</compiler.version>
- <commons.httpclient.version>0.1.2-SNAPSHOT</commons.httpclient.version>
- <concepts.version>0.5.2-SNAPSHOT</concepts.version>
+ <commons.httpclient.version>0.2.0-SNAPSHOT</commons.httpclient.version>
+ <concepts.version>0.6.0-SNAPSHOT</concepts.version>
<concurrentlinkedhashmap.version>1.4</concurrentlinkedhashmap.version>
- <config.version>0.2.5-SNAPSHOT</config.version>
- <aaa.version>0.1.0-SNAPSHOT</aaa.version>
+ <config.version>0.3.0-SNAPSHOT</config.version>
+ <aaa.version>0.2.0-SNAPSHOT</aaa.version>
<config.configfile.directory>etc/opendaylight/karaf</config.configfile.directory>
<config.clustering.configfile>05-clustering.xml</config.clustering.configfile>
<config.netty.configfile>00-netty.xml</config.netty.configfile>
<config.toaster.configfile>03-toaster-sample.xml</config.toaster.configfile>
<config.restconf.configfile>10-rest-connector.xml</config.restconf.configfile>
<config.netconf.connector.configfile>99-netconf-connector.xml</config.netconf.connector.configfile>
- <configuration.implementation.version>0.4.3-SNAPSHOT</configuration.implementation.version>
- <configuration.version>0.4.3-SNAPSHOT</configuration.version>
- <connectionmanager.version>0.1.2-SNAPSHOT</connectionmanager.version>
- <containermanager.it.version>0.5.2-SNAPSHOT</containermanager.it.version>
- <containermanager.northbound.version>0.4.2-SNAPSHOT</containermanager.northbound.version>
- <containermanager.shell.version>0.5.2-SNAPSHOT</containermanager.shell.version>
- <containermanager.version>0.5.2-SNAPSHOT</containermanager.version>
- <controllermanager.northbound.version>0.0.2-SNAPSHOT</controllermanager.northbound.version>
- <devices.web.version>0.4.2-SNAPSHOT</devices.web.version>
- <dummy-console.version>1.1.0-SNAPSHOT</dummy-console.version>
+ <configuration.implementation.version>0.5.0-SNAPSHOT</configuration.implementation.version>
+ <configuration.version>0.5.0-SNAPSHOT</configuration.version>
+ <connectionmanager.version>0.2.0-SNAPSHOT</connectionmanager.version>
+ <containermanager.it.version>0.6.0-SNAPSHOT</containermanager.it.version>
+ <containermanager.northbound.version>0.5.0-SNAPSHOT</containermanager.northbound.version>
+ <containermanager.shell.version>0.6.0-SNAPSHOT</containermanager.shell.version>
+ <containermanager.version>0.6.0-SNAPSHOT</containermanager.version>
+ <controllermanager.northbound.version>0.1.0-SNAPSHOT</controllermanager.northbound.version>
+ <devices.web.version>0.5.0-SNAPSHOT</devices.web.version>
+ <dummy-console.version>1.2.0-SNAPSHOT</dummy-console.version>
<eclipse.persistence.version>2.5.0</eclipse.persistence.version>
<eclipse.jdt.core.compiler.batch.version>3.8.0.I20120518-2145</eclipse.jdt.core.compiler.batch.version>
<!-- enforcer version -->
<exi.nagasena.version>0000.0002.0038.0</exi.nagasena.version>
<felix.util.version>1.6.0</felix.util.version>
- <filtervalve.version>1.4.2-SNAPSHOT</filtervalve.version>
+ <filtervalve.version>1.5.0-SNAPSHOT</filtervalve.version>
<findbugs.maven.plugin.version>2.4.0</findbugs.maven.plugin.version>
- <flowprogrammer.northbound.version>0.4.2-SNAPSHOT</flowprogrammer.northbound.version>
- <flows.web.version>0.4.2-SNAPSHOT</flows.web.version>
- <forwarding.staticrouting>0.5.2-SNAPSHOT</forwarding.staticrouting>
- <forwarding.staticrouting.northbound.version>0.4.2-SNAPSHOT</forwarding.staticrouting.northbound.version>
- <forwardingrulesmanager.implementation.version>0.4.2-SNAPSHOT</forwardingrulesmanager.implementation.version>
- <forwardingrulesmanager.version>0.6.0-SNAPSHOT</forwardingrulesmanager.version>
- <ganymed.version>1.1-SNAPSHOT</ganymed.version>
- <hosttracker.api.version>0.5.2-SNAPSHOT</hosttracker.api.version>
- <hosttracker.implementation.version>0.5.2-SNAPSHOT</hosttracker.implementation.version>
- <hosttracker.northbound.version>0.4.2-SNAPSHOT</hosttracker.northbound.version>
- <hosttracker.shell.version>1.0.0-SNAPSHOT</hosttracker.shell.version>
- <hosttracker_new.api.version>0.4.2-SNAPSHOT</hosttracker_new.api.version>
- <hosttracker_new.implementation.version>0.4.2-SNAPSHOT</hosttracker_new.implementation.version>
- <httpservice-bridge.northbound.version>0.0.2-SNAPSHOT</httpservice-bridge.northbound.version>
- <ietf-inet-types.version>2010.09.24.4-SNAPSHOT</ietf-inet-types.version>
- <ietf-restconf.version>2013.10.19.1-SNAPSHOT</ietf-restconf.version>
- <ietf-topology.version>2013.10.21.2-SNAPSHOT</ietf-topology.version>
- <ietf-yang-types.version>2010.09.24.4-SNAPSHOT</ietf-yang-types.version>
+ <flowprogrammer.northbound.version>0.5.0-SNAPSHOT</flowprogrammer.northbound.version>
+ <flows.web.version>0.5.0-SNAPSHOT</flows.web.version>
+ <forwarding.staticrouting>0.6.0-SNAPSHOT</forwarding.staticrouting>
+ <forwarding.staticrouting.northbound.version>0.5.0-SNAPSHOT</forwarding.staticrouting.northbound.version>
+ <forwardingrulesmanager.implementation.version>0.5.0-SNAPSHOT</forwardingrulesmanager.implementation.version>
+ <forwardingrulesmanager.version>0.7.0-SNAPSHOT</forwardingrulesmanager.version>
+ <ganymed.version>1.2.0-SNAPSHOT</ganymed.version>
+ <hosttracker.api.version>0.6.0-SNAPSHOT</hosttracker.api.version>
+ <hosttracker.implementation.version>0.6.0-SNAPSHOT</hosttracker.implementation.version>
+ <hosttracker.northbound.version>0.5.0-SNAPSHOT</hosttracker.northbound.version>
+ <hosttracker.shell.version>1.1.0-SNAPSHOT</hosttracker.shell.version>
+ <hosttracker_new.api.version>0.5.0-SNAPSHOT</hosttracker_new.api.version>
+ <hosttracker_new.implementation.version>0.5.0-SNAPSHOT</hosttracker_new.implementation.version>
+ <httpservice-bridge.northbound.version>0.1.0-SNAPSHOT</httpservice-bridge.northbound.version>
+ <ietf-inet-types.version>2010.09.24.7-SNAPSHOT</ietf-inet-types.version>
+ <ietf-restconf.version>2013.10.19.7-SNAPSHOT</ietf-restconf.version>
+ <ietf-topology.version>2013.10.21.7-SNAPSHOT</ietf-topology.version>
+ <ietf-yang-types.version>2010.09.24.7-SNAPSHOT</ietf-yang-types.version>
<jdepend.maven.plugin.version>2.0-beta-2</jdepend.maven.plugin.version>
<jmxGeneratorPath>src/main/yang-gen-config</jmxGeneratorPath>
- <jolokia-bridge.version>0.0.2-SNAPSHOT</jolokia-bridge.version>
+ <jolokia-bridge.version>0.1.0-SNAPSHOT</jolokia-bridge.version>
<jolokia.version>1.1.4</jolokia.version>
<jsr305.api.version>2.0.1</jsr305.api.version>
<jsr311.api.version>1.1.1</jsr311.api.version>
<jsr311.v2.api.version>2.0</jsr311.v2.api.version>
- <karaf.branding.version>1.0.0-SNAPSHOT</karaf.branding.version>
+ <karaf.branding.version>1.1.0-SNAPSHOT</karaf.branding.version>
<karaf.shell.version>3.0.0</karaf.shell.version>
<karaf.version>3.0.1</karaf.version>
<leveldb.version>0.7</leveldb.version>
<leveldbjni.version>1.8</leveldbjni.version>
<lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
<logback.version>1.0.9</logback.version>
- <logging.bridge.version>0.4.2-SNAPSHOT</logging.bridge.version>
+ <logging.bridge.version>0.5.0-SNAPSHOT</logging.bridge.version>
<maven.plugin.api.version>3.0.5</maven.plugin.api.version>
<mimepull.version>1.9.4</mimepull.version>
- <mdsal.version>1.1-SNAPSHOT</mdsal.version>
- <netconf.version>0.2.5-SNAPSHOT</netconf.version>
- <networkconfig.bridgedomain.northbound.version>0.0.3-SNAPSHOT</networkconfig.bridgedomain.northbound.version>
- <networkconfig.neutron.implementation.version>0.4.2-SNAPSHOT</networkconfig.neutron.implementation.version>
- <networkconfig.neutron.northbound.version>0.4.2-SNAPSHOT</networkconfig.neutron.northbound.version>
- <networkconfig.neutron.version>0.4.2-SNAPSHOT</networkconfig.neutron.version>
+ <mdsal.version>1.2.0-SNAPSHOT</mdsal.version>
+ <netconf.version>0.3.0-SNAPSHOT</netconf.version>
+ <networkconfig.bridgedomain.northbound.version>0.1.0-SNAPSHOT</networkconfig.bridgedomain.northbound.version>
+ <networkconfig.neutron.implementation.version>0.5.0-SNAPSHOT</networkconfig.neutron.implementation.version>
+ <networkconfig.neutron.northbound.version>0.5.0-SNAPSHOT</networkconfig.neutron.northbound.version>
+ <networkconfig.neutron.version>0.5.0-SNAPSHOT</networkconfig.neutron.version>
<!-- ODL repository / plugin repository -->
<nexusproxy>http://nexus.opendaylight.org/content</nexusproxy>
- <northbound.commons.version>0.4.2-SNAPSHOT</northbound.commons.version>
- <northbound.hosttracker.version>1.4.2-SNAPSHOT</northbound.hosttracker.version>
- <northbound.jolokia.version>1.4.2-SNAPSHOT</northbound.jolokia.version>
- <opendaylight-l2-types.version>2013.08.27.4-SNAPSHOT</opendaylight-l2-types.version>
- <osgi-brandfragment.web.version>0.0.2-SNAPSHOT</osgi-brandfragment.web.version>
+ <northbound.commons.version>0.5.0-SNAPSHOT</northbound.commons.version>
+ <northbound.hosttracker.version>1.5.0-SNAPSHOT</northbound.hosttracker.version>
+ <northbound.jolokia.version>1.5.0-SNAPSHOT</northbound.jolokia.version>
+ <opendaylight-l2-types.version>2013.08.27.7-SNAPSHOT</opendaylight-l2-types.version>
+ <osgi-brandfragment.web.version>0.1.0-SNAPSHOT</osgi-brandfragment.web.version>
<pax.exam.version>4.0.0</pax.exam.version>
<parboiled.version>1.1.6</parboiled.version>
<parboiled.scala.version>1.1.6</parboiled.scala.version>
<propertymavenplugin.version>1.0-alpha-2</propertymavenplugin.version>
<protobuf.version>2.5.0</protobuf.version>
- <protocol-framework.version>0.5.0-SNAPSHOT</protocol-framework.version>
- <protocol_plugins.openflow.version>0.4.2-SNAPSHOT</protocol_plugins.openflow.version>
- <protocol_plugins.stub.version>0.4.2-SNAPSHOT</protocol_plugins.stub.version>
- <routing.dijkstra_implementation.version>0.4.2-SNAPSHOT</routing.dijkstra_implementation.version>
- <sal.connection.version>0.1.2-SNAPSHOT</sal.connection.version>
- <sal.implementation.version>0.4.2-SNAPSHOT</sal.implementation.version>
- <sal.networkconfiguration.version>0.0.3-SNAPSHOT</sal.networkconfiguration.version>
- <sal.version>0.8.1-SNAPSHOT</sal.version>
+ <protocol-framework.version>0.6.0-SNAPSHOT</protocol-framework.version>
+ <protocol_plugins.openflow.version>0.5.0-SNAPSHOT</protocol_plugins.openflow.version>
+ <protocol_plugins.stub.version>0.5.0-SNAPSHOT</protocol_plugins.stub.version>
+ <routing.dijkstra_implementation.version>0.5.0-SNAPSHOT</routing.dijkstra_implementation.version>
+ <sal.connection.version>0.2.0-SNAPSHOT</sal.connection.version>
+ <sal.implementation.version>0.5.0-SNAPSHOT</sal.implementation.version>
+ <sal.networkconfiguration.version>0.1.0-SNAPSHOT</sal.networkconfiguration.version>
+ <sal.version>0.9.0-SNAPSHOT</sal.version>
<salGeneratorPath>src/main/yang-gen-sal</salGeneratorPath>
- <samples.loadbalancer>0.5.2-SNAPSHOT</samples.loadbalancer>
- <samples.loadbalancer.northbound.version>0.4.2-SNAPSHOT</samples.loadbalancer.northbound.version>
- <samples.simpleforwarding.version>0.4.2-SNAPSHOT</samples.simpleforwarding.version>
- <sanitytest.version>0.4.2-SNAPSHOT</sanitytest.version>
+ <samples.loadbalancer>0.6.0-SNAPSHOT</samples.loadbalancer>
+ <samples.loadbalancer.northbound.version>0.5.0-SNAPSHOT</samples.loadbalancer.northbound.version>
+ <samples.simpleforwarding.version>0.5.0-SNAPSHOT</samples.simpleforwarding.version>
+ <sanitytest.version>0.5.0-SNAPSHOT</sanitytest.version>
<scala.version>2.10</scala.version>
<scala.micro.version>4</scala.micro.version>
- <security.version>0.4.2-SNAPSHOT</security.version>
- <karaf.security.version>0.4.2-SNAPSHOT</karaf.security.version>
+ <security.version>0.5.0-SNAPSHOT</security.version>
+ <karaf.security.version>0.5.0-SNAPSHOT</karaf.security.version>
<shapeless.version>1.2.4</shapeless.version>
<sitedeploy>dav:http://nexus.opendaylight.org/content/sites/site</sitedeploy>
<sonar.branch>${user.name}-private-view</sonar.branch>
<spring-security-karaf.version>3.1.4.RELEASE</spring-security-karaf.version>
<spring-security.version>3.1.3.RELEASE</spring-security.version>
<spring.version>3.1.3.RELEASE</spring.version>
- <statistics.northbound.version>0.4.2-SNAPSHOT</statistics.northbound.version>
- <statisticsmanager.implementation.version>0.4.2-SNAPSHOT</statisticsmanager.implementation.version>
- <statisticsmanager.version>0.5.1-SNAPSHOT</statisticsmanager.version>
- <subnets.northbound.version>0.4.2-SNAPSHOT</subnets.northbound.version>
+ <statistics.northbound.version>0.5.0-SNAPSHOT</statistics.northbound.version>
+ <statisticsmanager.implementation.version>0.5.0-SNAPSHOT</statisticsmanager.implementation.version>
+ <statisticsmanager.version>0.6.0-SNAPSHOT</statisticsmanager.version>
+ <subnets.northbound.version>0.5.0-SNAPSHOT</subnets.northbound.version>
<surefire.version>2.15</surefire.version>
- <switchmanager.api.version>0.7.1-SNAPSHOT</switchmanager.api.version>
- <switchmanager.implementation.version>0.4.2-SNAPSHOT</switchmanager.implementation.version>
- <switchmanager.northbound.version>0.4.2-SNAPSHOT</switchmanager.northbound.version>
+ <switchmanager.api.version>0.8.0-SNAPSHOT</switchmanager.api.version>
+ <switchmanager.implementation.version>0.5.0-SNAPSHOT</switchmanager.implementation.version>
+ <switchmanager.northbound.version>0.5.0-SNAPSHOT</switchmanager.northbound.version>
<testvm.argLine>-Xmx1024m -XX:MaxPermSize=256m</testvm.argLine>
- <topology.northbound.version>0.4.2-SNAPSHOT</topology.northbound.version>
- <topology.web.version>0.4.2-SNAPSHOT</topology.web.version>
- <topologymanager.version>0.4.2-SNAPSHOT</topologymanager.version>
- <topologymanager.shell.version>1.0.0-SNAPSHOT</topologymanager.shell.version>
- <troubleshoot.web.version>0.4.2-SNAPSHOT</troubleshoot.web.version>
+ <topology.northbound.version>0.5.0-SNAPSHOT</topology.northbound.version>
+ <topology.web.version>0.5.0-SNAPSHOT</topology.web.version>
+ <topologymanager.version>0.5.0-SNAPSHOT</topologymanager.version>
+ <topologymanager.shell.version>1.1.0-SNAPSHOT</topologymanager.shell.version>
+ <troubleshoot.web.version>0.5.0-SNAPSHOT</troubleshoot.web.version>
<typesafe.config.version>1.2.0</typesafe.config.version>
<uncommons.maths.version>1.2.2a</uncommons.maths.version>
- <usermanager.implementation.version>0.4.2-SNAPSHOT</usermanager.implementation.version>
- <usermanager.northbound.version>0.0.2-SNAPSHOT</usermanager.northbound.version>
- <usermanager.version>0.4.2-SNAPSHOT</usermanager.version>
- <nsf.version>0.4.2-SNAPSHOT</nsf.version>
- <web.version>0.4.2-SNAPSHOT</web.version>
+ <usermanager.implementation.version>0.5.0-SNAPSHOT</usermanager.implementation.version>
+ <usermanager.northbound.version>0.1.0-SNAPSHOT</usermanager.northbound.version>
+ <usermanager.version>0.5.0-SNAPSHOT</usermanager.version>
+ <nsf.version>0.5.0-SNAPSHOT</nsf.version>
+ <web.version>0.5.0-SNAPSHOT</web.version>
<xtend.dstdir>src/main/xtend-gen</xtend.dstdir>
- <yang-ext.version>2013.09.07.4-SNAPSHOT</yang-ext.version>
- <yang-jmx-generator.version>1.0.0-SNAPSHOT</yang-jmx-generator.version>
- <yangtools.version>0.6.2-SNAPSHOT</yangtools.version>
+ <yang-ext.version>2013.09.07.7-SNAPSHOT</yang-ext.version>
+ <yang-jmx-generator.version>1.1.0-SNAPSHOT</yang-jmx-generator.version>
+ <yangtools.version>0.7.0-SNAPSHOT</yangtools.version>
<sshd-core.version>0.12.0</sshd-core.version>
<jmh.version>0.9.7</jmh.version>
</properties>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>httpservice-bridge</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>jolokia-bridge</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
<!-- Karaf Dependencies -->
<dependency>
<dependency>
<groupId>org.opendaylight.yangtools.model</groupId>
<artifactId>ietf-yang-types-20130715</artifactId>
- <version>2013.07.15.1-SNAPSHOT</version>
+ <version>2013.07.15.7-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools.model</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.logback_settings</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<type>xml</type>
<scope>runtime</scope>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>features-netconf-connector</artifactId>
+ <version>${mdsal.version}</version>
+ <classifier>features</classifier>
+ <type>xml</type>
+ <scope>runtime</scope>
+ </dependency>
<!-- JMH Benchmark dependencies -->
<dependency>
<groupId>org.openjdk.jmh</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.logback_settings</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
<executions>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>checkstyle</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
<executions>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.parent</artifactId>
- <version>1.0.2-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<prerequisites>
<maven>3.0</maven>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>protocol-framework</artifactId>
- <version>0.5.0-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<name>${project.artifactId}</name>
<description>Common protocol framework</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>config-api</artifactId>
@Override
public String toString() {
- final StringBuffer sb = new StringBuffer("IdentityAttributeRef{");
+ final StringBuilder sb = new StringBuilder("IdentityAttributeRef{");
sb.append("qNameOfIdentity='").append(qNameOfIdentity).append('\'');
sb.append('}');
return sb.toString();
*/
@ThreadSafe
public class ObjectNameUtil {
+ private ObjectNameUtil() {
+ }
public static final String ON_DOMAIN = ConfigRegistryConstants.ON_DOMAIN;
public static final String MODULE_FACTORY_NAME_KEY = "moduleFactoryName";
}
public static Map<String, String> getAdditionalProperties(ObjectName on) {
- Hashtable<String, String> keyPropertyList = on.getKeyPropertyList();
+ Map<String, String> keyPropertyList = on.getKeyPropertyList();
Map<String, String> result = new HashMap<>();
for (Entry<String, String> entry : keyPropertyList.entrySet()) {
result.put(entry.getKey(), entry.getValue());
public static ObjectName createModulePattern(String moduleName,
String instanceName) {
- moduleName = moduleName == null ? ON_WILDCARD : moduleName;
- instanceName = instanceName == null ? ON_WILDCARD : instanceName;
+ String finalModuleName = moduleName == null ? ON_WILDCARD : moduleName;
+ String finalInstanceName = instanceName == null ? ON_WILDCARD : instanceName;
// do not return object names containing transaction name
ObjectName namePattern = ObjectNameUtil
+ ObjectNameUtil.TYPE_KEY + "="
+ ObjectNameUtil.TYPE_MODULE + ","
+ ObjectNameUtil.MODULE_FACTORY_NAME_KEY + "="
- + moduleName + "," + ""
- + ObjectNameUtil.INSTANCE_NAME_KEY + "=" + instanceName);
+ + finalModuleName + "," + ""
+ + ObjectNameUtil.INSTANCE_NAME_KEY + "=" + finalInstanceName);
return namePattern;
}
public static ObjectName createModulePattern(String ifcName,
String instanceName, String transactionName) {
- ifcName = ifcName == null ? ON_WILDCARD : ifcName;
- instanceName = instanceName == null ? ON_WILDCARD : instanceName;
- transactionName = transactionName == null ? ON_WILDCARD : transactionName;
+ String finalIfcName = ifcName == null ? ON_WILDCARD : ifcName;
+ String finalInstanceName = instanceName == null ? ON_WILDCARD : instanceName;
+ String finalTransactionName = transactionName == null ? ON_WILDCARD : transactionName;
return ObjectNameUtil.createON(ObjectNameUtil.ON_DOMAIN
+ ":type=Module," + ObjectNameUtil.MODULE_FACTORY_NAME_KEY
- + "=" + ifcName + "," + ObjectNameUtil.INSTANCE_NAME_KEY + "="
- + instanceName + "," + ObjectNameUtil.TRANSACTION_NAME_KEY
- + "=" + transactionName);
+ + "=" + finalIfcName + "," + ObjectNameUtil.INSTANCE_NAME_KEY + "="
+ + finalInstanceName + "," + ObjectNameUtil.TRANSACTION_NAME_KEY
+ + "=" + finalTransactionName);
}
public static ObjectName createRuntimeBeanPattern(String moduleName,
String instanceName) {
- moduleName = moduleName == null ? ON_WILDCARD : moduleName;
- instanceName = instanceName == null ? ON_WILDCARD : instanceName;
+ String finalModuleName = moduleName == null ? ON_WILDCARD : moduleName;
+ String finalInstanceName = instanceName == null ? ON_WILDCARD : instanceName;
return ObjectNameUtil.createON(ObjectNameUtil.ON_DOMAIN + ":"
+ ObjectNameUtil.TYPE_KEY + "="
+ ObjectNameUtil.TYPE_RUNTIME_BEAN + ","
- + ObjectNameUtil.MODULE_FACTORY_NAME_KEY + "=" + moduleName
- + "," + ObjectNameUtil.INSTANCE_NAME_KEY + "=" + instanceName
+ + ObjectNameUtil.MODULE_FACTORY_NAME_KEY + "=" + finalModuleName
+ + "," + ObjectNameUtil.INSTANCE_NAME_KEY + "=" + finalInstanceName
+ ",*");
}
public class ConfigRegistryConstants {
+ private ConfigRegistryConstants() {
+ }
+
public static final String TYPE_CONFIG_REGISTRY = "ConfigRegistry";
public static final String ON_DOMAIN = "org.opendaylight.controller";
the actual service-type which is actually required.";
mandatory true;
- type service-type-ref;
+ type leafref {
+ path "/config:services/config:service/config:type";
+ }
}
leaf name {
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>config-manager</artifactId>
*/
@ThreadSafe
public class ConfigRegistryImpl implements AutoCloseable, ConfigRegistryImplMXBean {
- private static final Logger logger = LoggerFactory.getLogger(ConfigRegistryImpl.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConfigRegistryImpl.class);
private final ModuleFactoriesResolver resolver;
private final MBeanServer configMBeanServer;
for (ModuleInternalInfo moduleInternalInfo : currentConfig.getEntries()) {
String name = moduleInternalInfo.getModuleFactory().getImplementationName();
if (allCurrentFactories.containsKey(name) == false) {
- logger.trace("Factory {} not found in SR, using reference from previous commit", name);
+ LOGGER.trace("Factory {} not found in SR, using reference from previous commit", name);
allCurrentFactories.put(name,
Maps.immutableEntry(moduleInternalInfo.getModuleFactory(), moduleInternalInfo.getBundleContext()));
}
throws ConflictingVersionException, ValidationException {
final String transactionName = ObjectNameUtil
.getTransactionName(transactionControllerON);
- logger.trace("About to commit {}. Current parentVersion: {}, versionCounter {}", transactionName, version, versionCounter);
+ LOGGER.trace("About to commit {}. Current parentVersion: {}, versionCounter {}", transactionName, version, versionCounter);
// find ConfigTransactionController
Map<String, Entry<ConfigTransactionControllerInternal, ConfigTransactionLookupRegistry>> transactions = transactionsHolder.getCurrentTransactions();
} catch (Error | RuntimeException t) { // some libs throw Errors: e.g.
// javax.xml.ws.spi.FactoryFinder$ConfigurationError
isHealthy = false;
- logger.error("Configuration Transaction failed on 2PC, server is unhealthy", t);
+ LOGGER.error("Configuration Transaction failed on 2PC, server is unhealthy", t);
if (t instanceof RuntimeException) {
throw (RuntimeException) t;
} else {
int orderingIdx = 0;
for (ModuleIdentifier moduleIdentifier : orderedModuleIdentifiers) {
- logger.trace("Registering {}", moduleIdentifier);
+ LOGGER.trace("Registering {}", moduleIdentifier);
ModuleInternalTransactionalInfo entry = commitInfo.getCommitted()
.get(moduleIdentifier);
if (entry == null) {
configTransactionControllerEntry.getValue().close();
configTransactionController.abortConfig();
} catch (RuntimeException e) {
- logger.warn("Ignoring exception while aborting {}",
+ LOGGER.warn("Ignoring exception while aborting {}",
configTransactionController, e);
}
}
@Override
public Set<ObjectName> lookupRuntimeBeans(String moduleName,
String instanceName) {
- if (moduleName == null) {
- moduleName = "*";
- }
- if (instanceName == null) {
- instanceName = "*";
- }
+ String finalModuleName = moduleName == null ? "*" : moduleName;
+ String finalInstanceName = instanceName == null ? "*" : instanceName;
ObjectName namePattern = ObjectNameUtil.createRuntimeBeanPattern(
- moduleName, instanceName);
+ finalModuleName, finalInstanceName);
return baseJMXRegistrator.queryNames(namePattern, null);
}
* Service Registry.
*/
public void addAll(Collection<ModuleInternalInfo> configInfos) {
- if (currentConfig.size() > 0) {
+ if (!currentConfig.isEmpty()) {
throw new IllegalStateException(
"Error - some config entries were not removed: "
+ currentConfig);
ConfigTransactionControllerInternal,
ConfigTransactionControllerImplMXBean,
Identifiable<TransactionIdentifier> {
- private static final Logger logger = LoggerFactory.getLogger(ConfigTransactionControllerImpl.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConfigTransactionControllerImpl.class);
private final ConfigTransactionLookupRegistry txLookupRegistry;
private final ObjectName controllerON;
boolean isDefaultBean, BundleContext bundleContext)
throws InstanceAlreadyExistsException {
- logger.debug("Adding module {} to transaction {}", moduleIdentifier, this);
+ LOGGER.debug("Adding module {} to transaction {}", moduleIdentifier, this);
if (moduleIdentifier.equals(module.getIdentifier()) == false) {
throw new IllegalStateException("Incorrect name reported by module. Expected "
+ moduleIdentifier + ", got " + module.getIdentifier());
}
private synchronized void destroyModule(ModuleIdentifier moduleIdentifier) {
- logger.debug("Destroying module {} in transaction {}", moduleIdentifier, this);
+ LOGGER.debug("Destroying module {} in transaction {}", moduleIdentifier, this);
transactionStatus.checkNotAborted();
ModuleInternalTransactionalInfo found = dependencyResolverManager.findModuleInternalTransactionalInfo(moduleIdentifier);
- if (blankTransaction == false) {
-
- if (found.isDefaultBean()) {
- logger.warn("Warning: removing default bean. This will be forbidden in next version of config-subsystem");
- }
+ if (blankTransaction == false &&
+ found.isDefaultBean()) {
+ LOGGER.warn("Warning: removing default bean. This will be forbidden in next version of config-subsystem");
}
// first remove refNames, it checks for objectname existence
writableSRRegistry.removeServiceReferences(
ObjectNameUtil.createTransactionModuleON(getTransactionName(), moduleIdentifier));
} catch (InstanceNotFoundException e) {
- logger.error("Possible code error: cannot find {} in {}", moduleIdentifier, writableSRRegistry);
+ LOGGER.error("Possible code error: cannot find {} in {}", moduleIdentifier, writableSRRegistry);
throw new IllegalStateException("Possible code error: cannot find " + moduleIdentifier, e);
}
}
configBeanModificationDisabled.set(true);
try {
- validate_noLocks();
+ validateNoLocks();
} finally {
configBeanModificationDisabled.set(false);
}
}
- private void validate_noLocks() throws ValidationException {
+ private void validateNoLocks() throws ValidationException {
transactionStatus.checkNotAborted();
- logger.trace("Validating transaction {}", getTransactionIdentifier());
+ LOGGER.trace("Validating transaction {}", getTransactionIdentifier());
// call validate()
List<ValidationException> collectedExceptions = new ArrayList<>();
for (Entry<ModuleIdentifier, Module> entry : dependencyResolverManager
try {
module.validate();
} catch (Exception e) {
- logger.warn("Validation exception in {}", getTransactionName(),
+ LOGGER.warn("Validation exception in {}", getTransactionName(),
e);
collectedExceptions.add(ValidationException
.createForSingleException(name, e));
}
}
- if (collectedExceptions.size() > 0) {
+ if (!collectedExceptions.isEmpty()) {
throw ValidationException
.createFromCollectedValidationExceptions(collectedExceptions);
}
- logger.trace("Validated transaction {}", getTransactionIdentifier());
+ LOGGER.trace("Validated transaction {}", getTransactionIdentifier());
}
/**
transactionStatus.checkNotCommitStarted();
configBeanModificationDisabled.set(true);
try {
- validate_noLocks();
+ validateNoLocks();
} catch (ValidationException e) {
- logger.trace("Commit failed on validation");
+ LOGGER.trace("Commit failed on validation");
configBeanModificationDisabled.set(false); // recoverable error
throw e;
}
+ "to obtain a lock");
}
- logger.trace("Committing transaction {}", getTransactionIdentifier());
+ LOGGER.trace("Committing transaction {}", getTransactionIdentifier());
// call getInstance()
for (Entry<ModuleIdentifier, Module> entry : dependencyResolverManager
Module module = entry.getValue();
ModuleIdentifier name = entry.getKey();
try {
- logger.debug("About to commit {} in transaction {}",
+ LOGGER.debug("About to commit {} in transaction {}",
name, getTransactionIdentifier());
AutoCloseable instance = module.getInstance();
checkNotNull(instance, "Instance is null:{} in transaction {}", name, getTransactionIdentifier());
} catch (Exception e) {
- logger.error("Commit failed on {} in transaction {}", name,
+ LOGGER.error("Commit failed on {} in transaction {}", name,
getTransactionIdentifier(), e);
internalAbort();
throw new IllegalStateException(
// count dependency order
- logger.trace("Committed configuration {}", getTransactionIdentifier());
+ LOGGER.trace("Committed configuration {}", getTransactionIdentifier());
transactionStatus.setCommitted();
return dependencyResolverManager.getSortedModuleIdentifiers();
}
private void internalAbort() {
- logger.trace("Aborting {}", this);
+ LOGGER.trace("Aborting {}", this);
transactionStatus.setAborted();
close();
}
import java.util.concurrent.TimeUnit;
public class DeadlockMonitor implements AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(DeadlockMonitorRunnable.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(DeadlockMonitorRunnable.class);
private static final long WARN_AFTER_MILLIS = 5000;
moduleIdentifierWithNanosStack.push(current);
top = current;
}
- logger.trace("setCurrentlyInstantiatedModule {}, top {}", currentlyInstantiatedModule, top);
+ LOGGER.trace("setCurrentlyInstantiatedModule {}, top {}", currentlyInstantiatedModule, top);
}
public boolean isAlive() {
// is the getInstance() running longer than WARN_AFTER_MILLIS ?
long runningTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - copy.nanoTime);
if (runningTime > WARN_AFTER_MILLIS) {
- logger.warn("{} did not finish after {} ms", copy.moduleIdentifier, runningTime);
+ LOGGER.warn("{} did not finish after {} ms", copy.moduleIdentifier, runningTime);
}
}
try {
interrupt();
}
}
- logger.trace("Exiting {}", this);
+ LOGGER.trace("Exiting {}", this);
}
@Override
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
ModuleIdentifierWithNanos that = (ModuleIdentifierWithNanos) o;
- if (nanoTime != that.nanoTime) return false;
- if (moduleIdentifier != null ? !moduleIdentifier.equals(that.moduleIdentifier) : that.moduleIdentifier != null)
+ if (nanoTime != that.nanoTime) {
return false;
+ }
+ if (moduleIdentifier != null ? !moduleIdentifier.equals(that.moduleIdentifier) : that.moduleIdentifier != null) {
+ return false;
+ }
return true;
}
import org.slf4j.LoggerFactory;
public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceReadableRegistry, SearchableServiceReferenceWritableRegistry {
- private static final Logger logger = LoggerFactory.getLogger(ServiceReferenceRegistryImpl.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ServiceReferenceRegistryImpl.class);
private final Map<String, ModuleFactory> factories;
private final Map<String, Set<String>> factoryNamesToQNames;
boolean skipChecks = true;
newRegistry.saveServiceReference(refNameEntry.getKey(), currentImplementation, skipChecks);
} catch (InstanceNotFoundException e) {
- logger.error("Cannot save service reference({}, {})", refNameEntry.getKey(), currentImplementation);
+ LOGGER.error("Cannot save service reference({}, {})", refNameEntry.getKey(), currentImplementation);
throw new IllegalStateException("Possible code error", e);
}
}
Map<String, Set<String /* QName */>> modifiableFactoryNamesToQNames = new HashMap<>();
Set<ServiceInterfaceAnnotation> allAnnotations = new HashSet<>();
- Set<String /* qName */> allQNames = new HashSet<>();
+ Set<String /* qName */> allQNameSet = new HashSet<>();
for (Entry<String, ModuleFactory> entry : factories.entrySet()) {
if (entry.getKey().equals(entry.getValue().getImplementationName()) == false) {
- logger.error("Possible error in code: Mismatch between supplied and actual name of {}", entry);
+ LOGGER.error("Possible error in code: Mismatch between supplied and actual name of {}", entry);
throw new IllegalArgumentException("Possible error in code: Mismatch between supplied and actual name of " + entry);
}
Set<ServiceInterfaceAnnotation> siAnnotations = InterfacesHelper.getServiceInterfaceAnnotations(entry.getValue());
Set<String> qNames = InterfacesHelper.getQNames(siAnnotations);
allAnnotations.addAll(siAnnotations);
- allQNames.addAll(qNames);
+ allQNameSet.addAll(qNames);
modifiableFactoryNamesToQNames.put(entry.getKey(), Collections.unmodifiableSet(qNames));
}
this.factoryNamesToQNames = Collections.unmodifiableMap(modifiableFactoryNamesToQNames);
- this.allQNames = Collections.unmodifiableSet(allQNames);
+ this.allQNames = Collections.unmodifiableSet(allQNameSet);
// fill namespacesToAnnotations
Map<String /* namespace */, Map<String /* localName */, ServiceInterfaceAnnotation>> modifiableNamespacesToAnnotations =
new HashMap<>();
modifiableNamespacesToAnnotations.put(sia.namespace(), ofNamespace);
}
if (ofNamespace.containsKey(sia.localName())) {
- logger.error("Cannot construct namespacesToAnnotations map, conflict between local names in {}, offending local name: {}, map so far {}",
+ LOGGER.error("Cannot construct namespacesToAnnotations map, conflict between local names in {}, offending local name: {}, map so far {}",
sia.namespace(), sia.localName(), modifiableNamespacesToAnnotations);
throw new IllegalArgumentException("Conflict between local names in " + sia.namespace() + " : " + sia.localName());
}
}
this.namespacesToAnnotations = Collections.unmodifiableMap(modifiableNamespacesToAnnotations);
this.serviceQNamesToAnnotations = Collections.unmodifiableMap(modifiableServiceQNamesToAnnotations);
- logger.trace("factoryNamesToQNames:{}", this.factoryNamesToQNames);
+ LOGGER.trace("factoryNamesToQNames:{}", this.factoryNamesToQNames);
}
@Override
String factoryName = ObjectNameUtil.getFactoryName(objectName);
Set<String> serviceInterfaceAnnotations = factoryNamesToQNames.get(factoryName);
if (serviceInterfaceAnnotations == null) {
- logger.error("Possible error in code: cannot find factory annotations of '{}' extracted from ON {} in {}",
+ LOGGER.error("Possible error in code: cannot find factory annotations of '{}' extracted from ON {} in {}",
factoryName, objectName, factoryNamesToQNames);
throw new IllegalArgumentException("Cannot find factory with name " + factoryName);
}
public synchronized String getServiceInterfaceName(String namespace, String localName) {
Map<String /* localName */, ServiceInterfaceAnnotation> ofNamespace = namespacesToAnnotations.get(namespace);
if (ofNamespace == null) {
- logger.error("Cannot find namespace {} in {}", namespace, namespacesToAnnotations);
+ LOGGER.error("Cannot find namespace {} in {}", namespace, namespacesToAnnotations);
throw new IllegalArgumentException("Cannot find namespace " + namespace);
}
ServiceInterfaceAnnotation sia = ofNamespace.get(localName);
if (sia == null) {
- logger.error("Cannot find local name {} in namespace {}, found only {}", localName, namespace, ofNamespace);
+ LOGGER.error("Cannot find local name {} in namespace {}, found only {}", localName, namespace, ofNamespace);
throw new IllegalArgumentException("Cannot find local name " + localName + " in namespace " + namespace);
}
return sia.value();
try {
on = lookupRegistry.lookupConfigBean(moduleIdentifier.getFactoryName(), moduleIdentifier.getInstanceName());
} catch (InstanceNotFoundException e) {
- logger.error("Cannot find instance {}", moduleIdentifier);
+ LOGGER.error("Cannot find instance {}", moduleIdentifier);
throw new IllegalStateException("Cannot find instance " + moduleIdentifier, e);
}
return on;
ServiceReference serviceReference = new ServiceReference(serviceInterfaceQName, refName);
ModuleIdentifier moduleIdentifier = refNames.get(serviceReference);
if (moduleIdentifier == null) {
- logger.error("Cannot find qname {} and refName {} in {}", serviceInterfaceQName, refName, refName);
+ LOGGER.error("Cannot find qname {} and refName {} in {}", serviceInterfaceQName, refName, refNames);
throw new IllegalArgumentException("Cannot find " + serviceReference);
}
return getObjectName(moduleIdentifier);
Map<String, Map<String, ObjectName>> serviceMapping = getServiceMapping();
Map<String, ObjectName> innerMap = serviceMapping.get(serviceInterfaceQName);
if (innerMap == null) {
- logger.error("Cannot find qname {} in {}", serviceInterfaceQName, refNames);
+ LOGGER.error("Cannot find qname {} in {}", serviceInterfaceQName, refNames);
throw new IllegalArgumentException("Cannot find " + serviceInterfaceQName);
}
return innerMap;
String referenceName = ObjectNameUtil.getReferenceName(objectName);
ServiceReference serviceReference = new ServiceReference(serviceQName, referenceName);
if (refNames.containsKey(serviceReference) == false) {
- logger.warn("Cannot find {} in {}", serviceReference, refNames);
+ LOGGER.warn("Cannot find {} in {}", serviceReference, refNames);
throw new InstanceNotFoundException("Service reference not found:" + objectName);
}
}
// check that service interface name exist
Set<String> serviceInterfaceQNames = factoryNamesToQNames.get(moduleIdentifier.getFactoryName());
if (serviceInterfaceQNames == null) {
- logger.error("Possible error in code: cannot find factoryName {} in {}, {}", moduleIdentifier.getFactoryName(),
+ LOGGER.error("Possible error in code: cannot find factoryName {} in {}, {}", moduleIdentifier.getFactoryName(),
factoryNamesToQNames, moduleIdentifier);
throw new IllegalStateException("Possible error in code: cannot find annotations of existing factory " + moduleIdentifier.getFactoryName());
}
// supplied serviceInterfaceName must exist in this collection
if (serviceInterfaceQNames.contains(serviceReference.getServiceInterfaceQName()) == false) {
- logger.error("Cannot find qName {} with factory name {}, found {}", serviceReference.getServiceInterfaceQName(), moduleIdentifier.getFactoryName(), serviceInterfaceQNames);
+ LOGGER.error("Cannot find qName {} with factory name {}, found {}", serviceReference.getServiceInterfaceQName(), moduleIdentifier.getFactoryName(), serviceInterfaceQNames);
throw new IllegalArgumentException("Cannot find service interface " + serviceReference.getServiceInterfaceQName() + " within factory " + moduleIdentifier.getFactoryName());
}
}
private synchronized void removeServiceReference(ServiceReference serviceReference) throws InstanceNotFoundException {
- logger.debug("Removing service reference {} from {}", serviceReference, this);
+ LOGGER.debug("Removing service reference {} from {}", serviceReference, this);
assertWritable();
// is the qName known?
if (allQNames.contains(serviceReference.getServiceInterfaceQName()) == false) {
- logger.error("Cannot find qname {} in {}", serviceReference.getServiceInterfaceQName(), allQNames);
+ LOGGER.error("Cannot find qname {} in {}", serviceReference.getServiceInterfaceQName(), allQNames);
throw new IllegalArgumentException("Cannot find service interface " + serviceReference.getServiceInterfaceQName());
}
ModuleIdentifier removed = refNames.remove(serviceReference);
private Set<ServiceReference> findServiceReferencesLinkingTo(ObjectName moduleObjectName, Set<String> serviceInterfaceQNames) {
String factoryName = ObjectNameUtil.getFactoryName(moduleObjectName);
if (serviceInterfaceQNames == null) {
- logger.warn("Possible error in code: cannot find factoryName {} in {}, object name {}", factoryName, factoryNamesToQNames, moduleObjectName);
+ LOGGER.warn("Possible error in code: cannot find factoryName {} in {}, object name {}", factoryName, factoryNamesToQNames, moduleObjectName);
throw new IllegalStateException("Possible error in code: cannot find annotations of existing factory " + factoryName);
}
String instanceName = ObjectNameUtil.getInstanceName(moduleObjectName);
*/
final class DependencyResolverImpl implements DependencyResolver,
Comparable<DependencyResolverImpl> {
- private static final Logger logger = LoggerFactory.getLogger(DependencyResolverImpl.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(DependencyResolverImpl.class);
private final ModulesHolder modulesHolder;
private final ModuleIdentifier name;
), jmxAttribute
);
- dependentReadOnlyON = translateServiceRefIfPossible(dependentReadOnlyON);
+ ObjectName newDependentReadOnlyON = translateServiceRefIfPossible(dependentReadOnlyON);
- ModuleIdentifier moduleIdentifier = ObjectNameUtil.fromON(dependentReadOnlyON, ObjectNameUtil
+ ModuleIdentifier moduleIdentifier = ObjectNameUtil.fromON(newDependentReadOnlyON, ObjectNameUtil
.TYPE_MODULE);
ModuleFactory foundFactory = modulesHolder.findModuleFactory(moduleIdentifier, jmxAttribute);
+ "Module name is %s : %s, expected service interface %s, dependent module ON %s , "
+ "attribute %s",
foundFactory.getImplementationName(), foundFactory,
- expectedServiceInterface, dependentReadOnlyON,
+ expectedServiceInterface, newDependentReadOnlyON,
jmxAttribute
);
throw new JmxAttributeValidationException(message, jmxAttribute);
// translate from serviceref to module ON
private ObjectName translateServiceRefIfPossible(ObjectName dependentReadOnlyON) {
- if (ObjectNameUtil.isServiceReference(dependentReadOnlyON)) {
- String serviceQName = ObjectNameUtil.getServiceQName(dependentReadOnlyON);
- String refName = ObjectNameUtil.getReferenceName(dependentReadOnlyON);
- dependentReadOnlyON = ObjectNameUtil.withoutTransactionName( // strip again of transaction name
+ ObjectName translatedDependentReadOnlyON = dependentReadOnlyON;
+ if (ObjectNameUtil.isServiceReference(translatedDependentReadOnlyON)) {
+ String serviceQName = ObjectNameUtil.getServiceQName(translatedDependentReadOnlyON);
+ String refName = ObjectNameUtil.getReferenceName(translatedDependentReadOnlyON);
+ translatedDependentReadOnlyON = ObjectNameUtil.withoutTransactionName( // strip again of transaction name
readableRegistry.lookupConfigBeanByServiceInterfaceName(serviceQName, refName));
}
- return dependentReadOnlyON;
+ return translatedDependentReadOnlyON;
}
/**
"Null parameters not allowed, got %s %s %s", expectedType,
dependentReadOnlyON, jmxAttribute));
}
- dependentReadOnlyON = translateServiceRefIfPossible(dependentReadOnlyON);
+ ObjectName translatedDependentReadOnlyON = translateServiceRefIfPossible(dependentReadOnlyON);
transactionStatus.checkCommitStarted();
transactionStatus.checkNotCommitted();
ModuleIdentifier dependentModuleIdentifier = ObjectNameUtil.fromON(
- dependentReadOnlyON, ObjectNameUtil.TYPE_MODULE);
+ translatedDependentReadOnlyON, ObjectNameUtil.TYPE_MODULE);
Module module = modulesHolder.findModule(dependentModuleIdentifier,
jmxAttribute);
synchronized (this) {
if (expectedBaseClass.isAssignableFrom(deserialized)) {
return (Class<T>) deserialized;
} else {
- logger.error("Cannot resolve class of identity {} : deserialized class {} is not a subclass of {}.",
+ LOGGER.error("Cannot resolve class of identity {} : deserialized class {} is not a subclass of {}.",
identityRef, deserialized, expectedBaseClass);
throw new IllegalArgumentException("Deserialized identity " + deserialized + " cannot be cast to " + expectedBaseClass);
}
@Override
public Object getAttribute(ObjectName name, String attribute)
throws MBeanException, AttributeNotFoundException, InstanceNotFoundException, ReflectionException {
- name = translateServiceRefIfPossible(name);
+ ObjectName newName = translateServiceRefIfPossible(name);
// add transaction name
- name = ObjectNameUtil.withTransactionName(name, transactionName);
- return mBeanServer.getAttribute(name, attribute);
+ newName = ObjectNameUtil.withTransactionName(newName, transactionName);
+ return mBeanServer.getAttribute(newName, attribute);
}
@Override
public <T> T newMXBeanProxy(ObjectName name, Class<T> interfaceClass) {
- name = translateServiceRefIfPossible(name);
+ ObjectName newName = translateServiceRefIfPossible(name);
// add transaction name
- name = ObjectNameUtil.withTransactionName(name, transactionName);
- return JMX.newMXBeanProxy(mBeanServer, name, interfaceClass);
+ newName = ObjectNameUtil.withTransactionName(newName, transactionName);
+ return JMX.newMXBeanProxy(mBeanServer, newName, interfaceClass);
}
}
@Override
protected Object handleInvocation(Object proxy, Method method, Object[] args) throws Throwable {
- boolean isGetInstance = method.getName().equals("getInstance");
+ boolean isGetInstance = "getInstance".equals(method.getName());
if (isGetInstance) {
if (cachedInstance != null) {
return cachedInstance;
*/
public class DestroyedModule implements AutoCloseable,
Comparable<DestroyedModule>, Identifiable<ModuleIdentifier> {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(DestroyedModule.class);
private final ModuleIdentifier identifier;
@Override
public void close() {
- logger.trace("Destroying {}", identifier);
+ LOGGER.trace("Destroying {}", identifier);
try {
instance.close();
} catch (Exception e) {
- logger.error("Error while closing instance of {}", identifier, e);
+ LOGGER.error("Error while closing instance of {}", identifier, e);
}
try {
oldJMXRegistrator.close();
} catch (Exception e) {
- logger.error("Error while closing jmx registrator of {}", identifier, e);
+ LOGGER.error("Error while closing jmx registrator of {}", identifier, e);
}
try {
osgiRegistration.close();
} catch (Exception e) {
- logger.error("Error while closing osgi registration of {}", identifier, e);
+ LOGGER.error("Error while closing osgi registration of {}", identifier, e);
}
}
* a read only wrapper.
*/
abstract class AbstractDynamicWrapper implements DynamicMBeanModuleWrapper {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(AbstractDynamicWrapper.class);
protected final boolean writable;
public Object getAttribute(String attributeName)
throws AttributeNotFoundException, MBeanException,
ReflectionException {
- if (attributeName.equals("MBeanInfo")) {
+ if ("MBeanInfo".equals(attributeName)) {
return getMBeanInfo();
}
result.add(new Attribute(attributeName, value));
} catch (Exception e) {
- logger.debug("Getting attribute {} failed", attributeName, e);
+ LOGGER.debug("Getting attribute {} failed", attributeName, e);
}
}
return result;
&& signature[0].equals(AttributeList.class.getName())) {
return setAttributes((AttributeList) params[0]);
} else {
- logger.debug("Operation not found {} ", actionName);
+ LOGGER.debug("Operation not found {} ", actionName);
throw new UnsupportedOperationException(
format("Operation not found on %s. Method invoke is only supported for getInstance and getAttribute(s) "
+ "method, got actionName %s, params %s, signature %s ",
public class AnnotationsHelper {
+ private AnnotationsHelper() {
+ }
+
/**
* Look for annotation specified by annotationType on method. First observe
* method's class, then its super classes, then all provided interfaces.
for (RequireInterface ri : foundRequireInterfaces) {
foundValues.add(ri.value());
}
- if (foundValues.size() == 0) {
+ if (foundValues.isEmpty()) {
return null;
} else if (foundValues.size() > 1) {
throw new IllegalStateException("Error finding @RequireInterface. "
public Object getAttribute(String attributeName)
throws AttributeNotFoundException, MBeanException,
ReflectionException {
- if (attributeName.equals("getInstance")) {
+ if ("getInstance".equals(attributeName)) {
return getInstance();
}
return super.getAttribute(attributeName);
*/
@ThreadSafe
public class DynamicWritableWrapper extends AbstractDynamicWrapper {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(DynamicWritableWrapper.class);
private final ReadOnlyAtomicBoolean configBeanModificationDisabled;
@Override
public synchronized void setAttribute(Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
+ Attribute newAttribute = attribute;
if (configBeanModificationDisabled.get() == true) {
throw new IllegalStateException("Operation is not allowed now");
}
- if (attribute.getName().equals("Attribute")) {
- setAttribute((Attribute) attribute.getValue());
+ if ("Attribute".equals(newAttribute.getName())) {
+ setAttribute((Attribute) newAttribute.getValue());
return;
}
try {
- if (attribute.getValue() instanceof ObjectName) {
- attribute = fixDependencyAttribute(attribute);
- } else if (attribute.getValue() instanceof ObjectName[]) {
- attribute = fixDependencyListAttribute(attribute);
+ if (newAttribute.getValue() instanceof ObjectName) {
+ newAttribute = fixDependencyAttribute(newAttribute);
+ } else if (newAttribute.getValue() instanceof ObjectName[]) {
+ newAttribute = fixDependencyListAttribute(newAttribute);
}
- internalServer.setAttribute(objectNameInternal, attribute);
+ internalServer.setAttribute(objectNameInternal, newAttribute);
} catch (InstanceNotFoundException e) {
throw new MBeanException(e);
}
}
private Attribute fixDependencyListAttribute(Attribute attribute) {
- AttributeHolder attributeHolder = attributeHolderMap.get(attribute.getName());
+ Attribute newAttribute = attribute;
+ AttributeHolder attributeHolder = attributeHolderMap.get(newAttribute.getName());
if (attributeHolder.getRequireInterfaceOrNull() != null) {
- attribute = new Attribute(attribute.getName(), fixObjectNames((ObjectName[]) attribute.getValue()));
+ newAttribute = new Attribute(newAttribute.getName(), fixObjectNames((ObjectName[]) newAttribute.getValue()));
}
- return attribute;
+ return newAttribute;
}
private Attribute fixDependencyAttribute(Attribute attribute) {
- AttributeHolder attributeHolder = attributeHolderMap.get(attribute.getName());
+ Attribute newAttribute = attribute;
+ AttributeHolder attributeHolder = attributeHolderMap.get(newAttribute.getName());
if (attributeHolder.getRequireInterfaceOrNull() != null) {
- attribute = new Attribute(attribute.getName(), fixObjectName((ObjectName) attribute.getValue()));
+ newAttribute = new Attribute(newAttribute.getName(), fixObjectName((ObjectName) newAttribute.getValue()));
} else {
- attribute = new Attribute(attribute.getName(), attribute.getValue());
+ newAttribute = new Attribute(newAttribute.getName(), newAttribute.getValue());
}
- return attribute;
+ return newAttribute;
}
private ObjectName[] fixObjectNames(ObjectName[] dependencies) {
setAttribute(attribute);
result.add(attribute);
} catch (Exception e) {
- logger.warn("Setting attribute {} failed on {}", attribute.getName(), moduleIdentifier, e);
+ LOGGER.warn("Setting attribute {} failed on {}", attribute.getName(), moduleIdentifier, e);
throw new IllegalArgumentException(
"Setting attribute failed - " + attribute.getName()
+ " on " + moduleIdentifier, e);
import java.util.Set;
public class InternalJMXRegistrator implements Closeable {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(InternalJMXRegistrator.class);
private final MBeanServer configMBeanServer;
@GuardedBy("this")
private final Set<ObjectName> registeredObjectNames = new HashSet<>();
+ @GuardedBy("this")
private final List<InternalJMXRegistrator> children = new ArrayList<>();
public synchronized InternalJMXRegistration registerMBean(Object object,
}
}
- public InternalJMXRegistrator createChild() {
- InternalJMXRegistrator child = new InternalJMXRegistrator(
- configMBeanServer);
+ public synchronized InternalJMXRegistrator createChild() {
+ InternalJMXRegistrator child = new InternalJMXRegistrator(configMBeanServer);
children.add(child);
return child;
}
try {
configMBeanServer.unregisterMBean(on);
} catch (Exception e) {
- logger.warn("Ignoring error while unregistering {}", on, e);
+ LOGGER.warn("Ignoring error while unregistering {}", on, e);
}
}
registeredObjectNames.clear();
return getSameNames(result);
}
- private Set<ObjectName> getSameNames(Set<ObjectName> superSet) {
+ private synchronized Set<ObjectName> getSameNames(Set<ObjectName> superSet) {
Set<ObjectName> result = new HashSet<>(superSet);
result.retainAll(registeredObjectNames);
for (InternalJMXRegistrator child : children) {
public ModuleJMXRegistration registerMBean(Object object, ObjectName on)
throws InstanceAlreadyExistsException {
ObjectNameUtil.checkType(on, ObjectNameUtil.TYPE_MODULE);
- if (ObjectNameUtil.getTransactionName(on) != null)
+ if (ObjectNameUtil.getTransactionName(on) != null) {
throw new IllegalArgumentException(
"Transaction name not expected in " + on);
+ }
return new ModuleJMXRegistration(childJMXRegistrator.registerMBean(
object, on));
}
public static class OsgiRegistration implements AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(OsgiRegistration.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(OsgiRegistration.class);
@GuardedBy("this")
private AutoCloseable instance;
try {
serviceRegistration.unregister();
} catch(IllegalStateException e) {
- logger.trace("Cannot unregister {}", serviceRegistration, e);
+ LOGGER.trace("Cannot unregister {}", serviceRegistration, e);
}
}
serviceRegistrations.clear();
notEquals |= newAnnotationMapping.equals(serviceNamesToAnnotations) == false;
if (notEquals) {
// FIXME: changing from old state to new state can be improved by computing the diff
- logger.debug("Detected change in service registrations for {}: old: {}, new: {}", moduleIdentifier,
+ LOGGER.debug("Detected change in service registrations for {}: old: {}, new: {}", moduleIdentifier,
serviceNamesToAnnotations, newAnnotationMapping);
close();
this.instance = newInstance;
* functionality.
*/
public class BlankTransactionServiceTracker implements ServiceTrackerCustomizer<ModuleFactory, Object> {
- private static final Logger logger = LoggerFactory.getLogger(BlankTransactionServiceTracker.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(BlankTransactionServiceTracker.class);
public static final int DEFAULT_MAX_ATTEMPTS = 10;
try {
// create transaction
CommitStatus commitStatus = blankTransaction.hit();
- logger.debug("Committed blank transaction with status {}", commitStatus);
+ LOGGER.debug("Committed blank transaction with status {}", commitStatus);
return;
} catch (ConflictingVersionException e) {
lastException = e;
throw new IllegalStateException(interruptedException);
}
} catch (ValidationException e) {
- logger.error("Validation exception while running blank transaction indicates programming error", e);
+ LOGGER.error("Validation exception while running blank transaction indicates programming error", e);
throw new RuntimeException("Validation exception while running blank transaction indicates programming error", e);
}
}
*/
public class BundleContextBackedModuleFactoriesResolver implements
ModuleFactoriesResolver {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(BundleContextBackedModuleFactoriesResolver.class);
private final BundleContext bundleContext;
if (serviceReference.getBundle() == null || serviceReference.getBundle().getBundleContext() == null) {
throw new NullPointerException("Bundle context of " + factory + " ModuleFactory not found.");
}
- logger.debug("Reading factory {} {}", moduleName, factory);
+ LOGGER.debug("Reading factory {} {}", moduleName, factory);
Map.Entry<ModuleFactory, BundleContext> conflicting = result.get(moduleName);
if (conflicting != null) {
String error = String
.format("Module name is not unique. Found two conflicting factories with same name '%s': '%s' '%s'",
moduleName, conflicting.getKey(), factory);
- logger.error(error);
+ LOGGER.error(error);
throw new IllegalArgumentException(error);
} else {
result.put(moduleName, new AbstractMap.SimpleImmutableEntry<>(factory,
*/
public class ModuleFactoryBundleTracker implements BundleTrackerCustomizer<Object> {
private final BlankTransactionServiceTracker blankTransactionServiceTracker;
- private static final Logger logger = LoggerFactory.getLogger(ModuleFactoryBundleTracker.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ModuleFactoryBundleTracker.class);
public ModuleFactoryBundleTracker(BlankTransactionServiceTracker blankTransactionServiceTracker) {
this.blankTransactionServiceTracker = blankTransactionServiceTracker;
@Override
public Object addingBundle(Bundle bundle, BundleEvent event) {
URL resource = bundle.getEntry("META-INF/services/" + ModuleFactory.class.getName());
- logger.trace("Got addingBundle event of bundle {}, resource {}, event {}",
+ LOGGER.trace("Got addingBundle event of bundle {}, resource {}, event {}",
bundle, resource, event);
if (resource != null) {
try (InputStream inputStream = resource.openStream()) {
registerFactory(factoryClassName, bundle);
}
} catch (Exception e) {
- logger.error("Error while reading {}", resource, e);
+ LOGGER.error("Error while reading {}", resource, e);
throw new RuntimeException(e);
}
}
Class<?> clazz = bundle.loadClass(factoryClassName);
if (ModuleFactory.class.isAssignableFrom(clazz)) {
try {
- logger.debug("Registering {} in bundle {}",
+ LOGGER.debug("Registering {} in bundle {}",
clazz.getName(), bundle);
return bundle.getBundleContext().registerService(
ModuleFactory.class.getName(), clazz.newInstance(),
}
public static String logMessage(String slfMessage, Object... params) {
- logger.info(slfMessage, params);
+ LOGGER.info(slfMessage, params);
String formatMessage = slfMessage.replaceAll("\\{\\}", "%s");
return format(formatMessage, params);
}
*/
public final class ModuleInfoBundleTracker implements BundleTrackerCustomizer<Collection<ObjectRegistration<YangModuleInfo>>> {
- private static final Logger logger = LoggerFactory.getLogger(ModuleInfoBundleTracker.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ModuleInfoBundleTracker.class);
public static final String MODULE_INFO_PROVIDER_PATH_PREFIX = "META-INF/services/";
@Override
public Collection<ObjectRegistration<YangModuleInfo>> addingBundle(Bundle bundle, BundleEvent event) {
URL resource = bundle.getEntry(MODULE_INFO_PROVIDER_PATH_PREFIX + YangModelBindingProvider.class.getName());
- logger.debug("Got addingBundle({}) with YangModelBindingProvider resource {}", bundle, resource);
+ LOGGER.debug("Got addingBundle({}) with YangModelBindingProvider resource {}", bundle, resource);
if(resource==null) {
return null;
}
try (InputStream inputStream = resource.openStream()) {
List<String> lines = IOUtils.readLines(inputStream);
for (String moduleInfoName : lines) {
- logger.trace("Retrieve ModuleInfo({}, {})", moduleInfoName, bundle);
+ LOGGER.trace("Retrieve ModuleInfo({}, {})", moduleInfoName, bundle);
YangModuleInfo moduleInfo = retrieveModuleInfo(moduleInfoName, bundle);
registrations.add(moduleInfoRegistry.registerModuleInfo(moduleInfo));
}
} catch (Exception e) {
- logger.error("Error while reading {}", resource, e);
+ LOGGER.error("Error while reading {}", resource, e);
throw new RuntimeException(e);
}
- logger.trace("Got following registrations {}", registrations);
+ LOGGER.trace("Got following registrations {}", registrations);
return registrations;
}
} catch (NoClassDefFoundError e) {
- logger.error("Error while executing getModuleInfo on {}", instance, e);
+ LOGGER.error("Error while executing getModuleInfo on {}", instance, e);
throw e;
}
}
}
public static String logMessage(String slfMessage, Object... params) {
- logger.info(slfMessage, params);
+ LOGGER.info(slfMessage, params);
String formatMessage = slfMessage.replaceAll("\\{\\}", "%s");
return format(formatMessage, params);
}
public class InterfacesHelper {
+ private InterfacesHelper() {
+ }
+
public static Set<Class<?>> getAllInterfaces(Class<?> clazz) {
if (clazz.isInterface()) {
throw new IllegalArgumentException(clazz
}
private static Set<Class<?>> getAllSuperInterfaces(Set<Class<?>> ifcs) {
- ifcs = new HashSet<>(ifcs); // create copy to modify
+ Set<Class<?>> interfaces = new HashSet<>(ifcs); // create copy to modify
// each interface can extend other interfaces
Set<Class<?>> result = new HashSet<>();
- while (ifcs.size() > 0) {
- Iterator<Class<?>> iterator = ifcs.iterator();
+ while (!interfaces.isEmpty()) {
+ Iterator<Class<?>> iterator = interfaces.iterator();
Class<?> ifc = iterator.next();
iterator.remove();
if (ifc.isInterface() == false) {
throw new IllegalArgumentException(ifc + " should be an interface");
}
- ifcs.addAll(Arrays.asList(ifc.getInterfaces()));
+ interfaces.addAll(Arrays.asList(ifc.getInterfaces()));
result.add(ifc);
}
return result;
public class LookupBeansUtil {
+ private LookupBeansUtil() {
+ }
+
public static ObjectName lookupConfigBean(LookupRegistry lookupRegistry,
String moduleName, String instanceName)
throws InstanceNotFoundException {
Set<ObjectName> objectNames = lookupRegistry.lookupConfigBeans(
moduleName, instanceName);
- if (objectNames.size() == 0) {
+ if (objectNames.isEmpty()) {
throw new InstanceNotFoundException("No instance found");
} else if (objectNames.size() > 1) {
throw new InstanceNotFoundException("Too many instances found");
public class ModuleQNameUtil {
+ private ModuleQNameUtil() {
+ }
+
public static Set<String> getQNames(Map<String, Entry<ModuleFactory, BundleContext>> resolved) {
Set<String> result = new HashSet<>();
for (Entry<ModuleFactory, BundleContext> entry : resolved.values()) {
import org.slf4j.LoggerFactory;
public class OsgiRegistrationUtil {
- private static final Logger logger = LoggerFactory.getLogger(OsgiRegistrationUtil.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(OsgiRegistrationUtil.class);
+
+ private OsgiRegistrationUtil() {
+ }
@SafeVarargs
public static <T> AutoCloseable registerService(BundleContext bundleContext, T service, Class<? super T> ... interfaces) {
try {
ac.close();
} catch (Exception e) {
- logger.warn("Exception while closing {}", ac, e);
+ LOGGER.warn("Exception while closing {}", ac, e);
if (firstException == null) {
firstException = e;
} else {
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>config-module-archetype</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>config-netty-config</artifactId>
<description>Configuration files for sal-rest-connector</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>config-persister-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>config-persister-directory-xml-adapter</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
* Delegates the the contained feature and provides additional methods.
*/
public class AbstractFeatureWrapper implements Feature {
- private static final Logger logger = LoggerFactory.getLogger(AbstractFeatureWrapper.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(AbstractFeatureWrapper.class);
protected Feature feature = null;
protected AbstractFeatureWrapper() {
try {
snapShotHolders.add(new FeatureConfigSnapshotHolder(c,this));
} catch (JAXBException e) {
- logger.debug("{} is not a config subsystem config file",c.getFinalname());
+ LOGGER.debug("{} is not a config subsystem config file",c.getFinalname());
}
}
return snapShotHolders;
@Override
public boolean equals(Object obj) {
- if (this == obj)
+ if (this == obj) {
return true;
- if (obj == null)
+ }
+ if (obj == null) {
return false;
- if (getClass() != obj.getClass())
+ }
+ if (getClass() != obj.getClass()) {
return false;
+ }
AbstractFeatureWrapper other = (AbstractFeatureWrapper) obj;
if (feature == null) {
- if (other.feature != null)
+ if (other.feature != null) {
return false;
- } else if (!feature.equals(other.feature))
+ }
+ } else if (!feature.equals(other.feature)) {
return false;
+ }
return true;
}
* Delegates the the contained feature and provides additional methods.
*/
public class ChildAwareFeatureWrapper extends AbstractFeatureWrapper implements Feature {
- private static final Logger logger = LoggerFactory.getLogger(ChildAwareFeatureWrapper.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ChildAwareFeatureWrapper.class);
private FeaturesService featuresService= null;
protected ChildAwareFeatureWrapper(Feature f) {
f = new FeatureConfigSnapshotHolder(h,this);
snapShotHolders.add(f);
} catch (JAXBException e) {
- logger.debug("{} is not a config subsystem config file",h.getFileInfo().getFinalname());
+ LOGGER.debug("{} is not a config subsystem config file",h.getFileInfo().getFinalname());
}
}
}
for(Feature f: features) {
if (f.getName().equals(dependency.getName())) {
Version v = VersionTable.getVersion(f.getVersion());
- if (range.contains(v)) {
- if (fi == null || VersionTable.getVersion(fi.getVersion()).compareTo(v) < 0) {
+ if (range.contains(v) &&
+ (fi == null || VersionTable.getVersion(fi.getVersion()).compareTo(v) < 0)) {
fi = f;
break;
- }
}
}
}
import org.slf4j.LoggerFactory;
public class ConfigFeaturesListener implements FeaturesListener, AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(ConfigFeaturesListener.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConfigFeaturesListener.class);
private static final int QUEUE_SIZE = 1000;
private BlockingQueue<FeatureEvent> queue = new LinkedBlockingQueue<FeatureEvent>(QUEUE_SIZE);
Thread pushingThread = null;
@Override
public void repositoryEvent(RepositoryEvent event) {
- logger.debug("Repository: " + event.getType() + " " + event.getRepository());
+ LOGGER.debug("Repository: " + event.getType() + " " + event.getRepository());
}
@Override
import org.slf4j.LoggerFactory;
public class ConfigPusherCustomizer implements ServiceTrackerCustomizer<ConfigPusher, ConfigPusher>, AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(ConfigPusherCustomizer.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConfigPusherCustomizer.class);
private ConfigFeaturesListener configFeaturesListener = null;
private FeatureServiceCustomizer featureServiceCustomizer = null;
private ServiceTracker<FeaturesService,FeaturesService> fsst = null;
@Override
public ConfigPusher addingService(ServiceReference<ConfigPusher> configPusherServiceReference) {
- logger.trace("Got ConfigPusherCustomizer.addingService {}", configPusherServiceReference);
+ LOGGER.trace("Got ConfigPusherCustomizer.addingService {}", configPusherServiceReference);
BundleContext bc = configPusherServiceReference.getBundle().getBundleContext();
ConfigPusher cpService = bc.getService(configPusherServiceReference);
featureServiceCustomizer = new FeatureServiceCustomizer(cpService);
import com.google.common.collect.LinkedHashMultimap;
public class ConfigPushingRunnable implements Runnable {
- private static final Logger logger = LoggerFactory.getLogger(ConfigPushingRunnable.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ConfigPushingRunnable.class);
private static final int POLL_TIME = 1;
private BlockingQueue<FeatureEvent> queue;
private FeatureConfigPusher configPusher;
processFeatureEvent(event,toInstall);
}
} else if(toInstall.isEmpty()) {
- logger.error("ConfigPushingRunnable - exiting");
+ LOGGER.error("ConfigPushingRunnable - exiting");
return;
}
} catch (InterruptedException e) {
- logger.error("ConfigPushingRunnable - interupted");
+ LOGGER.error("ConfigPushingRunnable - interupted");
interuppted = true;
} catch (Exception e) {
- logger.error("Exception while processing features {}", e);
+ LOGGER.error("Exception while processing features {}", e);
}
}
}
protected void logPushResult(LinkedHashMultimap<Feature,FeatureConfigSnapshotHolder> results) {
for(Feature f:results.keySet()) {
- logger.info("Pushed configs for feature {} {}",f,results.get(f));
+ LOGGER.info("Pushed configs for feature {} {}",f,results.get(f));
}
}
}
* Simple class to push configs to the config subsystem from Feature's configfiles
*/
public class FeatureConfigPusher {
- private static final Logger logger = LoggerFactory.getLogger(FeatureConfigPusher.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(FeatureConfigPusher.class);
private static final int MAX_RETRIES=100;
private static final int RETRY_PAUSE_MILLIS=1;
private FeaturesService featuresService = null;
if(installedFeatures.contains(feature)) {
return true;
} else {
- logger.warn("Karaf featuresService.listInstalledFeatures() has not yet finished installing feature (retry {}) {} {}",retries,feature.getName(),feature.getVersion());
+ LOGGER.warn("Karaf featuresService.listInstalledFeatures() has not yet finished installing feature (retry {}) {} {}",retries,feature.getName(),feature.getVersion());
}
} catch (Exception e) {
if(retries < MAX_RETRIES) {
- logger.warn("Karaf featuresService.listInstalledFeatures() has thrown an exception, retry {}, Exception {}", retries,e);
+ LOGGER.warn("Karaf featuresService.listInstalledFeatures() has thrown an exception, retry {}, Exception {}", retries,e);
} else {
- logger.error("Giving up on Karaf featuresService.listInstalledFeatures() which has thrown an exception, retry {}, Exception {}", retries,e);
+ LOGGER.error("Giving up on Karaf featuresService.listInstalledFeatures() which has thrown an exception, retry {}, Exception {}", retries,e);
throw e;
}
}
throw new IllegalStateException(e1);
}
}
- logger.error("Giving up (after {} retries) on Karaf featuresService.listInstalledFeatures() which has not yet finished installing feature {} {}",MAX_RETRIES,feature.getName(),feature.getVersion());
+ LOGGER.error("Giving up (after {} retries) on Karaf featuresService.listInstalledFeatures() which has not yet finished installing feature {} {}",MAX_RETRIES,feature.getName(),feature.getVersion());
return false;
}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>config-persister-file-xml-adapter</artifactId>
* StorageAdapter that stores configuration in an xml file.
*/
public class XmlFileStorageAdapter implements StorageAdapter, Persister {
- private static final Logger logger = LoggerFactory.getLogger(XmlFileStorageAdapter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(XmlFileStorageAdapter.class);
public static final String FILE_STORAGE_PROP = "fileStorage";
public static final String NUMBER_OF_BACKUPS = "numberOfBackups";
@Override
public Persister instantiate(PropertiesProvider propertiesProvider) {
File storage = extractStorageFileFromProperties(propertiesProvider);
- logger.debug("Using file {}", storage.getAbsolutePath());
+ LOGGER.debug("Using file {}", storage.getAbsolutePath());
// Create file if it does not exist
File parentFile = storage.getAbsoluteFile().getParentFile();
if (parentFile.exists() == false) {
- logger.debug("Creating parent folders {}", parentFile);
+ LOGGER.debug("Creating parent folders {}", parentFile);
parentFile.mkdirs();
}
if (storage.exists() == false) {
- logger.debug("Storage file does not exist, creating empty file");
+ LOGGER.debug("Storage file does not exist, creating empty file");
try {
boolean result = storage.createNewFile();
if (result == false)
} else {
numberOfStoredBackups = Integer.MAX_VALUE;
}
- logger.trace("Property {} set to {}", NUMBER_OF_BACKUPS, numberOfStoredBackups);
+ LOGGER.trace("Property {} set to {}", NUMBER_OF_BACKUPS, numberOfStoredBackups);
return result;
}
Optional<ConfigSnapshot> lastSnapshot = Config.fromXml(storage).getLastSnapshot();
- if (lastSnapshot.isPresent())
+ if (lastSnapshot.isPresent()) {
return Lists.newArrayList(toConfigSnapshot(lastSnapshot.get()));
- else
+ } else {
return Collections.emptyList();
+ }
}
@Override
public String toString() {
- final StringBuffer sb = new StringBuffer("ConfigSnapshot{");
+ final StringBuilder sb = new StringBuilder("ConfigSnapshot{");
sb.append("configSnapshot='").append(configSnapshot).append('\'');
sb.append(", capabilities=").append(capabilities);
sb.append('}');
final class StringTrimAdapter extends XmlAdapter<String, String> {
@Override
public String unmarshal(String v) throws Exception {
- if (v == null)
+ if (v == null) {
return null;
+ }
return v.trim();
}
@Override
public String marshal(String v) throws Exception {
- if (v == null)
+ if (v == null) {
return null;
+ }
return v.trim();
}
}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>config-plugin-parent</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>config-util</artifactId>
*/
@Deprecated
public <T> T newMBeanProxy(ObjectName on, Class<T> clazz) {
- on = translateServiceRefIfPossible(on, clazz, configMBeanServer);
- return JMX.newMBeanProxy(configMBeanServer, on, clazz);
+ ObjectName onObj = translateServiceRefIfPossible(on, clazz, configMBeanServer);
+ return JMX.newMBeanProxy(configMBeanServer, onObj, clazz);
}
static ObjectName translateServiceRefIfPossible(ObjectName on, Class<?> clazz, MBeanServer configMBeanServer) {
- if (ObjectNameUtil.isServiceReference(on) && clazz.equals(ServiceReferenceMXBean.class) == false) {
- ServiceReferenceMXBean proxy = JMX.newMXBeanProxy(configMBeanServer, on, ServiceReferenceMXBean.class);
- on = proxy.getCurrentImplementation();
+ ObjectName onObj = on;
+ if (ObjectNameUtil.isServiceReference(onObj) && clazz.equals(ServiceReferenceMXBean.class) == false) {
+ ServiceReferenceMXBean proxy = JMX.newMXBeanProxy(configMBeanServer, onObj, ServiceReferenceMXBean.class);
+ onObj = proxy.getCurrentImplementation();
}
- return on;
+ return onObj;
}
}
public <T> T newMXBeanProxy(ObjectName on, Class<T> clazz) {
+ ObjectName onName = on;
// if on is without transaction, add it. Reason is that when using getters on MXBeans the transaction name is stripped
- on = ObjectNameUtil.withTransactionName(on, getTransactionName());
+ onName = ObjectNameUtil.withTransactionName(onName, getTransactionName());
// if this is service reference and user requests for implementation, look it up
- on = ConfigRegistryJMXClient.translateServiceRefIfPossible(on, clazz, configMBeanServer);
- on = ObjectNameUtil.withTransactionName(on, getTransactionName());
- return JMX.newMXBeanProxy(configMBeanServer, on, clazz);
+ onName = ConfigRegistryJMXClient.translateServiceRefIfPossible(onName, clazz, configMBeanServer);
+ onName = ObjectNameUtil.withTransactionName(onName, getTransactionName());
+ return JMX.newMXBeanProxy(configMBeanServer, onName, clazz);
}
/**
@Override
public void setAttribute(ObjectName on, String attrName, Attribute attribute) {
- if (ObjectNameUtil.getTransactionName(on) == null)
+ if (ObjectNameUtil.getTransactionName(on) == null) {
throw new IllegalArgumentException("Not in transaction instance "
+ on + ", no transaction name present");
+ }
try {
configMBeanServer.setAttribute(on, attribute);
@Override
public Attribute getAttribute(ObjectName on, String attrName) {
- if (ObjectNameUtil.getTransactionName(on) == null)
+ if (ObjectNameUtil.getTransactionName(on) == null) {
throw new IllegalArgumentException("Not in transaction instance "
+ on + ", no transaction name present");
+ }
try {
return new Attribute(attrName, configMBeanServer.getAttribute(on,attrName));
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>logback-config-loader</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>logback-config</artifactId>
public class ContextSetterImpl implements ContextSetter, Closeable {
private final LogbackStatusListener statusListener;
- private static final org.slf4j.Logger classLogger = LoggerFactory.getLogger(ContextSetterImpl.class);
+ private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(ContextSetterImpl.class);
public ContextSetterImpl(LogbackRuntimeRegistrator rootRuntimeBeanRegistratorWrapper) {
statusListener = new LogbackStatusListener(rootRuntimeBeanRegistratorWrapper);
Map<String, Appender<ILoggingEvent>> appendersMap = getAppenders(module, context);
for (LoggerTO logger : module.getLoggerTO()) {
- classLogger.trace("Setting configuration for logger {}", logger.getLoggerName());
+ LOGGER.trace("Setting configuration for logger {}", logger.getLoggerName());
final ch.qos.logback.classic.Logger logbackLogger = context.getLogger(logger.getLoggerName());
Optional<Set<Appender<ILoggingEvent>>> appendersBefore = getAppendersBefore(loggersBefore, logbackLogger);
- classLogger.trace("Logger {}: Appenders registered before: {}", logger.getLoggerName(),
+ LOGGER.trace("Logger {}: Appenders registered before: {}", logger.getLoggerName(),
appendersBefore.isPresent() ? appendersBefore.get() : "NO APPENDERS BEFORE");
logbackLogger.setLevel(Level.toLevel(logger.getLevel()));
for (String appenderName : logger.getAppenders()) {
if (appendersMap.containsKey(appenderName)) {
logbackLogger.addAppender(appendersMap.get(appenderName));
- classLogger.trace("Logger {}: Adding new appender: {}", logger.getLoggerName(), appenderName);
+ LOGGER.trace("Logger {}: Adding new appender: {}", logger.getLoggerName(), appenderName);
} else {
throw new IllegalStateException("No appender " + appenderName
+ " found. This error should have been discovered by validation");
for (Appender<ILoggingEvent> appenderBefore : appendersBefore.get()) {
logbackLogger.detachAppender(appenderBefore);
appenderBefore.stop();
- classLogger.trace("Logger {}: Removing old appender: {}", logger.getLoggerName(),
+ LOGGER.trace("Logger {}: Removing old appender: {}", logger.getLoggerName(),
appenderBefore.getName());
}
loggersBefore.remove(logbackLogger);
appendersBefore.add(appenderIt.next());
}
return Optional.of(appendersBefore);
- } else
+ } else {
return Optional.absent();
+ }
}
context.getLogger(Logger.ROOT_LOGGER_NAME));
for (org.slf4j.Logger log : loggersToBeAdd) {
LoggerTO logger = new LoggerTO();
- if (((Logger) log).getLevel() != null)
+ if (((Logger) log).getLevel() != null) {
logger.setLevel(((Logger) log).getLevel().levelStr);
- else
+ } else {
logger.setLevel(((Logger) log).getEffectiveLevel().levelStr);
+ }
logger.setLoggerName(log.getName());
Iterator<Appender<ILoggingEvent>> iter = ((Logger) log).iteratorForAppenders();
while (iter.hasNext()) {
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>netty-config-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>netty-event-executor-config</artifactId>
public class ImmediateEventExecutorModuleTest extends AbstractConfigTest {
- private GlobalEventExecutorModuleFactory factory;
+ private ImmediateEventExecutorModuleFactory factory;
private final String instanceName = ImmediateEventExecutorModuleFactory.SINGLETON_NAME;
@Before
public void setUp() {
- factory = new GlobalEventExecutorModuleFactory();
+ factory = new ImmediateEventExecutorModuleFactory();
super.initConfigTransactionManagerImpl(new HardcodedModuleFactoriesResolver(mockedContext,factory));
}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>netty-threadgroup-config</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>netty-timer-config</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<prerequisites>
<configuration>
<outputDirectory>${project.build.directory}/jacoco</outputDirectory>
<haltOnFailure>false</haltOnFailure>
- <check>
- <classRatio>80</classRatio>
- </check>
+ <rules>
+ <rule>
+ <element>CLASS</element>
+ <excludes>
+ <exclude>*Test</exclude>
+ </excludes>
+ <limits>
+ <limit>
+ <counter>LINE</counter>
+ <value>COVEREDRATIO</value>
+ <minimum>0.50</minimum>
+ </limit>
+ </limits>
+ </rule>
+ </rules>
</configuration>
</execution>
</executions>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>shutdown-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>shutdown-impl</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>threadpool-config-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>threadpool-config-impl</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>yang-jmx-generator-it</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>yang-jmx-generator-plugin</artifactId>
final class CodeWriter {
- private static final Logger logger = LoggerFactory.getLogger(CodeWriter.class);
- private static final Optional<String> copyright = StringUtil.loadCopyright();
+ private static final Logger LOGGER = LoggerFactory.getLogger(CodeWriter.class);
+ private static final Optional<String> COPYRIGHT = StringUtil.loadCopyright();
public File writeSie(ServiceInterfaceEntry sie, File outputBaseDir) {
try {
GeneralInterfaceTemplate generalInterfaceTemplate = TemplateFactory.serviceInterfaceFromSie(sie);
- GeneratedObject go = new GenericGeneratedObjectFactory().toGeneratedObject(generalInterfaceTemplate, copyright);
+ GeneratedObject go = new GenericGeneratedObjectFactory().toGeneratedObject(generalInterfaceTemplate, COPYRIGHT);
return go.persist(outputBaseDir).get().getValue();
} catch (Exception e) {
String message = "An error occurred during Service interface generating, sie:"
+ sie.getTypeName() + ", " + sie.getFullyQualifiedName();
- logger.error(message, e);
+ LOGGER.error(message, e);
throw new RuntimeException(message, e);
}
}
// TOs
Map<String,GeneralClassTemplate> tosFromMbe = TemplateFactory.tOsFromMbe(mbe);
for(GeneralClassTemplate template: tosFromMbe.values()) {
- gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(template, copyright), true);
+ gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(template, COPYRIGHT), true);
}
// MXBean interface
GeneralInterfaceTemplate ifcTemplate = TemplateFactory.mXBeanInterfaceTemplateFromMbe(mbe);
- gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(ifcTemplate, copyright), true);
+ gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(ifcTemplate, COPYRIGHT), true);
// generate abstract factory
- gos.put(new AbsFactoryGeneratedObjectFactory().toGeneratedObject(mbe, copyright), true);
+ gos.put(new AbsFactoryGeneratedObjectFactory().toGeneratedObject(mbe, COPYRIGHT), true);
// generate abstract module
- gos.put(new AbsModuleGeneratedObjectFactory().toGeneratedObject(mbe, copyright), true);
+ gos.put(new AbsModuleGeneratedObjectFactory().toGeneratedObject(mbe, COPYRIGHT), true);
// generate concrete factory
StubFactoryTemplate concreteFactory = TemplateFactory.stubFactoryTemplateFromMbe(mbe);
- gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(concreteFactory, copyright), false);
+ gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(concreteFactory, COPYRIGHT), false);
// generate concrete module
- gos.put(new ConcreteModuleGeneratedObjectFactory().toGeneratedObject(mbe, copyright, Optional.<String>absent()), false);
+ gos.put(new ConcreteModuleGeneratedObjectFactory().toGeneratedObject(mbe, COPYRIGHT, Optional.<String>absent()), false);
// write runtime bean MXBeans and registrators
List<FtlTemplate> allFtlFiles = getRuntimeBeanFtlTemplates(mbe.getRuntimeBeans());
for(FtlTemplate template: allFtlFiles) {
- gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(template, copyright), true);
+ gos.put(new GenericGeneratedObjectFactory().toGeneratedObject(template, COPYRIGHT), true);
}
generatedFiles.addAll(persistGeneratedObjects(targetBaseDir, mainBaseDir, gos));
} catch (Exception e) {
String message = "An error occurred during Module generating, mbe:"
+ mbe.getJavaNamePrefix();
- logger.error(message, e);
+ LOGGER.error(message, e);
throw new RuntimeException(message, e);
}
}
private PackageTranslator packageTranslator;
private final CodeWriter codeWriter;
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(JMXGenerator.class);
private Map<String, String> namespaceToPackageMapping;
private File resourceBaseDir;
packageTranslator = new PackageTranslator(namespaceToPackageMapping);
- if (!outputBaseDir.exists())
+ if (!outputBaseDir.exists()) {
outputBaseDir.mkdirs();
+ }
GeneratedFilesTracker generatedFiles = new GeneratedFilesTracker();
// create SIE structure qNamesToSIEs
Preconditions.checkNotNull(resourceBaseDir,
"resource base dir attribute was null");
- StringBuffer fullyQualifiedNamesOfFactories = new StringBuffer();
+ StringBuilder fullyQualifiedNamesOfFactories = new StringBuilder();
// create MBEs
for (Module module : yangModulesInCurrentMavenModule) {
String packageName = packageTranslator.getPackageName(module);
fullyQualifiedNamesOfFactories.toString());
} catch (IOException e) {
String message = "Cannot write to " + serviceLoaderFile;
- logger.error(message);
+ LOGGER.error(message);
throw new RuntimeException(message, e);
}
}
@Override
public void setAdditionalConfig(Map<String, String> additionalCfg) {
- if (logger != null)
- logger.debug(getClass().getCanonicalName(),
- ": Additional configuration received: ",
- additionalCfg.toString());
+ if (LOGGER != null) {
+ LOGGER.debug("{}: Additional configuration received: {}",
+ getClass().getCanonicalName(), additionalCfg);
+ }
this.namespaceToPackageMapping = extractNamespaceMapping(additionalCfg);
this.generateModuleFactoryFile = extractModuleFactoryBoolean(additionalCfg);
}
private boolean extractModuleFactoryBoolean(
Map<String, String> additionalCfg) {
String bool = additionalCfg.get(MODULE_FACTORY_FILE_BOOLEAN);
- if (bool == null)
+ if (bool == null) {
return true;
- if (bool.equals("false"))
+ }
+ if ("false".equals(bool)) {
return false;
+ }
return true;
}
public void setMavenProject(MavenProject project) {
this.projectBaseDir = project.getBasedir();
- if (logger != null)
- logger.debug(getClass().getCanonicalName(), " project base dir: ",
- projectBaseDir);
+ if (LOGGER != null) {
+ LOGGER.debug("{} project base dir: {}",
+ getClass().getCanonicalName(), projectBaseDir);
+ }
}
}
}
if (undeletedFiles.isEmpty() == false) {
- logger.error(
+ LOGGER.error(
"Illegal state occurred: Unable to delete already generated files, undeleted files: {}",
undeletedFiles);
}
public class AbstractFactoryTemplate extends GeneralClassTemplate {
- private static final List<String> implementedIfcs = Lists
+ private static final List<String> IMPLEMENTED_IFCS = Lists
.newArrayList(ModuleFactory.class.getCanonicalName());
public AbstractFactoryTemplate(Header header, String packageName,
String abstractFactoryName,
List<Field> fields) {
super(header, packageName, abstractFactoryName, Collections
- .<String> emptyList(), implementedIfcs, fields, Collections
+ .<String> emptyList(), IMPLEMENTED_IFCS, fields, Collections
.<MethodDefinition> emptyList(), true, false, Collections
.<Constructor> emptyList());
}
.getFullyQualifiedName(rootRB.getPackageName(), rootRB.getJavaNameOfRuntimeMXBean());
String childRegistratorFQN = rootFtlFile.getFullyQualifiedName();
Field rbParameter = new Field(fullyQualifiedNameOfMXBean, "rb");
- StringBuffer registerBody = new StringBuffer();
+ StringBuilder registerBody = new StringBuilder();
registerBody.append(format("%s %s = this.%s.registerRoot(%s);\n",
HierarchicalRuntimeBeanRegistration.class
.getCanonicalName(), hierachchicalRegistration
unorderedResult.put(entry.getKey(), entry.getValue());
}
- if (childRegistratorMap.size() > 0) {
+ if (!childRegistratorMap.isEmpty()) {
// first entry is the direct descendant according to the create
// contract
RuntimeRegistratorFtlTemplate childRegistrator = childRegistratorMap
.values().iterator().next();
- StringBuffer body = new StringBuffer();
+ StringBuilder body = new StringBuilder();
String key, value;
key = child.getJavaNamePrefix();
body.append(format(
static String serializeType(Type type, boolean addWildcards) {
if (type instanceof ParameterizedType){
ParameterizedType parameterizedType = (ParameterizedType) type;
- StringBuffer sb = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
sb.append(parameterizedType.getRawType().getFullyQualifiedName());
sb.append(addWildcards ? "<? extends " : "<");
boolean first = true;
for (Rpc rpc : rbe.getRpcs()) {
AttributeIfc returnType = rpc.getReturnType();
- if (returnType == VoidAttribute.getInstance())
+ if (returnType == VoidAttribute.getInstance()) {
continue;
- if (returnType instanceof JavaAttribute)
+ }
+ if (returnType instanceof JavaAttribute) {
continue;
- if (returnType instanceof ListAttribute && returnType.getOpenType() instanceof SimpleType)
+ }
+ if (returnType instanceof ListAttribute && returnType.getOpenType() instanceof SimpleType) {
continue;
+ }
Preconditions.checkState(yangPropertiesToTypesMap.containsKey(returnType.getAttributeYangName()) == false,
"Duplicate TO %s for %s", returnType.getAttributeYangName(), rbe);
String returnType;
AttributeIfc attributeIfc = attrEntry.getValue();
- boolean isIdentityRef = false;
if (attributeIfc instanceof TypedAttribute) {
TypedAttribute typedAttribute = (TypedAttribute) attributeIfc;
returnType = serializeType(typedAttribute.getType());
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
public class ConfigConstants {
+ private ConfigConstants() {
+ }
+
public static final String CONFIG_NAMESPACE = "urn:opendaylight:params:xml:ns:yang:controller:config";
public static final String CONFIG_MODULE = "config";
return this;
}
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(ModuleMXBeanEntryBuilder.class);
// TODO: the XPath should be parsed by code generator IMO
private static final String MAGIC_STRING = "MAGIC_STRING";
private static final String MODULE_CONDITION_XPATH_TEMPLATE = "^/MAGIC_STRING:modules/MAGIC_STRING:module/MAGIC_STRING:type\\s*=\\s*['\"](.+)['\"]$";
- private static final SchemaPath expectedConfigurationAugmentationSchemaPath = SchemaPath.create(true,
+ private static final SchemaPath EXPECTED_CONFIGURATION_AUGMENTATION_SCHEMA_PATH = SchemaPath.create(true,
createConfigQName("modules"), createConfigQName("module"), createConfigQName("configuration"));
- private static final SchemaPath expectedStateAugmentationSchemaPath = SchemaPath.create(true,
+ private static final SchemaPath EXPECTED_STATE_AUGMENTATION_SCHEMA_PATH = SchemaPath.create(true,
createConfigQName("modules"), createConfigQName("module"), createConfigQName("state"));
private static final Pattern PREFIX_COLON_LOCAL_NAME = Pattern
.compile("^(.+):(.+)$");
public Map<String, ModuleMXBeanEntry> build() {
- logger.debug("Generating ModuleMXBeans of {} to package {}",
+ LOGGER.debug("Generating ModuleMXBeans of {} to package {}",
currentModule.getNamespace(), packageName);
String configModulePrefix;
checkAttributeNamesUniqueness(uniqueGeneratedClassesNames, result);
checkUnaugumentedIdentities(unaugmentedModuleIdentities);
- logger.debug("Number of ModuleMXBeans to be generated: {}", result.size());
+ LOGGER.debug("Number of ModuleMXBeans to be generated: {}", result.size());
return result;
}
private static void checkUnaugumentedIdentities(final Map<String, IdentitySchemaNode> unaugmentedModuleIdentities) {
if (unaugmentedModuleIdentities.size() > 0) {
- logger.warn("Augmentation not found for all currentModule identities: {}",
+ LOGGER.warn("Augmentation not found for all currentModule identities: {}",
unaugmentedModuleIdentities.keySet());
}
}
+ identityLocalName);
} else {
moduleIdentities.put(identityLocalName, id);
- logger.debug("Found identity {}", identityLocalName);
+ LOGGER.debug("Found identity {}", identityLocalName);
}
// validation check on unknown schema nodes
boolean providedServiceWasSet = false;
HAS_CHILDREN_AND_QNAME dataNodeContainer = getDataNodeContainer(choiceCaseNode);
- if (expectedConfigurationAugmentationSchemaPath.equals(augmentation.getTargetPath())) {
- logger.debug("Parsing configuration of {}", moduleLocalNameFromXPath);
+ if (EXPECTED_CONFIGURATION_AUGMENTATION_SCHEMA_PATH.equals(augmentation.getTargetPath())) {
+ LOGGER.debug("Parsing configuration of {}", moduleLocalNameFromXPath);
yangToAttributes = fillConfiguration(dataNodeContainer, currentModule, typeProviderWrapper, qNamesToSIEs,
schemaContext, packageName);
checkUniqueAttributesWithGeneratedClass(uniqueGeneratedClassesNames, when.getQName(), yangToAttributes);
- } else if (expectedStateAugmentationSchemaPath.equals(augmentation.getTargetPath())) {
- logger.debug("Parsing state of {}", moduleLocalNameFromXPath);
+ } else if (EXPECTED_STATE_AUGMENTATION_SCHEMA_PATH.equals(augmentation.getTargetPath())) {
+ LOGGER.debug("Parsing state of {}", moduleLocalNameFromXPath);
try {
runtimeBeans = fillRuntimeBeans(dataNodeContainer, currentModule, typeProviderWrapper, packageName,
moduleLocalNameFromXPath, javaNamePrefix);
public class ModuleUtil {
+ private ModuleUtil() {
+ }
+
public static QName getQName(final Module currentModule) {
return QName.create(currentModule.getNamespace(), currentModule.getRevision(), currentModule.getName());
}
// TODO add to PackageTranslator
private static String sanitizePackage(String namespace) {
- namespace = namespace.replace("://", ".");
- namespace = namespace.replace("/", ".");
- namespace = namespace.replace(":", ".");
- namespace = namespace.replace("-", "_");
- namespace = namespace.replace("@", ".");
- namespace = namespace.replace("$", ".");
- namespace = namespace.replace("#", ".");
- namespace = namespace.replace("'", ".");
- namespace = namespace.replace("*", ".");
- namespace = namespace.replace("+", ".");
- namespace = namespace.replace(",", ".");
- namespace = namespace.replace(";", ".");
- namespace = namespace.replace("=", ".");
- return namespace;
+ String newNamespace = namespace;
+ newNamespace = newNamespace.replace("://", ".");
+ newNamespace = newNamespace.replace("/", ".");
+ newNamespace = newNamespace.replace(":", ".");
+ newNamespace = newNamespace.replace("-", "_");
+ newNamespace = newNamespace.replace("@", ".");
+ newNamespace = newNamespace.replace("$", ".");
+ newNamespace = newNamespace.replace("#", ".");
+ newNamespace = newNamespace.replace("'", ".");
+ newNamespace = newNamespace.replace("*", ".");
+ newNamespace = newNamespace.replace("+", ".");
+ newNamespace = newNamespace.replace(",", ".");
+ newNamespace = newNamespace.replace(";", ".");
+ newNamespace = newNamespace.replace("=", ".");
+ return newNamespace;
}
}
final Map<QName, Set<RpcDefinition>> identitiesToRpcs) {
List<AttributeIfc> attributes = Lists.newArrayList();
- // List<JavaAttribute> javaAttributes = new ArrayList<>();
- // List<TOAttribute> toAttributes = new ArrayList<>();
List<RuntimeBeanEntry> runtimeBeanEntries = new ArrayList<>();
for (DataSchemaNode child : subtree.getChildNodes()) {
// child leaves can be java attributes, TO attributes, or child
.findJavaParameter(rpcDefinition);
AttributeIfc returnType;
if (rpcDefinition.getOutput() == null
- || rpcDefinition.getOutput().getChildNodes().size() == 0) {
+ || rpcDefinition.getOutput().getChildNodes().isEmpty()) {
returnType = VoidAttribute.getInstance();
} else if (rpcDefinition.getOutput().getChildNodes().size() == 1) {
DataSchemaNode returnDSN = rpcDefinition.getOutput()
currentModule, identitiesToRpcs);
Optional<String> keyYangName;
- if (listSchemaNode.getKeyDefinition().size() == 0) {
+ if (listSchemaNode.getKeyDefinition().isEmpty()) {
keyYangName = Optional.absent();
} else if (listSchemaNode.getKeyDefinition().size() == 1) {
// key must be either null or one of supported key types
* </p>
*/
public class ServiceInterfaceEntry extends AbstractEntry {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOGGER = LoggerFactory
.getLogger(ServiceInterfaceEntry.class);
private static final String CLASS_NAME_SUFFIX = "ServiceInterface";
*/
public static Map<QName, ServiceInterfaceEntry> create(Module currentModule,
String packageName,Map<IdentitySchemaNode, ServiceInterfaceEntry> definedSEItracker) {
- logger.debug("Generating ServiceInterfaces from {} to package {}",
+ LOGGER.debug("Generating ServiceInterfaces from {} to package {}",
currentModule.getNamespace(), packageName);
Map<IdentitySchemaNode, ServiceInterfaceEntry> identitiesToSIs = new HashMap<>();
Set<IdentitySchemaNode> notVisited = new HashSet<>(
currentModule.getIdentities());
int lastSize = notVisited.size() + 1;
- while (notVisited.size() > 0) {
+ while (!notVisited.isEmpty()) {
if (notVisited.size() == lastSize) {
- logger.debug(
+ LOGGER.debug(
"Following identities will be ignored while generating ServiceInterfaces, as they are not derived from {} : {}",
SERVICE_TYPE_Q_NAME, notVisited);
break;
for (ServiceInterfaceEntry sie : identitiesToSIs.values()) {
resultMap.put(sie.getQName(), sie);
}
- logger.debug("Number of ServiceInterfaces to be generated: {}",
+ LOGGER.debug("Number of ServiceInterfaces to be generated: {}",
resultMap.size());
return resultMap;
}
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (o == null || getClass() != o.getClass())
+ }
+ if (o == null || getClass() != o.getClass()) {
return false;
+ }
ServiceInterfaceEntry that = (ServiceInterfaceEntry) o;
- if (!maybeBaseCache.equals(that.maybeBaseCache))
+ if (!maybeBaseCache.equals(that.maybeBaseCache)) {
return false;
- if (!nullableDescription.equals(that.nullableDescription))
+ }
+ if (!nullableDescription.equals(that.nullableDescription)) {
return false;
- if (!exportedOsgiClassName.equals(that.exportedOsgiClassName))
+ }
+ if (!exportedOsgiClassName.equals(that.exportedOsgiClassName)) {
return false;
- if (!qName.equals(that.qName))
+ }
+ if (!qName.equals(that.qName)) {
return false;
- if (!packageName.equals(that.packageName))
+ }
+ if (!packageName.equals(that.packageName)) {
return false;
- if (!typeName.equals(that.typeName))
+ }
+ if (!typeName.equals(that.typeName)) {
return false;
+ }
return true;
}
try {
javaType = typeProvider.javaTypeForSchemaDefinitionType(
type, leaf);
- if (javaType == null)
+ if (javaType == null) {
throw new IllegalArgumentException("Unknown type received for "
+ leaf.toString());
+ }
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Error while resolving type of "
+ leaf, e);
try {
javaType = typeProvider.javaTypeForSchemaDefinitionType(
leaf.getType(), leaf);
- if (javaType == null)
+ if (javaType == null) {
throw new IllegalArgumentException(
"Unknown type received for " + leaf.toString());
+ }
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Error while resolving type of "
+ leaf, e);
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (!(o instanceof AbstractAttribute))
+ }
+ if (!(o instanceof AbstractAttribute)) {
return false;
+ }
AbstractAttribute that = (AbstractAttribute) o;
if (attributeYangName != null ? !attributeYangName
.equals(that.attributeYangName)
- : that.attributeYangName != null)
+ : that.attributeYangName != null) {
return false;
+ }
return true;
}
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (o == null || getClass() != o.getClass())
+ }
+ if (o == null || getClass() != o.getClass()) {
return false;
- if (!super.equals(o))
+ }
+ if (!super.equals(o)) {
return false;
+ }
AbstractDependencyAttribute that = (AbstractDependencyAttribute) o;
if (dependency != null ? !dependency.equals(that.dependency)
- : that.dependency != null)
+ : that.dependency != null) {
return false;
+ }
if (nullableDefault != null ? !nullableDefault
- .equals(that.nullableDefault) : that.nullableDefault != null)
+ .equals(that.nullableDefault) : that.nullableDefault != null) {
return false;
+ }
if (nullableDescription != null ? !nullableDescription
.equals(that.nullableDescription)
- : that.nullableDescription != null)
+ : that.nullableDescription != null) {
return false;
+ }
return true;
}
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (o == null || getClass() != o.getClass())
+ }
+ if (o == null || getClass() != o.getClass()) {
return false;
- if (!super.equals(o))
+ }
+ if (!super.equals(o)) {
return false;
+ }
JavaAttribute that = (JavaAttribute) o;
if (nullableDefault != null ? !nullableDefault
- .equals(that.nullableDefault) : that.nullableDefault != null)
+ .equals(that.nullableDefault) : that.nullableDefault != null) {
return false;
+ }
if (nullableDescription != null ? !nullableDescription
.equals(that.nullableDescription)
- : that.nullableDescription != null)
+ : that.nullableDescription != null) {
return false;
- if (type != null ? !type.equals(that.type) : that.type != null)
+ }
+ if (type != null ? !type.equals(that.type) : that.type != null) {
return false;
+ }
return true;
}
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (o == null || getClass() != o.getClass())
+ }
+ if (o == null || getClass() != o.getClass()) {
return false;
- if (!super.equals(o))
+ }
+ if (!super.equals(o)) {
return false;
+ }
ListAttribute that = (ListAttribute) o;
if (nullableDefault != null ? !nullableDefault
- .equals(that.nullableDefault) : that.nullableDefault != null)
+ .equals(that.nullableDefault) : that.nullableDefault != null) {
return false;
+ }
if (nullableDescription != null ? !nullableDescription
.equals(that.nullableDescription)
- : that.nullableDescription != null)
+ : that.nullableDescription != null) {
return false;
+ }
return true;
}
public class SimpleTypeResolver {
+ private SimpleTypeResolver() {
+ }
+
public static SimpleType<?> getSimpleType(Type type) {
SimpleType<?> expectedSimpleType = JAVA_TYPE_TO_SIMPLE_TYPE.get(type
.getFullyQualifiedName());
TypeProviderWrapper typeProviderWrapper, String packageName) {
Class<? extends DataSchemaNode> type = isAllowedType(dataSchemaNode);
- if (type.equals(LeafSchemaNode.class))
+ if (type.equals(LeafSchemaNode.class)) {
return new JavaAttribute((LeafSchemaNode) dataSchemaNode,
typeProviderWrapper);
- else if (type.equals(ListSchemaNode.class))
+ } else if (type.equals(ListSchemaNode.class)) {
return ListAttribute.create((ListSchemaNode) dataSchemaNode,
typeProviderWrapper, packageName);
- else if (type.equals(LeafListSchemaNode.class))
+ } else if (type.equals(LeafListSchemaNode.class)) {
return ListAttribute.create((LeafListSchemaNode) dataSchemaNode,
typeProviderWrapper);
- else if (type.equals(ContainerSchemaNode.class))
+ } else if (type.equals(ContainerSchemaNode.class)) {
return TOAttribute.create((ContainerSchemaNode) dataSchemaNode,
typeProviderWrapper, packageName);
+ }
throw new IllegalStateException("This should never happen");
}
private static Class<? extends DataSchemaNode> isAllowedType(
DataSchemaNode dataSchemaNode) {
for (Class<? extends DataSchemaNode> allowedType : ALLOWED_CHILDREN) {
- if (allowedType.isAssignableFrom(dataSchemaNode.getClass()) == true)
+ if (allowedType.isAssignableFrom(dataSchemaNode.getClass())) {
return allowedType;
+ }
}
throw new IllegalArgumentException("Illegal child node for TO: "
+ dataSchemaNode.getClass() + " allowed node types: "
@Override
public boolean equals(Object o) {
- if (this == o)
+ if (this == o) {
return true;
- if (o == null || getClass() != o.getClass())
+ }
+ if (o == null || getClass() != o.getClass()) {
return false;
- if (!super.equals(o))
+ }
+ if (!super.equals(o)) {
return false;
+ }
TOAttribute that = (TOAttribute) o;
if (nullableDefault != null ? !nullableDefault
- .equals(that.nullableDefault) : that.nullableDefault != null)
+ .equals(that.nullableDefault) : that.nullableDefault != null) {
return false;
+ }
if (nullableDescription != null ? !nullableDescription
.equals(that.nullableDescription)
- : that.nullableDescription != null)
+ : that.nullableDescription != null) {
return false;
+ }
if (yangNameToAttributeMap != null ? !yangNameToAttributeMap
.equals(that.yangNameToAttributeMap)
- : that.yangNameToAttributeMap != null)
+ : that.yangNameToAttributeMap != null) {
return false;
+ }
return true;
}
final class Util {
+ private Util() {
+ }
/**
* Used for date <-> xml serialization
*/
- private static final SimpleDateFormat dateFormat = new SimpleDateFormat(
+ private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat(
"yyyy-MM-dd");
public static String writeDate(Date date) {
- return dateFormat.format(date);
+ return DATE_FORMAT.format(date);
}
public static Date readDate(String s) throws ParseException {
- return dateFormat.parse(s);
+ return DATE_FORMAT.parse(s);
}
}
package org.opendaylight.controller.config.yangjmxgenerator.plugin.util;
public class FullyQualifiedNameHelper {
+
+ private FullyQualifiedNameHelper() {
+ }
+
public static String getFullyQualifiedName(String packageName,
String className) {
if (packageName.isEmpty())
import java.util.Map;
import java.util.Set;
+import org.junit.Ignore;
import org.junit.Test;
import org.opendaylight.controller.config.yangjmxgenerator.plugin.util.NameConflictException;
import org.opendaylight.controller.config.yangjmxgenerator.plugin.util.YangModelSearchUtils;
Map<File, String> testedFilesToYangModules = new HashMap<>();
Map<String, String> testedYangModulesToExpectedConflictingName = new HashMap<>();
+ @Ignore
@Test
public void testNameConflicts() throws Exception {
prepareSamples();
}
- leaf simpleInt {
+ leaf simpleInt1 {
type uint32;
default 99L;
}
}
}
- leaf simpleInt {
+ leaf simpleInt2 {
type uint32;
}
case impl-netconf {
when "/config:modules/config:module/config:type = 'impl-netconf'";
// root runtime bean
- leaf created-sessions {
+ leaf created-sessions-1 {
type uint32;
}
case netconf {
when "/config:modules/config:module/config:type = 'netconf'";
// root runtime bean
- leaf created-sessions {
+ leaf created-sessions2 {
type uint32;
}
case netconf {
when "/config:modules/config:module/config:type = 'netconf'";
- container dto-a {
+ container dto-a2 {
leaf simple-arg {
type uint32;
}
case netconf {
when "/config:modules/config:module/config:type = 'netconf'";
// root runtime bean
- leaf created-sessions {
+ leaf created-sessions2 {
type uint32;
}
case netconf {
when "/config:modules/config:module/config:type = 'netconf'";
// root runtime bean
- leaf created-sessions {
+ leaf created-sessions2 {
type uint32;
}
case netconf1 {
when "/config:modules/config:module/config:type = 'netconf1'";
// root runtime bean
- leaf created-sessions {
+ leaf created-sessions2 {
type uint32;
}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<artifactId>yang-test-plugin</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-plugin-parent</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../config-plugin-parent</relativePath>
</parent>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>configuration</artifactId>
- <version>0.4.3-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>configuration.implementation</artifactId>
- <version>0.4.3-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>configuration.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>ch.qos.logback</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>connectionmanager</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>connectionmanager.implementation</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>containermanager</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>containermanager.implementation</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>containermanager.it.implementation</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>containermanager.shell</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>opendaylight-karaf-empty</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>opendaylight-karaf-resources</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>distribution.opendaylight-karaf</artifactId>
<artifactId>features-base</artifactId>
<classifier>features</classifier>
<type>xml</type>
+ <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-adsal</artifactId>
<classifier>features</classifier>
<type>xml</type>
+ <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-nsf</artifactId>
<classifier>features</classifier>
<type>xml</type>
+ <scope>runtime</scope>
</dependency>
<!-- MD-SAL Related Features -->
<dependency>
<artifactId>features-mdsal</artifactId>
<classifier>features</classifier>
<type>xml</type>
+ <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-flow</artifactId>
<classifier>features</classifier>
<type>xml</type>
+ <scope>runtime</scope>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>features-restconf</artifactId>
+ <version>1.2.0-SNAPSHOT</version>
+ <classifier>features</classifier>
+ <type>xml</type>
+ <scope>runtime</scope>
+ </dependency>
+ <!-- Netconf connector features. When this is included, users can test the netconf connector using netconf-testtool -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>features-netconf-connector</artifactId>
+ <classifier>features</classifier>
+ <type>xml</type>
+ </dependency>
+
</dependencies>
<build>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>distribution.opendaylight</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<prerequisites>
<maven>3.0</maven>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-inmemory-datastore</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-topology</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<!-- toaster example I'm pretty sure we should trim -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>swagger-ui</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
</dependency>
</dependencies>
</profile>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.0-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<scm>
<groupId>org.opendaylight.controller</groupId>
<artifactId>distribution.p2site</artifactId>
- <version>0.1.0-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<build>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>sanitytest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>dummy-console</artifactId>
- <version>1.1.0-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<description>Dummy Console Interfaces for Equinox-specific CLI</description>
<build>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>forwarding.staticrouting</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>forwardingrulesmanager</artifactId>
- <version>0.6.0-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>forwardingrulesmanager.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>forwardingrulesmanager.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<properties>
<sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker.implementation</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>hosttracker.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<properties>
<sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker.shell</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker_new</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker_new.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>releasepom</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>karaf.branding</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<name>OpenDaylight :: Karaf :: Branding</name>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>karaf-tomcat-security</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>logging.bridge</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<parent>
<artifactId>sal-parent</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>compatibility-parent</artifactId>
<packaging>pom</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>compatibility-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-compatibility</artifactId>
<packaging>bundle</packaging>
*/
package org.opendaylight.controller.sal.compatibility;
-import java.util.Dictionary;
-import java.util.Hashtable;
-
+import com.google.common.base.Preconditions;
import org.apache.felix.dm.Component;
import org.opendaylight.controller.clustering.services.IClusterGlobalServices;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker;
import org.opendaylight.controller.sal.utils.INodeFactory;
import org.osgi.framework.BundleContext;
-import com.google.common.base.Preconditions;
+import java.util.Dictionary;
+import java.util.Hashtable;
public class ComponentActivator extends ComponentActivatorAbstractBase {
private final INodeConnectorFactory nodeConnectorFactory = new MDSalNodeConnectorFactory();
@Override
public void start(final BundleContext context) {
- super.start(context);
this.context = Preconditions.checkNotNull(context);
+ super.start(context);
}
public ProviderContext setBroker(final BindingAwareBroker broker) {
@Override
protected Object[] getGlobalImplementations() {
return new Object[] {
+ this, // Used for setBroker callback
flow,
inventory,
dataPacket,
nodeFactory,
nodeConnectorFactory,
topology,
- tpProvider,
- this // Used for setBroker callback
+ tpProvider
};
}
.setService(IDiscoveryService.class)
.setCallbacks("setDiscoveryPublisher", "setDiscoveryPublisher")
.setRequired(false));
+ it.add(createServiceDependency()
+ .setService(BindingAwareBroker.class)
+ .setRequired(true));
}
private void _instanceConfigure(final InventoryAndReadAdapter imp, final Component it, String containerName) {
.setService(IPluginOutInventoryService.class)
.setCallbacks("setInventoryPublisher", "unsetInventoryPublisher")
.setRequired(false));
+ it.add(createServiceDependency()
+ .setService(BindingAwareBroker.class)
+ .setRequired(true));
}
private void _configure(final TopologyAdapter imp, final Component it) {
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>forwardingrules-manager</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>inventory-manager</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>md-sal-config</artifactId>
<description>Configuration files for md-sal</description>
<parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>model-flow-base</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>model-flow-service</artifactId>
<packaging>bundle</packaging>
--- /dev/null
+module node-config {
+ namespace "urn:opendaylight:module:config";
+ prefix node-config;
+
+ import flow-capable-transaction {prefix tr;}
+ import opendaylight-inventory {prefix inv;revision-date "2013-08-19";}
+
+ revision "2014-10-15" {
+ description "Initial revision of node configuration service";
+ }
+
+ grouping node-ref {
+ uses "inv:node-context-ref";
+ }
+
+
+
+ /** Base configuration structure **/
+ grouping node-config {
+ leaf flag {
+ type string;
+      description "Switch config flag. Expected values OFPCFRAGNORMAL, OFPCFRAGDROP, OFPCFRAGREASM, OFPCFRAGMASK";
+ }
+ leaf miss-search-length{
+ type uint16;
+ }
+ }
+
+ rpc set-config {
+ input {
+ uses node-config;
+ uses tr:transaction-aware;
+ uses node-ref;
+ }
+ output {
+ uses tr:transaction-aware;
+ }
+ }
+}
<parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>model-flow-statistics</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>model-inventory</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>model-topology</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.model</groupId>
<artifactId>model-parent</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-akka-raft</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-clustering-commons</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-api</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-broker-impl</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>binding-data-codec</artifactId>
- <version>0.6.2-SNAPSHOT</version>
+ <version>0.7.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-config</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-dom-it</artifactId>
<packaging>jar</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-test-model</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
</dependencies>
<build>
public class ListProcessingAndOrderingTest extends AbstractDataServiceTest {
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_UNORDERED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(UnorderedContainer.QNAME).node(UnorderedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(UnorderedContainer.QNAME).node(UnorderedList.QNAME).build();
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_ORDERED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(OrderedContainer.QNAME).node(OrderedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(OrderedContainer.QNAME).node(OrderedList.QNAME).build();
private static final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier DOM_UNKEYED_LIST_PATH = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .builder(Lists.QNAME).node(UnkeyedContainer.QNAME).node(UnkeyedList.QNAME).build();
+ .builder().node(Lists.QNAME).node(UnkeyedContainer.QNAME).node(UnkeyedList.QNAME).build();
private static final InstanceIdentifier<UnorderedContainer> UNORDERED_CONTAINER_PATH = InstanceIdentifier.builder(Lists.class).child(UnorderedContainer.class).build();
private static final InstanceIdentifier<OrderedContainer> ORDERED_CONTAINER_PATH = InstanceIdentifier.builder(Lists.class).child(OrderedContainer.class).build();
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-it</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-binding-util</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-clustering-commons</artifactId>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.common.actor;
+
+/**
+ * Actor with its behaviour metered. Metering is enabled by configuration.
+ */
+public abstract class AbstractUntypedPersistentActorWithMetering extends AbstractUntypedPersistentActor {
+
+ public AbstractUntypedPersistentActorWithMetering() {
+ if (isMetricsCaptureEnabled())
+ getContext().become(new MeteringBehavior(this));
+ }
+
+ private boolean isMetricsCaptureEnabled(){
+ CommonConfig config = new CommonConfig(getContext().system().settings().config());
+ return config.isMetricCaptureEnabled();
+ }
+}
package org.opendaylight.controller.cluster.datastore.node;
-import org.opendaylight.controller.cluster.datastore.node.utils.PathUtils;
import org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeSerializer;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeSerializer.DeSerializer;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeSerializer.Serializer;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.Container;
+import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
public class NormalizedNodeToNodeCodec {
+ public interface Encoded {
+ NormalizedNodeMessages.Container getEncodedNode();
+
+ NormalizedNodeMessages.InstanceIdentifier getEncodedPath();
+ }
+
+ public interface Decoded {
+ NormalizedNode<?,?> getDecodedNode();
+
+ YangInstanceIdentifier getDecodedPath();
+ }
+
private final SchemaContext ctx;
- private static final Logger logger = LoggerFactory.getLogger(NormalizedNodeToNodeCodec.class);
public NormalizedNodeToNodeCodec(final SchemaContext ctx){
this.ctx = ctx;
+ }
+ public NormalizedNodeMessages.Container encode(NormalizedNode<?,?> node){
+ return encode(null, node).getEncodedNode();
}
- public NormalizedNodeMessages.Container encode(YangInstanceIdentifier id, NormalizedNode node){
+ public Encoded encode(YangInstanceIdentifier path, NormalizedNode<?,?> node) {
+
+ NormalizedNodeMessages.InstanceIdentifier serializedPath = null;
NormalizedNodeMessages.Container.Builder builder = NormalizedNodeMessages.Container.newBuilder();
- String parentPath = "";
- if(id != null){
- parentPath = PathUtils.getParentPath(PathUtils.toString(id));
- }
+ // Note: parent path is no longer used
+ builder.setParentPath("");
- builder.setParentPath(parentPath);
if(node != null) {
- builder.setNormalizedNode(NormalizedNodeSerializer.serialize(node));
+ if(path == null) {
+ builder.setNormalizedNode(NormalizedNodeSerializer.serialize(node));
+ } else {
+ Serializer serializer = NormalizedNodeSerializer.newSerializer(node);
+ builder.setNormalizedNode(serializer.serialize(path));
+ serializedPath = serializer.getSerializedPath();
+ }
}
- return builder.build();
+ return new EncodedImpl(builder.build(), serializedPath);
+ }
+
+
+ public NormalizedNode<?,?> decode(NormalizedNodeMessages.Node node){
+ return decode(null, node).getDecodedNode();
}
- public NormalizedNode<?,?> decode(YangInstanceIdentifier id, NormalizedNodeMessages.Node node){
+ public Decoded decode(NormalizedNodeMessages.InstanceIdentifier path,
+ NormalizedNodeMessages.Node node) {
if(node.getIntType() < 0 || node.getSerializedSize() == 0){
- return null;
+ return new DecodedImpl(null, null);
}
- return NormalizedNodeSerializer.deSerialize(node);
+
+ DeSerializer deSerializer = NormalizedNodeSerializer.newDeSerializer(path, node);
+ NormalizedNode<?,?> decodedNode = deSerializer.deSerialize();
+ return new DecodedImpl(decodedNode, deSerializer.getDeserializedPath());
}
+ private static class DecodedImpl implements Decoded {
+
+ private final NormalizedNode<?, ?> decodedNode;
+ private final YangInstanceIdentifier decodedPath;
+ public DecodedImpl(NormalizedNode<?, ?> decodedNode, YangInstanceIdentifier decodedPath) {
+ this.decodedNode = decodedNode;
+ this.decodedPath = decodedPath;
+ }
+
+ @Override
+ public NormalizedNode<?, ?> getDecodedNode() {
+ return decodedNode;
+ }
+
+ @Override
+ public YangInstanceIdentifier getDecodedPath() {
+ return decodedPath;
+ }
+ }
+
+ private static class EncodedImpl implements Encoded {
+
+ private final Container encodedNode;
+ private final InstanceIdentifier encodedPath;
+
+ EncodedImpl(Container encodedNode, InstanceIdentifier encodedPath) {
+ this.encodedNode = encodedNode;
+ this.encodedPath = encodedPath;
+ }
+
+ @Override
+ public Container getEncodedNode() {
+ return encodedNode;
+ }
+
+ @Override
+ public InstanceIdentifier getEncodedPath() {
+ return encodedPath;
+ }
+ }
}
.create(name.trim()));
}
- return new YangInstanceIdentifier.AugmentationIdentifier(null, childNames);
+ return new YangInstanceIdentifier.AugmentationIdentifier(childNames);
}
}
public class PathUtils {
- public static String getParentPath(String currentElementPath){
- StringBuilder parentPath = new StringBuilder();
-
- if(currentElementPath != null){
- String[] parentPaths = currentElementPath.split("/");
- if(parentPaths.length > 2){
- for(int i=0;i<parentPaths.length-1;i++){
- if(parentPaths[i].length() > 0){
- parentPath.append("/");
- parentPath.append(parentPaths[i]);
- }
- }
- }
- }
- return parentPath.toString();
- }
-
/**
* Given a YangInstanceIdentifier return a serialized version of the same
* as a String
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
-import java.net.URI;
-import java.util.Date;
-
/**
* NormalizedNodeSerializationContext provides methods which help in encoding
* certain components of a NormalizedNode properly
*/
public interface NormalizedNodeSerializationContext {
- int addNamespace(URI namespace);
- int addRevision(Date revision);
- int addLocalName(String localName);
}
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
import com.google.common.base.Preconditions;
-
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.common.SimpleDateFormatUtil;
+import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.Node.Builder;
import org.opendaylight.yangtools.yang.data.api.Node;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.AnyXmlNode;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.ListNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeAttrBuilder;
-
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Date;
import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
import java.util.Map;
-
import static org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeType.ANY_XML_NODE_TYPE;
import static org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeType.AUGMENTATION_NODE_TYPE;
import static org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeType.CHOICE_NODE_TYPE;
return new Serializer(node).serialize();
}
+ public static Serializer newSerializer(NormalizedNode node) {
+ Preconditions.checkNotNull(node, "node should not be null");
+ return new Serializer(node);
+ }
/**
* DeSerialize a protocol buffer message back into a NormalizedNode
* @param node
* @return
*/
- public static NormalizedNode deSerialize(NormalizedNodeMessages.Node node){
- return new DeSerializer(node).deSerialize();
+ public static NormalizedNode deSerialize(NormalizedNodeMessages.Node node) {
+ Preconditions.checkNotNull(node, "node should not be null");
+ return new DeSerializer(null, node).deSerialize();
+ }
+
+ public static DeSerializer newDeSerializer(NormalizedNodeMessages.InstanceIdentifier path,
+ NormalizedNodeMessages.Node node) {
+ Preconditions.checkNotNull(node, "node should not be null");
+ return new DeSerializer(path, node);
}
/**
* @param pathArgument
* @return
*/
- public static YangInstanceIdentifier.PathArgument deSerialize(NormalizedNodeMessages.Node node, NormalizedNodeMessages.PathArgument pathArgument){
+ public static YangInstanceIdentifier.PathArgument deSerialize(NormalizedNodeMessages.Node node,
+ NormalizedNodeMessages.PathArgument pathArgument){
Preconditions.checkNotNull(node, "node should not be null");
Preconditions.checkNotNull(pathArgument, "pathArgument should not be null");
- return new DeSerializer(node).deSerialize(pathArgument);
+ return new DeSerializer(null, node).deSerialize(pathArgument);
}
- private static class Serializer implements NormalizedNodeSerializationContext {
+ public static class Serializer extends QNameSerializationContextImpl
+ implements NormalizedNodeSerializationContext {
private final NormalizedNode node;
- private final Map<Object, Integer> codeMap = new HashMap<>();
- private final List<String> codes = new ArrayList<>();
+ private NormalizedNodeMessages.InstanceIdentifier serializedPath;
private Serializer(NormalizedNode node) {
this.node = node;
}
- private NormalizedNodeMessages.Node serialize() {
- return this.serialize(node).addAllCode(codes).build();
+ public NormalizedNodeMessages.InstanceIdentifier getSerializedPath() {
+ return serializedPath;
+ }
+
+ public NormalizedNodeMessages.Node serialize() {
+ return this.serialize(node).addAllCode(getCodes()).build();
+ }
+
+ public NormalizedNodeMessages.Node serialize(YangInstanceIdentifier path) {
+ Builder builder = serialize(node);
+ serializedPath = InstanceIdentifierUtils.toSerializable(path, this);
+ return builder.addAllCode(getCodes()).build();
}
private NormalizedNodeMessages.Node.Builder serialize(
return builder;
}
-
-
- @Override public int addNamespace(URI namespace) {
- int namespaceInt = getCode(namespace);
-
- if(namespaceInt == -1) {
- namespaceInt = addCode(namespace, namespace.toString());
- }
- return namespaceInt;
- }
-
- @Override public int addRevision(Date revision) {
- if(revision == null){
- return -1;
- }
-
- int revisionInt = getCode(revision);
- if(revisionInt == -1) {
- String formattedRevision =
- SimpleDateFormatUtil.getRevisionFormat().format(revision);
- revisionInt = addCode(revision, formattedRevision);
- }
- return revisionInt;
- }
-
- @Override public int addLocalName(String localName) {
- int localNameInt = getCode(localName);
- if(localNameInt == -1) {
- localNameInt = addCode(localName, localName.toString());
- }
- return localNameInt;
-
- }
-
- public int addCode(Object code, String codeStr){
- int count = codes.size();
- codes.add(codeStr);
- codeMap.put(code, Integer.valueOf(count));
- return count;
- }
-
- public int getCode(Object code){
- if(codeMap.containsKey(code)){
- return codeMap.get(code);
- }
- return -1;
- }
}
- private static class DeSerializer implements NormalizedNodeDeSerializationContext {
+ public static class DeSerializer extends QNameDeSerializationContextImpl
+ implements NormalizedNodeDeSerializationContext {
private static Map<NormalizedNodeType, DeSerializationFunction>
deSerializationFunctions = new EnumMap<>(NormalizedNodeType.class);
}
private final NormalizedNodeMessages.Node node;
+ private final NormalizedNodeMessages.InstanceIdentifier path;
+ private YangInstanceIdentifier deserializedPath;
- public DeSerializer(NormalizedNodeMessages.Node node){
+ public DeSerializer(NormalizedNodeMessages.InstanceIdentifier path,
+ NormalizedNodeMessages.Node node) {
+ super(node.getCodeList());
+ this.path = path;
this.node = node;
}
- public NormalizedNode deSerialize(){
- return deSerialize(node);
+ public YangInstanceIdentifier getDeserializedPath() {
+ return deserializedPath;
+ }
+
+ public NormalizedNode deSerialize() {
+ NormalizedNode deserializedNode = deSerialize(node);
+ if(path != null) {
+ deserializedPath = InstanceIdentifierUtils.fromSerializable(path, this);
+ }
+
+ return deserializedNode;
}
private NormalizedNode deSerialize(NormalizedNodeMessages.Node node){
this, path);
}
- @Override public String getNamespace(int namespace) {
- return node.getCode(namespace);
- }
-
- @Override public String getRevision(int revision) {
- return node.getCode(revision);
- }
-
- @Override public String getLocalName(int localName) {
- return node.getCode(localName);
- }
-
public YangInstanceIdentifier.PathArgument deSerialize(
NormalizedNodeMessages.PathArgument pathArgument) {
return PathArgumentSerializer.deSerialize(this, pathArgument);
public static NormalizedNodeType getSerializableNodeType(NormalizedNode node){
Preconditions.checkNotNull(node, "node should not be null");
- if(node instanceof ContainerNode){
- return CONTAINER_NODE_TYPE;
- } else if(node instanceof LeafNode){
+ if(node instanceof LeafNode){
return LEAF_NODE_TYPE;
- } else if(node instanceof MapNode){
- return MAP_NODE_TYPE;
+ } else if(node instanceof LeafSetEntryNode){
+ return LEAF_SET_ENTRY_NODE_TYPE;
} else if(node instanceof MapEntryNode){
return MAP_ENTRY_NODE_TYPE;
+ } else if(node instanceof ContainerNode){
+ return CONTAINER_NODE_TYPE;
+ } else if(node instanceof MapNode){
+ return MAP_NODE_TYPE;
} else if(node instanceof AugmentationNode){
return AUGMENTATION_NODE_TYPE;
} else if(node instanceof LeafSetNode){
return LEAF_SET_NODE_TYPE;
- } else if(node instanceof LeafSetEntryNode){
- return LEAF_SET_ENTRY_NODE_TYPE;
} else if(node instanceof ChoiceNode){
return CHOICE_NODE_TYPE;
} else if(node instanceof OrderedLeafSetNode){
} else if(node instanceof AnyXmlNode){
return ANY_XML_NODE_TYPE;
}
+
throw new IllegalArgumentException("Node type unknown : " + node.getClass().getSimpleName());
}
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
import com.google.common.base.Preconditions;
-
import org.opendaylight.controller.cluster.datastore.node.utils.NodeIdentifierFactory;
import org.opendaylight.controller.cluster.datastore.node.utils.QNameFactory;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
-
import static org.opendaylight.controller.cluster.datastore.node.utils.serialization.PathArgumentType.getSerializablePathArgumentType;
public class PathArgumentSerializer {
private static final String REVISION_ARG = "?revision=";
- private static final Map<Class, PathArgumentAttributesGetter> pathArgumentAttributesGetters = new HashMap<>();
+ private static final Map<Class<?>, PathArgumentAttributesGetter> pathArgumentAttributesGetters = new HashMap<>();
- public static NormalizedNodeMessages.PathArgument serialize(NormalizedNodeSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument){
+ public static NormalizedNodeMessages.PathArgument serialize(QNameSerializationContext context,
+ YangInstanceIdentifier.PathArgument pathArgument){
Preconditions.checkNotNull(context, "context should not be null");
Preconditions.checkNotNull(pathArgument, "pathArgument should not be null");
}
- public static YangInstanceIdentifier.PathArgument deSerialize(NormalizedNodeDeSerializationContext context, NormalizedNodeMessages.PathArgument pathArgument){
+ public static YangInstanceIdentifier.PathArgument deSerialize(QNameDeSerializationContext context,
+ NormalizedNodeMessages.PathArgument pathArgument){
Preconditions.checkNotNull(context, "context should not be null");
Preconditions.checkNotNull(pathArgument, "pathArgument should not be null");
private static interface PathArgumentAttributesGetter {
- Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument);
+ Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument);
}
static {
pathArgumentAttributesGetters.put(YangInstanceIdentifier.NodeWithValue.class, new PathArgumentAttributesGetter() {
@Override
public Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(
- NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument) {
- List<NormalizedNodeMessages.PathArgumentAttribute> attributes =
- new ArrayList<>();
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument) {
YangInstanceIdentifier.NodeWithValue identifier
= (YangInstanceIdentifier.NodeWithValue) pathArgument;
NormalizedNodeMessages.PathArgumentAttribute attribute =
buildAttribute(context, null, identifier.getValue());
- attributes.add(attribute);
-
- return attributes;
-
+ return Arrays.asList(attribute);
}
});
pathArgumentAttributesGetters.put(YangInstanceIdentifier.NodeIdentifierWithPredicates.class, new PathArgumentAttributesGetter() {
@Override
public Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(
- NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument) {
-
- List<NormalizedNodeMessages.PathArgumentAttribute> attributes =
- new ArrayList<>();
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument) {
YangInstanceIdentifier.NodeIdentifierWithPredicates identifier
= (YangInstanceIdentifier.NodeIdentifierWithPredicates) pathArgument;
- for (QName key : identifier.getKeyValues().keySet()) {
- Object value = identifier.getKeyValues().get(key);
+ Map<QName, Object> keyValues = identifier.getKeyValues();
+ List<NormalizedNodeMessages.PathArgumentAttribute> attributes =
+ new ArrayList<>(keyValues.size());
+ for (Entry<QName, Object> e : keyValues.entrySet()) {
NormalizedNodeMessages.PathArgumentAttribute attribute =
- buildAttribute(context, key, value);
+ buildAttribute(context, e.getKey(), e.getValue());
attributes.add(attribute);
-
}
return attributes;
-
}
});
pathArgumentAttributesGetters.put(YangInstanceIdentifier.AugmentationIdentifier.class, new PathArgumentAttributesGetter() {
@Override
public Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(
- NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument) {
-
- List<NormalizedNodeMessages.PathArgumentAttribute> attributes =
- new ArrayList<>();
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument) {
YangInstanceIdentifier.AugmentationIdentifier identifier
= (YangInstanceIdentifier.AugmentationIdentifier) pathArgument;
- for (QName key : identifier.getPossibleChildNames()) {
+ Set<QName> possibleChildNames = identifier.getPossibleChildNames();
+ List<NormalizedNodeMessages.PathArgumentAttribute> attributes =
+ new ArrayList<>(possibleChildNames.size());
+ for (QName key : possibleChildNames) {
Object value = key;
NormalizedNodeMessages.PathArgumentAttribute attribute =
buildAttribute(context, key, value);
attributes.add(attribute);
-
}
return attributes;
-
}
});
pathArgumentAttributesGetters.put(YangInstanceIdentifier.NodeIdentifier.class, new PathArgumentAttributesGetter() {
@Override
public Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> get(
- NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument) {
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument) {
return Collections.emptyList();
}
});
}
- private static NormalizedNodeMessages.PathArgumentAttribute buildAttribute(NormalizedNodeSerializationContext context,QName name, Object value){
+ private static NormalizedNodeMessages.PathArgumentAttribute buildAttribute(
+ QNameSerializationContext context, QName name, Object value) {
NormalizedNodeMessages.PathArgumentAttribute.Builder builder =
NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
}
- private static NormalizedNodeMessages.QName.Builder encodeQName(NormalizedNodeSerializationContext context, QName qName){
- if(qName == null){
+ private static NormalizedNodeMessages.QName.Builder encodeQName(QNameSerializationContext context,
+ QName qName) {
+ if(qName == null) {
return NormalizedNodeMessages.QName.getDefaultInstance().toBuilder();
}
NormalizedNodeMessages.QName.Builder qNameBuilder =
}
private static Iterable<? extends NormalizedNodeMessages.PathArgumentAttribute> getPathArgumentAttributes(
- NormalizedNodeSerializationContext context,
- YangInstanceIdentifier.PathArgument pathArgument) {
+ QNameSerializationContext context, YangInstanceIdentifier.PathArgument pathArgument) {
return pathArgumentAttributesGetters.get(pathArgument.getClass()).get(context, pathArgument);
-
}
- private static String qNameToString(NormalizedNodeDeSerializationContext context,
+ private static String qNameToString(QNameDeSerializationContext context,
NormalizedNodeMessages.QName qName){
// If this serializer is used qName cannot be null (see encodeQName)
// adding null check only in case someone tried to deSerialize a protocol buffer node
* @return MD-SAL PathArgument
*/
private static YangInstanceIdentifier.PathArgument parsePathArgument(
- NormalizedNodeDeSerializationContext context,
- NormalizedNodeMessages.PathArgument pathArgument) {
+ QNameDeSerializationContext context, NormalizedNodeMessages.PathArgument pathArgument) {
switch(PathArgumentType.values()[pathArgument.getIntType()]){
case NODE_IDENTIFIER_WITH_VALUE : {
}
private static Map<QName, Object> toAttributesMap(
- NormalizedNodeDeSerializationContext context,
- List<NormalizedNodeMessages.PathArgumentAttribute> attributesList) {
+ QNameDeSerializationContext context,
+ List<NormalizedNodeMessages.PathArgumentAttribute> attributesList) {
Map<QName, Object> map;
if(attributesList.size() == 1) {
return map;
}
- private static Object parseAttribute(NormalizedNodeDeSerializationContext context, NormalizedNodeMessages.PathArgumentAttribute attribute){
+ private static Object parseAttribute(QNameDeSerializationContext context,
+ NormalizedNodeMessages.PathArgumentAttribute attribute){
return ValueSerializer.deSerialize(context, attribute);
}
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
-import com.google.common.base.Preconditions;
+import java.util.Map;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import com.google.common.collect.ImmutableMap;
public enum PathArgumentType {
AUGMENTATION_IDENTIFIER,
NODE_IDENTIFIER_WITH_VALUE,
NODE_IDENTIFIER_WITH_PREDICATES;
+ private static Map<Class<?>, PathArgumentType> CLASS_TO_ENUM_MAP =
+ ImmutableMap.<Class<?>, PathArgumentType>builder().
+ put(YangInstanceIdentifier.AugmentationIdentifier.class, AUGMENTATION_IDENTIFIER).
+ put(YangInstanceIdentifier.NodeIdentifier.class, NODE_IDENTIFIER).
+ put(YangInstanceIdentifier.NodeIdentifierWithPredicates.class, NODE_IDENTIFIER_WITH_PREDICATES).
+ put(YangInstanceIdentifier.NodeWithValue.class, NODE_IDENTIFIER_WITH_VALUE).build();
+
public static int getSerializablePathArgumentType(YangInstanceIdentifier.PathArgument pathArgument){
- Preconditions.checkNotNull(pathArgument, "pathArgument should not be null");
-
- if(pathArgument instanceof YangInstanceIdentifier.AugmentationIdentifier){
- return AUGMENTATION_IDENTIFIER.ordinal();
- } else if(pathArgument instanceof YangInstanceIdentifier.NodeIdentifier){
- return NODE_IDENTIFIER.ordinal();
- } else if(pathArgument instanceof YangInstanceIdentifier.NodeIdentifierWithPredicates){
- return NODE_IDENTIFIER_WITH_PREDICATES.ordinal();
- } else if(pathArgument instanceof YangInstanceIdentifier.NodeWithValue){
- return NODE_IDENTIFIER_WITH_VALUE.ordinal();
+
+ PathArgumentType type = CLASS_TO_ENUM_MAP.get(pathArgument.getClass());
+ if(type == null) {
+ throw new IllegalArgumentException("Unknown type of PathArgument = " + pathArgument);
}
- throw new IllegalArgumentException("Unknown type of PathArgument = " + pathArgument.toString());
+
+ return type.ordinal();
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+
+/**
+ * Interface that provides methods which help in decoding components of a QName.
+ *
+ * @author Thomas Pantelis
+ */
+public interface QNameDeSerializationContext {
+ String getNamespace(int namespace);
+
+ String getRevision(int revision);
+
+ String getLocalName(int localName);
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+
+import java.util.List;
+
+/**
+ * Implementation of the QNameDeSerializationContext interface.
+ *
+ * @author Thomas Pantelis
+ */
+public class QNameDeSerializationContextImpl implements QNameDeSerializationContext {
+
+ private final List<String> codeList;
+
+ public QNameDeSerializationContextImpl(List<String> codeList) {
+ this.codeList = codeList;
+ }
+
+ @Override
+ public String getNamespace(int namespace) {
+ return codeList.get(namespace);
+ }
+
+ @Override
+ public String getRevision(int revision) {
+ return codeList.get(revision);
+ }
+
+ @Override
+ public String getLocalName(int localName) {
+ return codeList.get(localName);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+
+import java.net.URI;
+import java.util.Date;
+
+/**
+ * Interface that provides methods which help in encoding components of a QName.
+ *
+ * @author Thomas Pantelis
+ */
+public interface QNameSerializationContext {
+ int addNamespace(URI namespace);
+
+ int addRevision(Date revision);
+
+ int addLocalName(String localName);
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.opendaylight.yangtools.yang.common.SimpleDateFormatUtil;
+
+/**
+ * Implementation of the QNameSerializationContext interface.
+ *
+ * @author Thomas Pantelis
+ */
+public class QNameSerializationContextImpl implements QNameSerializationContext {
+
+ private final Map<Object, Integer> codeMap = new HashMap<>();
+ private final List<String> codes = new ArrayList<>();
+
+ public List<String> getCodes() {
+ return codes;
+ }
+
+ @Override public int addNamespace(URI namespace) {
+ int namespaceInt = getCode(namespace);
+
+ if(namespaceInt == -1) {
+ namespaceInt = addCode(namespace, namespace.toString());
+ }
+ return namespaceInt;
+ }
+
+ @Override public int addRevision(Date revision) {
+ if(revision == null){
+ return -1;
+ }
+
+ int revisionInt = getCode(revision);
+ if(revisionInt == -1) {
+ String formattedRevision =
+ SimpleDateFormatUtil.getRevisionFormat().format(revision);
+ revisionInt = addCode(revision, formattedRevision);
+ }
+ return revisionInt;
+ }
+
+ @Override public int addLocalName(String localName) {
+ int localNameInt = getCode(localName);
+ if(localNameInt == -1) {
+ localNameInt = addCode(localName, localName);
+ }
+ return localNameInt;
+
+ }
+
+ private int addCode(Object code, String codeStr){
+ int count = codes.size();
+ codes.add(codeStr);
+ codeMap.put(code, Integer.valueOf(count));
+ return count;
+ }
+
+ private int getCode(Object code){
+ Integer value = codeMap.get(code);
+ return value == null ? -1 : value.intValue();
+ }
+}
public class ValueSerializer {
public static void serialize(NormalizedNodeMessages.Node.Builder builder,
- NormalizedNodeSerializationContext context, Object value){
+ QNameSerializationContext context, Object value) {
builder.setIntValueType(ValueType.getSerializableType(value).ordinal());
if(value instanceof YangInstanceIdentifier) {
builder.setInstanceIdentifierValue(
- InstanceIdentifierUtils.toSerializable((YangInstanceIdentifier) value));
+ InstanceIdentifierUtils.toSerializable((YangInstanceIdentifier) value, context));
} else if(value instanceof Set) {
Set set = (Set) value;
if(!set.isEmpty()){
}
public static void serialize(NormalizedNodeMessages.PathArgumentAttribute.Builder builder,
- NormalizedNodeSerializationContext context, Object value){
+ QNameSerializationContext context, Object value){
builder.setType(ValueType.getSerializableType(value).ordinal());
builder.setValue(value.toString());
}
- public static Object deSerialize(
- NormalizedNodeDeSerializationContext context, NormalizedNodeMessages.Node node) {
+ public static Object deSerialize(QNameDeSerializationContext context,
+ NormalizedNodeMessages.Node node) {
if(node.getIntValueType() == ValueType.YANG_IDENTIFIER_TYPE.ordinal()){
return InstanceIdentifierUtils.fromSerializable(
- node.getInstanceIdentifierValue());
+ node.getInstanceIdentifierValue(), context);
} else if(node.getIntValueType() == ValueType.BITS_TYPE.ordinal()){
return new HashSet(node.getBitsValueList());
}
return deSerializeBasicTypes(node.getIntValueType(), node.getValue());
}
- public static Object deSerialize(
- NormalizedNodeDeSerializationContext context,
- NormalizedNodeMessages.PathArgumentAttribute attribute) {
+ public static Object deSerialize(QNameDeSerializationContext context,
+ NormalizedNodeMessages.PathArgumentAttribute attribute) {
return deSerializeBasicTypes(attribute.getType(), attribute.getValue());
}
import org.opendaylight.controller.cluster.datastore.node.utils.NodeIdentifierFactory;
import org.opendaylight.controller.cluster.datastore.node.utils.QNameFactory;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.PathArgumentSerializer;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameDeSerializationContext;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameDeSerializationContextImpl;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameSerializationContext;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameSerializationContextImpl;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
protected static final Logger logger = LoggerFactory
.getLogger(InstanceIdentifierUtils.class);
- @Deprecated
- public static YangInstanceIdentifier from(String path) {
- String[] ids = path.split("/");
-
- List<YangInstanceIdentifier.PathArgument> pathArguments =
- new ArrayList<>();
- for (String nodeId : ids) {
- if (!"".equals(nodeId)) {
- pathArguments
- .add(NodeIdentifierFactory.getArgument(nodeId));
- }
- }
- final YangInstanceIdentifier instanceIdentifier =
- YangInstanceIdentifier.create(pathArguments);
- return instanceIdentifier;
- }
-
-
/**
* Convert an MD-SAL YangInstanceIdentifier into a protocol buffer version of it
*
* @param path an MD-SAL YangInstanceIdentifier
* @return a protocol buffer version of the MD-SAL YangInstanceIdentifier
*/
- public static NormalizedNodeMessages.InstanceIdentifier toSerializable(YangInstanceIdentifier path){
+ public static NormalizedNodeMessages.InstanceIdentifier toSerializable(YangInstanceIdentifier path) {
+ QNameSerializationContextImpl context = new QNameSerializationContextImpl();
+ Builder builder = toSerializableBuilder(path, context);
+ return builder.addAllCode(context.getCodes()).build();
+ }
+
+ public static NormalizedNodeMessages.InstanceIdentifier toSerializable(
+ YangInstanceIdentifier path, QNameSerializationContext context) {
+ return toSerializableBuilder(path, context).build();
+ }
+
+ private static NormalizedNodeMessages.InstanceIdentifier.Builder toSerializableBuilder(
+ YangInstanceIdentifier path, QNameSerializationContext context) {
NormalizedNodeMessages.InstanceIdentifier.Builder builder =
NormalizedNodeMessages.InstanceIdentifier.newBuilder();
try {
-
- for (org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument pathArgument : path
- .getPathArguments()) {
-
- String nodeType = "";
- if(!(pathArgument instanceof YangInstanceIdentifier.AugmentationIdentifier)){
- nodeType = pathArgument.getNodeType().toString();
+ for (org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.
+ PathArgument pathArgument : path.getPathArguments()) {
+ NormalizedNodeMessages.PathArgument serializablePathArgument;
+ if(context == null) {
+ String nodeType = "";
+ if(!(pathArgument instanceof YangInstanceIdentifier.AugmentationIdentifier)){
+ nodeType = pathArgument.getNodeType().toString();
+ }
+
+ serializablePathArgument = NormalizedNodeMessages.PathArgument.newBuilder()
+ .setValue(pathArgument.toString())
+ .setType(pathArgument.getClass().getSimpleName())
+ .setNodeType(NormalizedNodeMessages.QName.newBuilder().setValue(nodeType))
+ .addAllAttributes(getPathArgumentAttributes(pathArgument)).build();
+ } else {
+ serializablePathArgument = PathArgumentSerializer.serialize(context, pathArgument);
}
- NormalizedNodeMessages.PathArgument serializablePathArgument =
- NormalizedNodeMessages.PathArgument.newBuilder()
- .setValue(pathArgument.toString())
- .setType(pathArgument.getClass().getSimpleName())
- .setNodeType(NormalizedNodeMessages.QName.newBuilder()
- .setValue(nodeType))
- .addAllAttributes(getPathArgumentAttributes(
- pathArgument))
- .build();
-
builder.addArguments(serializablePathArgument);
}
-
} catch(Exception e){
logger.error("An exception occurred", e);
}
- return builder.build();
+
+ return builder;
}
* @param path a protocol buffer version of the MD-SAL YangInstanceIdentifier
* @return an MD-SAL YangInstanceIdentifier
*/
- public static YangInstanceIdentifier fromSerializable(NormalizedNodeMessages.InstanceIdentifier path){
-
- List<YangInstanceIdentifier.PathArgument> pathArguments =
- new ArrayList<>();
+ public static YangInstanceIdentifier fromSerializable(NormalizedNodeMessages.InstanceIdentifier path) {
+ return fromSerializable(path, new QNameDeSerializationContextImpl(path.getCodeList()));
+ }
- for(NormalizedNodeMessages.PathArgument pathArgument : path.getArgumentsList()){
+ public static YangInstanceIdentifier fromSerializable(NormalizedNodeMessages.InstanceIdentifier path,
+ QNameDeSerializationContext context) {
- pathArguments
- .add(parsePathArgument(pathArgument));
+ List<YangInstanceIdentifier.PathArgument> pathArguments = new ArrayList<>();
+ for(NormalizedNodeMessages.PathArgument pathArgument : path.getArgumentsList()) {
+ if(context == null || pathArgument.hasType()) {
+ pathArguments.add(parsePathArgument(pathArgument));
+ } else {
+ pathArguments.add(PathArgumentSerializer.deSerialize(context, pathArgument));
+ }
}
- final YangInstanceIdentifier instanceIdentifier = YangInstanceIdentifier.create(pathArguments);
-
- return instanceIdentifier;
+ return YangInstanceIdentifier.create(pathArguments);
}
/**
* @param pathArgument protocol buffer PathArgument
* @return MD-SAL PathArgument
*/
- private static YangInstanceIdentifier.PathArgument parsePathArgument(NormalizedNodeMessages.PathArgument pathArgument) {
+ private static YangInstanceIdentifier.PathArgument parsePathArgument(
+ NormalizedNodeMessages.PathArgument pathArgument) {
if (YangInstanceIdentifier.NodeWithValue.class.getSimpleName().equals(pathArgument.getType())) {
YangInstanceIdentifier.NodeWithValue nodeWithValue =
}
public interface CanCommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CanCommitTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CanCommitTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CanCommitTransaction)
}
public interface AbortTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.AbortTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.AbortTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.AbortTransaction)
}
public interface CommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
+
+ // required string transactionId = 1;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ boolean hasTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ java.lang.String getTransactionId();
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTransactionIdBytes();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CommitTransaction}
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
+ int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
}
break;
}
+ case 10: {
+ bitField0_ |= 0x00000001;
+ transactionId_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return PARSER;
}
+ private int bitField0_;
+ // required string transactionId = 1;
+ public static final int TRANSACTIONID_FIELD_NUMBER = 1;
+ private java.lang.Object transactionId_;
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ transactionId_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private void initFields() {
+ transactionId_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
+ if (!hasTransactionId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTransactionIdBytes());
+ }
getUnknownFields().writeTo(output);
}
if (size != -1) return size;
size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTransactionIdBytes());
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
public Builder clear() {
super.clear();
+ transactionId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction buildPartial() {
org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction result = new org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.transactionId_ = transactionId_;
+ result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction other) {
if (other == org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages.CommitTransaction.getDefaultInstance()) return this;
+ if (other.hasTransactionId()) {
+ bitField0_ |= 0x00000001;
+ transactionId_ = other.transactionId_;
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
+ if (!hasTransactionId()) {
+
+ return false;
+ }
return true;
}
}
return this;
}
+ private int bitField0_;
+
+ // required string transactionId = 1;
+ private java.lang.Object transactionId_ = "";
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public boolean hasTransactionId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public java.lang.String getTransactionId() {
+ java.lang.Object ref = transactionId_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ transactionId_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTransactionIdBytes() {
+ java.lang.Object ref = transactionId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ transactionId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionId(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder clearTransactionId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ transactionId_ = getDefaultInstance().getTransactionId();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string transactionId = 1;</code>
+ */
+ public Builder setTransactionIdBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ transactionId_ = value;
+ onChanged();
+ return this;
+ }
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CommitTransaction)
}
static {
java.lang.String[] descriptorData = {
"\n\014Cohort.proto\022!org.opendaylight.control" +
- "ler.mdsal\"\026\n\024CanCommitTransaction\".\n\031Can" +
- "CommitTransactionReply\022\021\n\tcanCommit\030\001 \002(" +
- "\010\"\022\n\020AbortTransaction\"\027\n\025AbortTransactio" +
- "nReply\"\023\n\021CommitTransaction\"\030\n\026CommitTra" +
- "nsactionReply\"\026\n\024PreCommitTransaction\"\033\n" +
- "\031PreCommitTransactionReplyBZ\n8org.openda" +
- "ylight.controller.protobuff.messages.coh" +
- "ort3pcB\036ThreePhaseCommitCohortMessages"
+ "ler.mdsal\"-\n\024CanCommitTransaction\022\025\n\rtra" +
+ "nsactionId\030\001 \002(\t\".\n\031CanCommitTransaction" +
+ "Reply\022\021\n\tcanCommit\030\001 \002(\010\")\n\020AbortTransac" +
+ "tion\022\025\n\rtransactionId\030\001 \002(\t\"\027\n\025AbortTran" +
+ "sactionReply\"*\n\021CommitTransaction\022\025\n\rtra" +
+ "nsactionId\030\001 \002(\t\"\030\n\026CommitTransactionRep" +
+ "ly\"\026\n\024PreCommitTransaction\"\033\n\031PreCommitT" +
+ "ransactionReplyBZ\n8org.opendaylight.cont" +
+ "roller.protobuff.messages.cohort3pcB\036Thr",
+ "eePhaseCommitCohortMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
internal_static_org_opendaylight_controller_mdsal_CanCommitTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CanCommitTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_CanCommitTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_org_opendaylight_controller_mdsal_CanCommitTransactionReply_fieldAccessorTable = new
internal_static_org_opendaylight_controller_mdsal_AbortTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_AbortTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_AbortTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_org_opendaylight_controller_mdsal_AbortTransactionReply_fieldAccessorTable = new
internal_static_org_opendaylight_controller_mdsal_CommitTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CommitTransaction_descriptor,
- new java.lang.String[] { });
+ new java.lang.String[] { "TransactionId", });
internal_static_org_opendaylight_controller_mdsal_CommitTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_org_opendaylight_controller_mdsal_CommitTransactionReply_fieldAccessorTable = new
*/
org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.PathArgumentOrBuilder getArgumentsOrBuilder(
int index);
+
+ // repeated string code = 2;
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ java.util.List<java.lang.String>
+ getCodeList();
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ int getCodeCount();
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ java.lang.String getCode(int index);
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ com.google.protobuf.ByteString
+ getCodeBytes(int index);
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.InstanceIdentifier}
arguments_.add(input.readMessage(org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.PathArgument.PARSER, extensionRegistry));
break;
}
+ case 18: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ code_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ code_.add(input.readBytes());
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
arguments_ = java.util.Collections.unmodifiableList(arguments_);
}
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ code_ = new com.google.protobuf.UnmodifiableLazyStringList(code_);
+ }
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
return arguments_.get(index);
}
+ // repeated string code = 2;
+ public static final int CODE_FIELD_NUMBER = 2;
+ private com.google.protobuf.LazyStringList code_;
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public java.util.List<java.lang.String>
+ getCodeList() {
+ return code_;
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public int getCodeCount() {
+ return code_.size();
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public java.lang.String getCode(int index) {
+ return code_.get(index);
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getCodeBytes(int index) {
+ return code_.getByteString(index);
+ }
+
private void initFields() {
arguments_ = java.util.Collections.emptyList();
+ code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
for (int i = 0; i < arguments_.size(); i++) {
output.writeMessage(1, arguments_.get(i));
}
+ for (int i = 0; i < code_.size(); i++) {
+ output.writeBytes(2, code_.getByteString(i));
+ }
getUnknownFields().writeTo(output);
}
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, arguments_.get(i));
}
+ {
+ int dataSize = 0;
+ for (int i = 0; i < code_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(code_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getCodeList().size();
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
} else {
argumentsBuilder_.clear();
}
+ code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
} else {
result.arguments_ = argumentsBuilder_.build();
}
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ code_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ code_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.code_ = code_;
onBuilt();
return result;
}
}
}
}
+ if (!other.code_.isEmpty()) {
+ if (code_.isEmpty()) {
+ code_ = other.code_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureCodeIsMutable();
+ code_.addAll(other.code_);
+ }
+ onChanged();
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
return argumentsBuilder_;
}
+ // repeated string code = 2;
+ private com.google.protobuf.LazyStringList code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureCodeIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ code_ = new com.google.protobuf.LazyStringArrayList(code_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public java.util.List<java.lang.String>
+ getCodeList() {
+ return java.util.Collections.unmodifiableList(code_);
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public int getCodeCount() {
+ return code_.size();
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public java.lang.String getCode(int index) {
+ return code_.get(index);
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getCodeBytes(int index) {
+ return code_.getByteString(index);
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public Builder setCode(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCodeIsMutable();
+ code_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public Builder addCode(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCodeIsMutable();
+ code_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public Builder addAllCode(
+ java.lang.Iterable<java.lang.String> values) {
+ ensureCodeIsMutable();
+ super.addAll(values, code_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public Builder clearCode() {
+ code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string code = 2;</code>
+ *
+ * <pre>
+ * A list of string codes which can be used for any repeated strings in the path args. This is
+ * optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ * that contains the codes.
+ * </pre>
+ */
+ public Builder addCodeBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCodeIsMutable();
+ code_.add(value);
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.InstanceIdentifier)
}
"controller.mdsal.PathArgumentAttribute\022@" +
"\n\nattributes\030\005 \003(\0132,.org.opendaylight.co" +
"ntroller.mdsal.Attribute\022\017\n\007intType\030\006 \001(" +
- "\005\"X\n\022InstanceIdentifier\022B\n\targuments\030\001 \003" +
+ "\005\"f\n\022InstanceIdentifier\022B\n\targuments\030\001 \003" +
"(\0132/.org.opendaylight.controller.mdsal.P" +
- "athArgument\"\245\003\n\004Node\022\014\n\004path\030\001 \001(\t\022\014\n\004ty" +
- "pe\030\002 \001(\t\022E\n\014pathArgument\030\003 \001(\0132/.org.ope" +
- "ndaylight.controller.mdsal.PathArgument\022" +
- "\017\n\007intType\030\004 \001(\005\022@\n\nattributes\030\005 \003(\0132,.o",
- "rg.opendaylight.controller.mdsal.Attribu" +
- "te\0226\n\005child\030\006 \003(\0132\'.org.opendaylight.con" +
- "troller.mdsal.Node\022\r\n\005value\030\007 \001(\t\022\021\n\tval" +
- "ueType\030\010 \001(\t\022\024\n\014intValueType\030\t \001(\005\022V\n\027in" +
- "stanceIdentifierValue\030\n \001(\01325.org.openda" +
- "ylight.controller.mdsal.InstanceIdentifi" +
- "er\022\021\n\tbitsValue\030\013 \003(\t\022\014\n\004code\030\014 \003(\t\"`\n\tC" +
- "ontainer\022\022\n\nparentPath\030\001 \002(\t\022?\n\016normaliz" +
- "edNode\030\002 \001(\0132\'.org.opendaylight.controll" +
- "er.mdsal.Node\"\246\001\n\014NodeMapEntry\022U\n\026instan",
- "ceIdentifierPath\030\001 \002(\01325.org.opendayligh" +
- "t.controller.mdsal.InstanceIdentifier\022?\n" +
- "\016normalizedNode\030\002 \001(\0132\'.org.opendaylight" +
- ".controller.mdsal.Node\"N\n\007NodeMap\022C\n\nmap" +
- "Entries\030\001 \003(\0132/.org.opendaylight.control" +
- "ler.mdsal.NodeMapEntryBO\n5org.opendaylig" +
- "ht.controller.protobuff.messages.commonB" +
- "\026NormalizedNodeMessages"
+ "athArgument\022\014\n\004code\030\002 \003(\t\"\245\003\n\004Node\022\014\n\004pa" +
+ "th\030\001 \001(\t\022\014\n\004type\030\002 \001(\t\022E\n\014pathArgument\030\003" +
+ " \001(\0132/.org.opendaylight.controller.mdsal" +
+ ".PathArgument\022\017\n\007intType\030\004 \001(\005\022@\n\nattrib",
+ "utes\030\005 \003(\0132,.org.opendaylight.controller" +
+ ".mdsal.Attribute\0226\n\005child\030\006 \003(\0132\'.org.op" +
+ "endaylight.controller.mdsal.Node\022\r\n\005valu" +
+ "e\030\007 \001(\t\022\021\n\tvalueType\030\010 \001(\t\022\024\n\014intValueTy" +
+ "pe\030\t \001(\005\022V\n\027instanceIdentifierValue\030\n \001(" +
+ "\01325.org.opendaylight.controller.mdsal.In" +
+ "stanceIdentifier\022\021\n\tbitsValue\030\013 \003(\t\022\014\n\004c" +
+ "ode\030\014 \003(\t\"`\n\tContainer\022\022\n\nparentPath\030\001 \002" +
+ "(\t\022?\n\016normalizedNode\030\002 \001(\0132\'.org.openday" +
+ "light.controller.mdsal.Node\"\246\001\n\014NodeMapE",
+ "ntry\022U\n\026instanceIdentifierPath\030\001 \002(\01325.o" +
+ "rg.opendaylight.controller.mdsal.Instanc" +
+ "eIdentifier\022?\n\016normalizedNode\030\002 \001(\0132\'.or" +
+ "g.opendaylight.controller.mdsal.Node\"N\n\007" +
+ "NodeMap\022C\n\nmapEntries\030\001 \003(\0132/.org.openda" +
+ "ylight.controller.mdsal.NodeMapEntryBO\n5" +
+ "org.opendaylight.controller.protobuff.me" +
+ "ssages.commonB\026NormalizedNodeMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
internal_static_org_opendaylight_controller_mdsal_InstanceIdentifier_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_InstanceIdentifier_descriptor,
- new java.lang.String[] { "Arguments", });
+ new java.lang.String[] { "Arguments", "Code", });
internal_static_org_opendaylight_controller_mdsal_Node_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_org_opendaylight_controller_mdsal_Node_fieldAccessorTable = new
// @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CloseTransactionChainReply)
}
- public interface CreateTransactionChainOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChain}
- */
- public static final class CreateTransactionChain extends
- com.google.protobuf.GeneratedMessage
- implements CreateTransactionChainOrBuilder {
- // Use CreateTransactionChain.newBuilder() to construct.
- private CreateTransactionChain(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private CreateTransactionChain(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final CreateTransactionChain defaultInstance;
- public static CreateTransactionChain getDefaultInstance() {
- return defaultInstance;
- }
-
- public CreateTransactionChain getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private CreateTransactionChain(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.Builder.class);
- }
-
- public static com.google.protobuf.Parser<CreateTransactionChain> PARSER =
- new com.google.protobuf.AbstractParser<CreateTransactionChain>() {
- public CreateTransactionChain parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new CreateTransactionChain(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<CreateTransactionChain> getParserForType() {
- return PARSER;
- }
-
- private void initFields() {
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChain}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.Builder.class);
- }
-
- // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain getDefaultInstanceForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.getDefaultInstance();
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain build() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain buildPartial() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain(this);
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain) {
- return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain other) {
- if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChain) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransactionChain)
- }
-
- static {
- defaultInstance = new CreateTransactionChain(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CreateTransactionChain)
- }
-
- public interface CreateTransactionChainReplyOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string transactionChainPath = 1;
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- boolean hasTransactionChainPath();
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- java.lang.String getTransactionChainPath();
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- com.google.protobuf.ByteString
- getTransactionChainPathBytes();
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChainReply}
- */
- public static final class CreateTransactionChainReply extends
- com.google.protobuf.GeneratedMessage
- implements CreateTransactionChainReplyOrBuilder {
- // Use CreateTransactionChainReply.newBuilder() to construct.
- private CreateTransactionChainReply(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private CreateTransactionChainReply(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final CreateTransactionChainReply defaultInstance;
- public static CreateTransactionChainReply getDefaultInstance() {
- return defaultInstance;
- }
-
- public CreateTransactionChainReply getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private CreateTransactionChainReply(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- transactionChainPath_ = input.readBytes();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.Builder.class);
- }
-
- public static com.google.protobuf.Parser<CreateTransactionChainReply> PARSER =
- new com.google.protobuf.AbstractParser<CreateTransactionChainReply>() {
- public CreateTransactionChainReply parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new CreateTransactionChainReply(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<CreateTransactionChainReply> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // required string transactionChainPath = 1;
- public static final int TRANSACTIONCHAINPATH_FIELD_NUMBER = 1;
- private java.lang.Object transactionChainPath_;
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public boolean hasTransactionChainPath() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public java.lang.String getTransactionChainPath() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- transactionChainPath_ = s;
- }
- return s;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public com.google.protobuf.ByteString
- getTransactionChainPathBytes() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- transactionChainPath_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- transactionChainPath_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasTransactionChainPath()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getTransactionChainPathBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getTransactionChainPathBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionChainReply}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReplyOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.Builder.class);
- }
-
- // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- transactionChainPath_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply getDefaultInstanceForType() {
- return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.getDefaultInstance();
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply build() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply buildPartial() {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.transactionChainPath_ = transactionChainPath_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply) {
- return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply other) {
- if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply.getDefaultInstance()) return this;
- if (other.hasTransactionChainPath()) {
- bitField0_ |= 0x00000001;
- transactionChainPath_ = other.transactionChainPath_;
- onChanged();
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasTransactionChainPath()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages.CreateTransactionChainReply) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // required string transactionChainPath = 1;
- private java.lang.Object transactionChainPath_ = "";
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public boolean hasTransactionChainPath() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public java.lang.String getTransactionChainPath() {
- java.lang.Object ref = transactionChainPath_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- transactionChainPath_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public com.google.protobuf.ByteString
- getTransactionChainPathBytes() {
- java.lang.Object ref = transactionChainPath_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- transactionChainPath_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder setTransactionChainPath(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- transactionChainPath_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder clearTransactionChainPath() {
- bitField0_ = (bitField0_ & ~0x00000001);
- transactionChainPath_ = getDefaultInstance().getTransactionChainPath();
- onChanged();
- return this;
- }
- /**
- * <code>required string transactionChainPath = 1;</code>
- */
- public Builder setTransactionChainPathBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- transactionChainPath_ = value;
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransactionChainReply)
- }
-
- static {
- defaultInstance = new CreateTransactionChainReply(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.CreateTransactionChainReply)
- }
-
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChain_descriptor;
private static
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChainReply_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
"\n\033ShardTransactionChain.proto\022!org.opend" +
"aylight.controller.mdsal\"3\n\025CloseTransac" +
"tionChain\022\032\n\022transactionChainId\030\001 \001(\t\"\034\n" +
- "\032CloseTransactionChainReply\"\030\n\026CreateTra" +
- "nsactionChain\";\n\033CreateTransactionChainR" +
- "eply\022\034\n\024transactionChainPath\030\001 \002(\tB[\n:or" +
- "g.opendaylight.controller.protobuff.mess" +
- "ages.transactionB\035ShardTransactionChainM" +
- "essages"
+ "\032CloseTransactionChainReplyB[\n:org.opend" +
+ "aylight.controller.protobuff.messages.tr" +
+ "ansactionB\035ShardTransactionChainMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CloseTransactionChainReply_descriptor,
new java.lang.String[] { });
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor =
- getDescriptor().getMessageTypes().get(2);
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChain_descriptor,
- new java.lang.String[] { });
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor =
- getDescriptor().getMessageTypes().get(3);
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_org_opendaylight_controller_mdsal_CreateTransactionChainReply_descriptor,
- new java.lang.String[] { "TransactionChainPath", });
return null;
}
};
message CanCommitTransaction{
-
+ required string transactionId = 1;
}
message CanCommitTransactionReply{
}
message AbortTransaction{
-
+ required string transactionId = 1;
}
message AbortTransactionReply {
}
message CommitTransaction{
-
+ required string transactionId = 1;
}
message CommitTransactionReply{
message InstanceIdentifier {
repeated PathArgument arguments=1;
+
+ // A list of string codes which can be used for any repeated strings in the path args. This is
+ // optional - an InstanceIdentifier may be encoded as part of another message, eg NormalizedNode,
+ // that contains the codes.
+ repeated string code = 2;
}
message Node{
new NormalizedNodeToNodeCodec(schemaContext);
long start = System.currentTimeMillis();
Container container =
- codec.encode(instanceIdentifierFromString(id), output);
+ codec.encode(output);
long end = System.currentTimeMillis();
System.out.println("Timetaken to encode :"+(end-start));
assertNotNull(container);
- assertEquals(id, container.getParentPath() + "/"
- + NormalizedNodeSerializer.deSerialize(container.getNormalizedNode(),
- container.getNormalizedNode().getPathArgument()));
// Decode the normalized node from the ProtocolBuffer form
// first get the node representation of normalized node
start = System.currentTimeMillis();
NormalizedNode<?, ?> normalizedNode =
- codec.decode(instanceIdentifierFromString(id), node);
+ codec.decode(node);
end = System.currentTimeMillis();
System.out.println("Timetaken to decode :"+(end-start));
new NormalizedNodeToNodeCodec(schemaContext);
Container container =
- normalizedNodeToNodeCodec.encode(YangInstanceIdentifier.builder()
- .build(), documentOne);
+ normalizedNodeToNodeCodec.encode(documentOne);
final NormalizedNode<?, ?> decode =
normalizedNodeToNodeCodec
.decode(
- instanceIdentifierFromString("/(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)test"),
container.getNormalizedNode());
assertNotNull(decode);
// let us ensure that the return decode normalized node encode returns same container
Container containerResult =
- normalizedNodeToNodeCodec.encode(YangInstanceIdentifier.builder()
- .build(), decode);
-
- assertEquals(container.getParentPath(), containerResult.getParentPath());
-
- assertEquals(containerResult.getNormalizedNode().getChildCount(),
- container.getNormalizedNode().getChildCount());
+ normalizedNodeToNodeCodec.encode(decode);
// check first level children are proper
List<Node> childrenResult =
NormalizedNodeToNodeCodec codec =
new NormalizedNodeToNodeCodec(schemaContext);
- Container encode = codec.encode(identifier, uno);
+ Container encode = codec.encode(uno);
System.out.println(encode.getNormalizedNode());
- codec.decode(identifier, encode.getNormalizedNode());
+ codec.decode(encode.getNormalizedNode());
}
}
public class PathUtilsTest {
- @Test
- public void getParentPath(){
- assertEquals("", PathUtils.getParentPath("foobar"));
- assertEquals("", PathUtils.getParentPath("/a"));
- assertEquals("/a", PathUtils.getParentPath("/a/b"));
- assertEquals("/a/b", PathUtils.getParentPath("/a/b/c"));
- assertEquals("/a/b", PathUtils.getParentPath("a/b/c"));
- }
-
@Test
public void toStringNodeIdentifier(){
YangInstanceIdentifier.PathArgument pathArgument = nodeIdentifier();
expectedException.expect(NullPointerException.class);
expectedException.expectMessage("pathArgument should not be null");
- PathArgumentSerializer.serialize(mock(
- NormalizedNodeSerializationContext.class), null);
+ PathArgumentSerializer.serialize(mock(QNameSerializationContext.class), null);
}
expectedException.expect(NullPointerException.class);
expectedException.expectMessage("pathArgument should not be null");
- PathArgumentSerializer.deSerialize(mock(NormalizedNodeDeSerializationContext.class), null);
+ PathArgumentSerializer.deSerialize(mock(QNameDeSerializationContext.class), null);
}
@Test
public void testSerializeNodeIdentifier(){
- NormalizedNodeSerializationContext serializationContext =
- mock(NormalizedNodeSerializationContext.class);
+ QNameSerializationContext serializationContext = mock(QNameSerializationContext.class);
when(serializationContext.addLocalName(anyString())).thenReturn(5);
when(serializationContext.addNamespace(any(URI.class))).thenReturn(10);
@Test
public void testSerializeNodeIdentifierWithValue(){
- NormalizedNodeSerializationContext serializationContext =
- mock(NormalizedNodeSerializationContext.class);
+ QNameSerializationContext serializationContext = mock(QNameSerializationContext.class);
when(serializationContext.addLocalName(anyString())).thenReturn(5);
when(serializationContext.addNamespace(any(URI.class))).thenReturn(10);
@Test
public void testSerializeNodeIdentifierWithPredicates(){
- NormalizedNodeSerializationContext serializationContext =
- mock(NormalizedNodeSerializationContext.class);
-
+ QNameSerializationContext serializationContext = mock(QNameSerializationContext.class);
when(serializationContext.addLocalName("test")).thenReturn(5);
when(serializationContext.addLocalName("child-name")).thenReturn(55);
@Test
public void testSerializeAugmentationIdentifier(){
- NormalizedNodeSerializationContext serializationContext =
- mock(NormalizedNodeSerializationContext.class);
+ QNameSerializationContext serializationContext = mock(QNameSerializationContext.class);
when(serializationContext.addLocalName(anyString())).thenReturn(55);
when(serializationContext.addNamespace(any(URI.class))).thenReturn(66);
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
+import org.mockito.Mockito;
import org.opendaylight.controller.cluster.datastore.util.TestModel;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Set;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public void testSerializeShort(){
short v1 = 5;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.SHORT_TYPE.ordinal(), builder.getIntValueType());
assertEquals("5", builder.getValue());
- NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
+ NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 =
+ NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.SHORT_TYPE.ordinal(), builder1.getType());
assertEquals("5", builder.getValue());
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), expected);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), expected);
assertEquals(ValueType.INT_TYPE.ordinal(), builder.getIntValueType());
assertEquals("243", builder.getValue());
- NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
+ NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 =
+ NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), expected);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), expected);
assertEquals(ValueType.INT_TYPE.ordinal(), builder1.getType());
assertEquals("243", builder1.getValue());
public void testSerializeLong(){
long v1 = 5;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.LONG_TYPE.ordinal(), builder.getIntValueType());
assertEquals("5", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.LONG_TYPE.ordinal(), builder1.getType());
assertEquals("5", builder1.getValue());
public void testSerializeByte(){
byte v1 = 5;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BYTE_TYPE.ordinal(), builder.getIntValueType());
assertEquals("5", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BYTE_TYPE.ordinal(), builder1.getType());
assertEquals("5", builder1.getValue());
@Test
public void testSerializeBits(){
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class),
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class),
ImmutableSet.of("foo", "bar"));
assertEquals(ValueType.BITS_TYPE.ordinal(), builder.getIntValueType());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class),
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class),
ImmutableSet.of("foo", "bar"));
assertEquals(ValueType.BITS_TYPE.ordinal(), builder1.getType());
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Expected value type to be Bits but was :");
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class),
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class),
ImmutableSet.of(1, 2));
}
@Test
public void testSerializeEmptyString(){
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class),"");
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class),"");
assertEquals(ValueType.STRING_TYPE.ordinal(), builder.getIntValueType());
assertEquals("", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class),"");
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class),"");
assertEquals(ValueType.STRING_TYPE.ordinal(), builder1.getType());
assertEquals("", builder1.getValue());
@Test
public void testSerializeString(){
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class),"foo");
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class),"foo");
assertEquals(ValueType.STRING_TYPE.ordinal(), builder.getIntValueType());
assertEquals("foo", builder.getValue());
- NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
+ NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 =
+ NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class),"foo");
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class),"foo");
assertEquals(ValueType.STRING_TYPE.ordinal(), builder1.getType());
assertEquals("foo", builder1.getValue());
public void testSerializeBoolean(){
boolean v1 = true;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BOOL_TYPE.ordinal(), builder.getIntValueType());
assertEquals("true", builder.getValue());
- NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 =
+ NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BOOL_TYPE.ordinal(), builder1.getType());
assertEquals("true", builder1.getValue());
public void testSerializeQName(){
QName v1 = TestModel.TEST_QNAME;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.QNAME_TYPE.ordinal(), builder.getIntValueType());
assertEquals("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)test", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.QNAME_TYPE.ordinal(), builder1.getType());
assertEquals("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)test", builder1.getValue());
YangInstanceIdentifier v1 = TestModel.TEST_PATH;
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
-
+ QNameSerializationContext mockContext = mock(QNameSerializationContext.class);
+ ValueSerializer.serialize(builder, mockContext, v1);
assertEquals(ValueType.YANG_IDENTIFIER_TYPE.ordinal(), builder.getIntValueType());
NormalizedNodeMessages.InstanceIdentifier serializedYangInstanceIdentifier =
builder.getInstanceIdentifierValue();
assertEquals(1, serializedYangInstanceIdentifier.getArgumentsCount());
- assertEquals(TestModel.TEST_QNAME.toString(), serializedYangInstanceIdentifier.getArguments(0).getNodeType().getValue());
+ Mockito.verify(mockContext).addLocalName(TestModel.TEST_QNAME.getLocalName());
+ Mockito.verify(mockContext).addNamespace(TestModel.TEST_QNAME.getNamespace());
}
@Test
public void testSerializeBigInteger(){
BigInteger v1 = new BigInteger("1000000000000000000000000");
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BIG_INTEGER_TYPE.ordinal(), builder.getIntValueType());
assertEquals("1000000000000000000000000", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BIG_INTEGER_TYPE.ordinal(), builder1.getType());
assertEquals("1000000000000000000000000", builder1.getValue());
public void testSerializeBigDecimal(){
BigDecimal v1 = new BigDecimal("1000000000000000000000000.51616");
NormalizedNodeMessages.Node.Builder builder = NormalizedNodeMessages.Node.newBuilder();
- ValueSerializer.serialize(builder, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BIG_DECIMAL_TYPE.ordinal(), builder.getIntValueType());
assertEquals("1000000000000000000000000.51616", builder.getValue());
NormalizedNodeMessages.PathArgumentAttribute.Builder builder1 = NormalizedNodeMessages.PathArgumentAttribute.newBuilder();
- ValueSerializer.serialize(builder1, mock(
- NormalizedNodeSerializationContext.class), v1);
+ ValueSerializer.serialize(builder1, mock(QNameSerializationContext.class), v1);
assertEquals(ValueType.BIG_DECIMAL_TYPE.ordinal(), builder1.getType());
assertEquals("1000000000000000000000000.51616", builder1.getValue());
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Short);
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Byte);
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Integer);
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Long);
nodeBuilder.setValue("false");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Boolean);
nodeBuilder.setValue(TestModel.TEST_QNAME.toString());
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof QName);
nodeBuilder.addAllBitsValue(ImmutableList.of("foo", "bar"));
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof Set);
NormalizedNodeMessages.InstanceIdentifier.Builder idBuilder = NormalizedNodeMessages.InstanceIdentifier.newBuilder();
NormalizedNodeMessages.PathArgument.Builder pathBuilder = NormalizedNodeMessages.PathArgument.newBuilder();
- pathBuilder.setValue(TestModel.TEST_QNAME.toString());
pathBuilder.setIntType(PathArgumentType.NODE_IDENTIFIER.ordinal());
idBuilder.addArguments(pathBuilder);
nodeBuilder.setIntValueType(ValueType.YANG_IDENTIFIER_TYPE.ordinal());
nodeBuilder.setInstanceIdentifierValue(idBuilder);
- Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
- nodeBuilder.build());
+ QNameDeSerializationContext mockContext = mock(QNameDeSerializationContext.class);
+ Mockito.doReturn(TestModel.TEST_QNAME.getNamespace().toString()).when(mockContext).
+ getNamespace(Mockito.anyInt());
+ Mockito.doReturn(TestModel.TEST_QNAME.getLocalName()).when(mockContext).
+ getLocalName(Mockito.anyInt());
+ Mockito.doReturn(TestModel.TEST_QNAME.getFormattedRevision()).when(mockContext).
+ getRevision(Mockito.anyInt());
+
+ Object o = ValueSerializer.deSerialize(mockContext, nodeBuilder.build());
assertTrue(o instanceof YangInstanceIdentifier);
assertEquals(TestModel.TEST_PATH, o);
nodeBuilder.setIntValueType(ValueType.STRING_TYPE.ordinal());
nodeBuilder.setValue("25");
- Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ Object o = ValueSerializer.deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof String);
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof BigInteger);
nodeBuilder.setValue("25");
Object o = ValueSerializer
- .deSerialize(mock(NormalizedNodeDeSerializationContext.class),
+ .deSerialize(mock(QNameDeSerializationContext.class),
nodeBuilder.build());
assertTrue(o instanceof BigDecimal);
import org.junit.Assert;
import org.junit.Test;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameDeSerializationContext;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameDeSerializationContextImpl;
+import org.opendaylight.controller.cluster.datastore.node.utils.serialization.QNameSerializationContextImpl;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
public class InstanceIdentifierUtilsTest {
- private static QName TEST_QNAME =
- QName
- .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)test");
- private static QName NODE_WITH_VALUE_QNAME =
- QName
- .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)value");
- private static QName NODE_WITH_PREDICATES_QNAME =
- QName
- .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)pred");
- private static QName NAME_QNAME =
- QName
- .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)name");
-
- @Test
- public void testSerializationOfNodeIdentifier() {
- YangInstanceIdentifier.PathArgument p1 =
- new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
-
- List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
-
- arguments.add(p1);
+ private static QName TEST_QNAME = QName
+ .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)test");
+ private static QName NODE_WITH_VALUE_QNAME = QName
+ .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)value");
+ private static QName NODE_WITH_PREDICATES_QNAME = QName
+ .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)pred");
+ private static QName NAME_QNAME = QName
+ .create("(urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:store:test?revision=2014-03-13)name");
- YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
+ @Test
+ public void testSerializationOfNodeIdentifier() {
+ YangInstanceIdentifier.PathArgument p1 = new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
- NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
- InstanceIdentifierUtils.toSerializable(expected);
+ List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
- YangInstanceIdentifier actual =
- InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
+ arguments.add(p1);
+ YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
- Assert.assertEquals(expected.getLastPathArgument(),
- actual.getLastPathArgument());
+ NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
+ InstanceIdentifierUtils.toSerializable(expected);
+ YangInstanceIdentifier actual = InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
- }
+ Assert.assertEquals(expected.getLastPathArgument(), actual.getLastPathArgument());
+ }
- @Test
- public void testSerializationOfNodeWithValue() {
+ @Test
+ public void testSerializationOfNodeWithValue() {
- withValue((short) 1);
- withValue((long) 2);
- withValue(3);
- withValue(true);
+ withValue((short) 1);
+ withValue((long) 2);
+ withValue(3);
+ withValue(true);
- }
+ }
- private void withValue(Object value) {
- YangInstanceIdentifier.PathArgument p1 =
- new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
+ private void withValue(Object value) {
+ YangInstanceIdentifier.PathArgument p1 = new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
- YangInstanceIdentifier.PathArgument p2 =
- new YangInstanceIdentifier.NodeWithValue(NODE_WITH_VALUE_QNAME, value);
+ YangInstanceIdentifier.PathArgument p2 =
+ new YangInstanceIdentifier.NodeWithValue(NODE_WITH_VALUE_QNAME, value);
+ List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
- List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
+ arguments.add(p1);
+ arguments.add(p2);
- arguments.add(p1);
- arguments.add(p2);
+ YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
- YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
+ NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
+ InstanceIdentifierUtils.toSerializable(expected);
- NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
- InstanceIdentifierUtils.toSerializable(expected);
+ YangInstanceIdentifier actual = InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
- YangInstanceIdentifier actual =
- InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
+ Assert.assertEquals(expected.getLastPathArgument(), actual.getLastPathArgument());
+ }
+ @Test
+ public void testSerializationOfNodeIdentifierWithPredicates() {
- Assert.assertEquals(expected.getLastPathArgument(),
- actual.getLastPathArgument());
- }
+ withPredicates((short) 1);
+ withPredicates((long) 2);
+ withPredicates(3);
+ withPredicates(true);
+ }
- @Test
- public void testSerializationOfNodeIdentifierWithPredicates() {
+ private void withPredicates(Object value) {
+ YangInstanceIdentifier.PathArgument p1 = new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
- withPredicates((short) 1);
- withPredicates((long) 2);
- withPredicates(3);
- withPredicates(true);
+ YangInstanceIdentifier.PathArgument p2 = new YangInstanceIdentifier.NodeIdentifierWithPredicates(
+ NODE_WITH_PREDICATES_QNAME, NAME_QNAME, value);
- }
+ List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
- private void withPredicates(Object value) {
- YangInstanceIdentifier.PathArgument p1 =
- new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME);
+ arguments.add(p1);
+ arguments.add(p2);
- YangInstanceIdentifier.PathArgument p2 =
- new YangInstanceIdentifier.NodeIdentifierWithPredicates(
- NODE_WITH_PREDICATES_QNAME, NAME_QNAME, value);
+ YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
+ NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
+ InstanceIdentifierUtils.toSerializable(expected);
- List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
+ YangInstanceIdentifier actual = InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
- arguments.add(p1);
- arguments.add(p2);
+ Assert.assertEquals(expected.getLastPathArgument(), actual.getLastPathArgument());
+ }
- YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
+ @Test
+ public void testAugmentationIdentifier() {
+ YangInstanceIdentifier.PathArgument p1 = new YangInstanceIdentifier.AugmentationIdentifier(new HashSet(
+ Arrays.asList(TEST_QNAME)));
- NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
- InstanceIdentifierUtils.toSerializable(expected);
+ List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
- YangInstanceIdentifier actual =
- InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
+ arguments.add(p1);
+ YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
- Assert.assertEquals(expected.getLastPathArgument(),
- actual.getLastPathArgument());
- }
+ NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
+ InstanceIdentifierUtils.toSerializable(expected);
- @Test
- public void testAugmentationIdentifier() {
- YangInstanceIdentifier.PathArgument p1 =
- new YangInstanceIdentifier.AugmentationIdentifier(new HashSet(
- Arrays.asList(TEST_QNAME)));
+ YangInstanceIdentifier actual = InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
- List<YangInstanceIdentifier.PathArgument> arguments = new ArrayList<>();
+ Assert.assertEquals(expected.getLastPathArgument(), actual.getLastPathArgument());
- arguments.add(p1);
+ }
- YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
+ @Test
+ public void testSerializationWithContext() {
+ List<YangInstanceIdentifier.PathArgument> arguments =
+ Arrays.<YangInstanceIdentifier.PathArgument>asList(
+ new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME),
+ new YangInstanceIdentifier.NodeWithValue(NODE_WITH_VALUE_QNAME, 1),
+ new YangInstanceIdentifier.NodeIdentifierWithPredicates(
+ NODE_WITH_PREDICATES_QNAME, NAME_QNAME, 2));
- NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
- InstanceIdentifierUtils.toSerializable(expected);
+ YangInstanceIdentifier expected = YangInstanceIdentifier.create(arguments);
- YangInstanceIdentifier actual =
- InstanceIdentifierUtils.fromSerializable(instanceIdentifier);
+ QNameSerializationContextImpl serializationContext = new QNameSerializationContextImpl();
+ NormalizedNodeMessages.InstanceIdentifier instanceIdentifier =
+ InstanceIdentifierUtils.toSerializable(expected, serializationContext);
- Assert.assertEquals(expected.getLastPathArgument(),
- actual.getLastPathArgument());
+ QNameDeSerializationContext deserializationContext = new QNameDeSerializationContextImpl(
+ serializationContext.getCodes());
- }
+ YangInstanceIdentifier actual = InstanceIdentifierUtils.fromSerializable(
+ instanceIdentifier, deserializationContext);
+ Assert.assertEquals(expected.getLastPathArgument(), actual.getLastPathArgument());
+ }
}
package org.opendaylight.controller.protobuff.messages.transaction;
-import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.protobuff.messages.AbstractMessagesTest;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.common.QName;
/**
* This test case is present to ensure that if others have used proper version of protocol buffer
@Override
@Test
public void verifySerialization() throws Exception {
- String testTransactionChainPath =
- "/actor/path";
-
- ShardTransactionChainMessages.CreateTransactionChainReply.Builder builder =
- ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder();
- builder.setTransactionChainPath(testTransactionChainPath);
-
- writeToFile((com.google.protobuf.GeneratedMessage.Builder<?>) builder);
-
- // Here we will read the same and check we got back what we had saved
- ShardTransactionChainMessages.CreateTransactionChainReply replyNew =
- (ShardTransactionChainMessages.CreateTransactionChainReply) readFromFile(ShardTransactionChainMessages.CreateTransactionChainReply.PARSER);
-
- Assert.assertEquals(replyNew.getTransactionChainPath(),testTransactionChainPath);
-
- // the following will compare with the version we had shipped
- ShardTransactionChainMessages.CreateTransactionChainReply replyOriginal =
- (ShardTransactionChainMessages.CreateTransactionChainReply) readFromTestDataFile(ShardTransactionChainMessages.CreateTransactionChainReply.PARSER);
-
-
- Assert.assertEquals(replyOriginal.getTransactionChainPath(),
- replyNew.getTransactionChainPath());
-
}
@Override
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-clustering-config</artifactId>
<description>Configuration files for md-sal clustering</description>
<type xmlns:operational-dom-store-spi="urn:opendaylight:params:xml:ns:yang:controller:md:sal:core:spi:operational-dom-store">operational-dom-store-spi:operational-dom-datastore</type>
<name>distributed-operational-store-service</name>
</operational-data-store>
+
+ <allow-concurrent-commits>true</allow-concurrent-commits>
</module>
<module>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-common-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-common-impl</artifactId>
<packaging>bundle</packaging>
static final Short OUTER_LIST_ID = (short) 10;
- static final YangInstanceIdentifier OUTER_LIST_PATH_LEGACY = YangInstanceIdentifier.builder(TEST_QNAME)
+ static final YangInstanceIdentifier OUTER_LIST_PATH_LEGACY = YangInstanceIdentifier.builder(TEST_PATH)
.nodeWithKey(OUTER_LIST_QNAME, ID_QNAME, OUTER_LIST_ID).build();
static final YangInstanceIdentifier LEAF_TWO_PATH_LEGACY = YangInstanceIdentifier.builder(OUTER_LIST_PATH_LEGACY)
.withNodeIdentifier(new NodeIdentifier(TEST_QNAME)).withChild(testAnyXmlNode).build();
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
DataNormalizer normalizer = new DataNormalizer(createTestContext());
- Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder(TEST_QNAME).build(), testContainerNode);
+ Node<?> legacyNode = normalizer.toLegacy(YangInstanceIdentifier.builder().node(TEST_QNAME).build(), testContainerNode);
verifyLegacyNode(
legacyNode,
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-common-util</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-common</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-connector-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-distributed-datastore</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-inmemory-datastore</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-clustering-commons</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-akka-raft</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
import akka.actor.Props;
import akka.japi.Creator;
-
import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.cluster.datastore.messages.DataChangedReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
public class DataChangeListener extends AbstractUntypedActor {
private final AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> listener;
- private volatile boolean notificationsEnabled = false;
+ private boolean notificationsEnabled = false;
public DataChangeListener(AsyncDataChangeListener<YangInstanceIdentifier,
NormalizedNode<?, ?>> listener) {
change = reply.getChange();
this.listener.onDataChanged(change);
- if(getSender() != null){
+ // It seems the sender is never null but it doesn't hurt to check. If the caller passes in
+ // a null sender (ActorRef.noSender()), akka translates that to the deadLetters actor.
+ if(getSender() != null && !getContext().system().deadLetters().equals(getSender())) {
getSender().tell(new DataChangedReply(), getSelf());
}
}
package org.opendaylight.controller.cluster.datastore;
+import akka.actor.ActorRef;
import akka.actor.ActorSelection;
-
import com.google.common.base.Preconditions;
-
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
@Override public void onDataChanged(
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- dataChangeListenerActor.tell(new DataChanged(schemaContext, change), null);
+ dataChangeListenerActor.tell(new DataChanged(schemaContext, change), ActorRef.noSender());
}
}
import org.opendaylight.controller.cluster.raft.ConfigParams;
import org.opendaylight.controller.cluster.raft.DefaultConfigParamsImpl;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
-
import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
-
import java.util.concurrent.TimeUnit;
/**
private final int operationTimeoutInSeconds;
private final String dataStoreMXBeanType;
private final ConfigParams shardRaftConfig;
+ private final int shardTransactionCommitTimeoutInSeconds;
+ private final int shardTransactionCommitQueueCapacity;
- public DatastoreContext() {
- this("DistributedDatastore", null, Duration.create(10, TimeUnit.MINUTES), 5, 1000, 20000, 500);
- }
-
- public DatastoreContext(String dataStoreMXBeanType,
- InMemoryDOMDataStoreConfigProperties dataStoreProperties,
- Duration shardTransactionIdleTimeout,
- int operationTimeoutInSeconds,
- int shardJournalRecoveryLogBatchSize,
- int shardSnapshotBatchCount,
- int shardHeartbeatIntervalInMillis) {
- this.dataStoreMXBeanType = dataStoreMXBeanType;
+ private DatastoreContext(InMemoryDOMDataStoreConfigProperties dataStoreProperties,
+ ConfigParams shardRaftConfig, String dataStoreMXBeanType, int operationTimeoutInSeconds,
+ Duration shardTransactionIdleTimeout, int shardTransactionCommitTimeoutInSeconds,
+ int shardTransactionCommitQueueCapacity) {
this.dataStoreProperties = dataStoreProperties;
- this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ this.shardRaftConfig = shardRaftConfig;
+ this.dataStoreMXBeanType = dataStoreMXBeanType;
this.operationTimeoutInSeconds = operationTimeoutInSeconds;
+ this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ this.shardTransactionCommitTimeoutInSeconds = shardTransactionCommitTimeoutInSeconds;
+ this.shardTransactionCommitQueueCapacity = shardTransactionCommitQueueCapacity;
+ }
- DefaultConfigParamsImpl raftConfig = new DefaultConfigParamsImpl();
- raftConfig.setHeartBeatInterval(new FiniteDuration(shardHeartbeatIntervalInMillis,
- TimeUnit.MILLISECONDS));
- raftConfig.setJournalRecoveryLogBatchSize(shardJournalRecoveryLogBatchSize);
- raftConfig.setSnapshotBatchCount(shardSnapshotBatchCount);
- shardRaftConfig = raftConfig;
+ public static Builder newBuilder() {
+ return new Builder();
}
public InMemoryDOMDataStoreConfigProperties getDataStoreProperties() {
public ConfigParams getShardRaftConfig() {
return shardRaftConfig;
}
+
+ public int getShardTransactionCommitTimeoutInSeconds() {
+ return shardTransactionCommitTimeoutInSeconds;
+ }
+
+ public int getShardTransactionCommitQueueCapacity() {
+ return shardTransactionCommitQueueCapacity;
+ }
+
+ public static class Builder {
+ private InMemoryDOMDataStoreConfigProperties dataStoreProperties;
+ private Duration shardTransactionIdleTimeout = Duration.create(10, TimeUnit.MINUTES);
+ private int operationTimeoutInSeconds = 5;
+ private String dataStoreMXBeanType;
+ private int shardTransactionCommitTimeoutInSeconds = 30;
+ private int shardJournalRecoveryLogBatchSize = 1000;
+ private int shardSnapshotBatchCount = 20000;
+ private int shardHeartbeatIntervalInMillis = 500;
+ private int shardTransactionCommitQueueCapacity = 20000;
+
+ public Builder shardTransactionIdleTimeout(Duration shardTransactionIdleTimeout) {
+ this.shardTransactionIdleTimeout = shardTransactionIdleTimeout;
+ return this;
+ }
+
+ public Builder operationTimeoutInSeconds(int operationTimeoutInSeconds) {
+ this.operationTimeoutInSeconds = operationTimeoutInSeconds;
+ return this;
+ }
+
+ public Builder dataStoreMXBeanType(String dataStoreMXBeanType) {
+ this.dataStoreMXBeanType = dataStoreMXBeanType;
+ return this;
+ }
+
+ public Builder dataStoreProperties(InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
+ this.dataStoreProperties = dataStoreProperties;
+ return this;
+ }
+
+ public Builder shardTransactionCommitTimeoutInSeconds(int shardTransactionCommitTimeoutInSeconds) {
+ this.shardTransactionCommitTimeoutInSeconds = shardTransactionCommitTimeoutInSeconds;
+ return this;
+ }
+
+ public Builder shardJournalRecoveryLogBatchSize(int shardJournalRecoveryLogBatchSize) {
+ this.shardJournalRecoveryLogBatchSize = shardJournalRecoveryLogBatchSize;
+ return this;
+ }
+
+ public Builder shardSnapshotBatchCount(int shardSnapshotBatchCount) {
+ this.shardSnapshotBatchCount = shardSnapshotBatchCount;
+ return this;
+ }
+
+ public Builder shardHeartbeatIntervalInMillis(int shardHeartbeatIntervalInMillis) {
+ this.shardHeartbeatIntervalInMillis = shardHeartbeatIntervalInMillis;
+ return this;
+ }
+
+ public Builder shardTransactionCommitQueueCapacity(int shardTransactionCommitQueueCapacity) {
+ this.shardTransactionCommitQueueCapacity = shardTransactionCommitQueueCapacity;
+ return this;
+ }
+
+ public DatastoreContext build() {
+ DefaultConfigParamsImpl raftConfig = new DefaultConfigParamsImpl();
+ raftConfig.setHeartBeatInterval(new FiniteDuration(shardHeartbeatIntervalInMillis,
+ TimeUnit.MILLISECONDS));
+ raftConfig.setJournalRecoveryLogBatchSize(shardJournalRecoveryLogBatchSize);
+ raftConfig.setSnapshotBatchCount(shardSnapshotBatchCount);
+
+ return new DatastoreContext(dataStoreProperties, raftConfig, dataStoreMXBeanType,
+ operationTimeoutInSeconds, shardTransactionIdleTimeout,
+ shardTransactionCommitTimeoutInSeconds, shardTransactionCommitQueueCapacity);
+ }
+ }
}
import akka.actor.ActorSystem;
import akka.dispatch.OnComplete;
import akka.util.Timeout;
+import com.google.common.base.Optional;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
Preconditions.checkNotNull(path, "path should not be null");
Preconditions.checkNotNull(listener, "listener should not be null");
- if(LOG.isDebugEnabled()) {
- LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
- }
- ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
- DataChangeListener.props(listener ));
+
+ LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
- Future future = actorContext.executeLocalShardOperationAsync(shardName,
- new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
- new Timeout(actorContext.getOperationDuration().$times(
- REGISTER_DATA_CHANGE_LISTENER_TIMEOUT_FACTOR)));
+ Optional<ActorRef> shard = actorContext.findLocalShard(shardName);
+
+ //if shard is NOT local
+ if (!shard.isPresent()) {
+ LOG.debug("No local shard for shardName {} was found so returning a noop registration", shardName);
+ return new NoOpDataChangeListenerRegistration(listener);
+ }
+ //if shard is local
+ ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(DataChangeListener.props(listener));
+ Future future = actorContext.executeOperationAsync(shard.get(),
+ new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+ new Timeout(actorContext.getOperationDuration().$times(REGISTER_DATA_CHANGE_LISTENER_TIMEOUT_FACTOR)));
- if (future != null) {
- final DataChangeListenerRegistrationProxy listenerRegistrationProxy =
+ final DataChangeListenerRegistrationProxy listenerRegistrationProxy =
new DataChangeListenerRegistrationProxy(listener, dataChangeListenerActor);
- future.onComplete(new OnComplete(){
+ future.onComplete(new OnComplete() {
- @Override public void onComplete(Throwable failure, Object result)
+ @Override
+ public void onComplete(Throwable failure, Object result)
throws Throwable {
- if(failure != null){
- LOG.error("Failed to register listener at path " + path.toString(), failure);
- return;
- }
- RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
- listenerRegistrationProxy.setListenerRegistrationActor(actorContext
- .actorSelection(reply.getListenerRegistrationPath()));
+ if (failure != null) {
+ LOG.error("Failed to register listener at path " + path.toString(), failure);
+ return;
}
- }, actorContext.getActorSystem().dispatcher());
- return listenerRegistrationProxy;
- }
- if(LOG.isDebugEnabled()) {
- LOG.debug(
- "No local shard for shardName {} was found so returning a noop registration",
- shardName);
- }
- return new NoOpDataChangeListenerRegistration(listener);
+ RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
+ listenerRegistrationProxy.setListenerRegistrationActor(actorContext
+ .actorSelection(reply.getListenerRegistrationPath()));
+ }
+ }, actorContext.getActorSystem().dispatcher());
+
+ return listenerRegistrationProxy;
+
}
@Override
public void close() throws Exception {
actorContext.shutdown();
}
+
+ @VisibleForTesting
+ ActorContext getActorContext() {
+ return actorContext;
+ }
}
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
+import akka.actor.Cancellable;
import akka.actor.PoisonPill;
import akka.actor.Props;
import akka.event.Logging;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.protobuf.InvalidProtocolBufferException;
import org.opendaylight.controller.cluster.common.actor.CommonConfig;
import org.opendaylight.controller.cluster.common.actor.MeteringBehavior;
+import org.opendaylight.controller.cluster.datastore.ShardCommitCoordinator.CohortEntry;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardTransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardMBeanFactory;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionChain;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
import org.opendaylight.controller.cluster.raft.protobuff.client.messages.CompositeModificationPayload;
import org.opendaylight.controller.cluster.raft.protobuff.client.messages.Payload;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionFactory;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import scala.concurrent.duration.Duration;
+import scala.concurrent.duration.FiniteDuration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import javax.annotation.Nonnull;
/**
* A Shard represents a portion of the logical data tree <br/>
*/
public class Shard extends RaftActor {
+ private static final Object COMMIT_TRANSACTION_REPLY = new CommitTransactionReply().toSerializable();
+
+ private static final Object TX_COMMIT_TIMEOUT_CHECK_MESSAGE = "txCommitTimeoutCheck";
+
public static final String DEFAULT_NAME = "default";
// The state of this Shard
private final InMemoryDOMDataStore store;
- private final Map<Object, DOMStoreThreePhaseCommitCohort>
- modificationToCohort = new HashMap<>();
-
private final LoggingAdapter LOG =
Logging.getLogger(getContext().system(), this);
private ActorRef createSnapshotTransaction;
+ private int createSnapshotTransactionCounter;
+
+ private final ShardCommitCoordinator commitCoordinator;
+
+ private final long transactionCommitTimeout;
+
+ private Cancellable txCommitTimeoutCheckSchedule;
+
/**
* Coordinates persistence recovery on startup.
*/
if (isMetricsCaptureEnabled()) {
getContext().become(new MeteringBehavior(this));
}
+
+ commitCoordinator = new ShardCommitCoordinator(TimeUnit.SECONDS.convert(1, TimeUnit.MINUTES),
+ datastoreContext.getShardTransactionCommitQueueCapacity());
+
+ transactionCommitTimeout = TimeUnit.MILLISECONDS.convert(
+ datastoreContext.getShardTransactionCommitTimeoutInSeconds(), TimeUnit.SECONDS);
}
private static Map<String, String> mapPeerAddresses(
return Props.create(new ShardCreator(name, peerAddresses, datastoreContext, schemaContext));
}
- @Override public void onReceiveRecover(Object message) {
+ @Override
+ public void postStop() {
+ super.postStop();
+
+ if(txCommitTimeoutCheckSchedule != null) {
+ txCommitTimeoutCheckSchedule.cancel();
+ }
+ }
+
+ @Override
+ public void onReceiveRecover(Object message) {
if(LOG.isDebugEnabled()) {
LOG.debug("onReceiveRecover: Received message {} from {}",
message.getClass().toString(),
}
}
- @Override public void onReceiveCommand(Object message) {
+ @Override
+ public void onReceiveCommand(Object message) {
if(LOG.isDebugEnabled()) {
- LOG.debug("onReceiveCommand: Received message {} from {}",
- message.getClass().toString(),
- getSender());
+ LOG.debug("onReceiveCommand: Received message {} from {}", message, getSender());
}
if(message.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
- // This must be for install snapshot. Don't want to open this up and trigger
- // deSerialization
- self()
- .tell(new CaptureSnapshotReply(ReadDataReply.getNormalizedNodeByteString(message)),
- self());
-
- createSnapshotTransaction = null;
- // Send a PoisonPill instead of sending close transaction because we do not really need
- // a response
- getSender().tell(PoisonPill.getInstance(), self());
-
+ handleReadDataReply(message);
+ } else if (message.getClass().equals(CreateTransaction.SERIALIZABLE_CLASS)) {
+ handleCreateTransaction(message);
+ } else if(message instanceof ForwardedReadyTransaction) {
+ handleForwardedReadyTransaction((ForwardedReadyTransaction)message);
+ } else if(message.getClass().equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
+ handleCanCommitTransaction(CanCommitTransaction.fromSerializable(message));
+ } else if(message.getClass().equals(CommitTransaction.SERIALIZABLE_CLASS)) {
+ handleCommitTransaction(CommitTransaction.fromSerializable(message));
+ } else if(message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) {
+ handleAbortTransaction(AbortTransaction.fromSerializable(message));
} else if (message.getClass().equals(CloseTransactionChain.SERIALIZABLE_CLASS)){
closeTransactionChain(CloseTransactionChain.fromSerializable(message));
} else if (message instanceof RegisterChangeListener) {
registerChangeListener((RegisterChangeListener) message);
} else if (message instanceof UpdateSchemaContext) {
updateSchemaContext((UpdateSchemaContext) message);
- } else if (message instanceof ForwardedCommitTransaction) {
- handleForwardedCommit((ForwardedCommitTransaction) message);
- } else if (message.getClass()
- .equals(CreateTransaction.SERIALIZABLE_CLASS)) {
- if (isLeader()) {
- createTransaction(CreateTransaction.fromSerializable(message));
- } else if (getLeader() != null) {
- getLeader().forward(message, getContext());
- } else {
- getSender().tell(new akka.actor.Status.Failure(new IllegalStateException(
- "Could not find leader so transaction cannot be created")), getSelf());
- }
} else if (message instanceof PeerAddressResolved) {
PeerAddressResolved resolved = (PeerAddressResolved) message;
setPeerAddress(resolved.getPeerId().toString(),
resolved.getPeerAddress());
+ } else if(message.equals(TX_COMMIT_TIMEOUT_CHECK_MESSAGE)) {
+ handleTransactionCommitTimeoutCheck();
} else {
super.onReceiveCommand(message);
}
}
+ private void handleTransactionCommitTimeoutCheck() {
+ CohortEntry cohortEntry = commitCoordinator.getCurrentCohortEntry();
+ if(cohortEntry != null) {
+ long elapsed = System.currentTimeMillis() - cohortEntry.getLastAccessTime();
+ if(elapsed > transactionCommitTimeout) {
+ LOG.warning("Current transaction {} has timed out after {} ms - aborting",
+ cohortEntry.getTransactionID(), transactionCommitTimeout);
+
+ doAbortTransaction(cohortEntry.getTransactionID(), null);
+ }
+ }
+ }
+
+ private void handleCommitTransaction(CommitTransaction commit) {
+ final String transactionID = commit.getTransactionID();
+
+ LOG.debug("Committing transaction {}", transactionID);
+
+ // Get the current in-progress cohort entry in the commitCoordinator if it corresponds to
+ // this transaction.
+ final CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry == null) {
+ // We're not the current Tx - the Tx was likely expired b/c it took too long in
+ // between the canCommit and commit messages.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("Cannot commit transaction %s - it is not the current transaction",
+ transactionID));
+ LOG.error(ex.getMessage());
+ shardMBean.incrementFailedTransactionsCount();
+ getSender().tell(new akka.actor.Status.Failure(ex), getSelf());
+ return;
+ }
+
+ // We perform the preCommit phase here atomically with the commit phase. This is an
+ // optimization to eliminate the overhead of an extra preCommit message. We lose front-end
+ // coordination of preCommit across shards in case of failure but preCommit should not
+ // normally fail since we ensure only one concurrent 3-phase commit.
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ cohortEntry.getCohort().preCommit().get();
+
+ if(persistent) {
+ Shard.this.persistData(getSender(), transactionID,
+ new CompositeModificationPayload(cohortEntry.getModification().toSerializable()));
+ } else {
+ Shard.this.finishCommit(getSender(), transactionID);
+ }
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.error(e, "An exception occurred while preCommitting transaction {}",
+ cohortEntry.getTransactionID());
+ shardMBean.incrementFailedTransactionsCount();
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+
+ cohortEntry.updateLastAccessTime();
+ }
+
+ private void finishCommit(@Nonnull final ActorRef sender, final @Nonnull String transactionID) {
+ // With persistence enabled, this method is called via applyState by the leader strategy
+ // after the commit has been replicated to a majority of the followers.
+
+ CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry == null) {
+ // The transaction is no longer the current commit. This can happen if the transaction
+ // was aborted prior, most likely due to timeout in the front-end. We need to finish
+ // committing the transaction though since it was successfully persisted and replicated
+ // however we can't use the original cohort b/c it was already preCommitted and may
+ // conflict with the current commit or may have been aborted so we commit with a new
+ // transaction.
+ cohortEntry = commitCoordinator.getAndRemoveCohortEntry(transactionID);
+ if(cohortEntry != null) {
+ commitWithNewTransaction(cohortEntry.getModification());
+ sender.tell(COMMIT_TRANSACTION_REPLY, getSelf());
+ } else {
+ // This really shouldn't happen - it likely means that persistence or replication
+ // took so long to complete such that the cohort entry was expired from the cache.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("Could not finish committing transaction %s - no CohortEntry found",
+ transactionID));
+ LOG.error(ex.getMessage());
+ sender.tell(new akka.actor.Status.Failure(ex), getSelf());
+ }
+
+ return;
+ }
+
+ LOG.debug("Finishing commit for transaction {}", cohortEntry.getTransactionID());
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ cohortEntry.getCohort().commit().get();
+
+ sender.tell(COMMIT_TRANSACTION_REPLY, getSelf());
+
+ shardMBean.incrementCommittedTransactionCount();
+ shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
+
+ } catch (InterruptedException | ExecutionException e) {
+ sender.tell(new akka.actor.Status.Failure(e), getSelf());
+
+ LOG.error(e, "An exception occurred while committing transaction {}", transactionID);
+ shardMBean.incrementFailedTransactionsCount();
+ }
+
+ commitCoordinator.currentTransactionComplete(transactionID, true);
+ }
+
+ private void handleCanCommitTransaction(CanCommitTransaction canCommit) {
+ LOG.debug("Can committing transaction {}", canCommit.getTransactionID());
+ commitCoordinator.handleCanCommit(canCommit, getSender(), self());
+ }
+
+ private void handleForwardedReadyTransaction(ForwardedReadyTransaction ready) {
+ LOG.debug("Readying transaction {}", ready.getTransactionID());
+
+ // This message is forwarded by the ShardTransaction on ready. We cache the cohort in the
+ // commitCoordinator in preparation for the subsequent three phase commit initiated by
+ // the front-end.
+ commitCoordinator.transactionReady(ready.getTransactionID(), ready.getCohort(),
+ ready.getModification());
+
+ // Return our actor path as we'll handle the three phase commit.
+ getSender().tell(new ReadyTransactionReply(Serialization.serializedActorPath(self())).
+ toSerializable(), getSelf());
+ }
+
+ private void handleAbortTransaction(AbortTransaction abort) {
+ doAbortTransaction(abort.getTransactionID(), getSender());
+ }
+
+ private void doAbortTransaction(String transactionID, final ActorRef sender) {
+ final CohortEntry cohortEntry = commitCoordinator.getCohortEntryIfCurrent(transactionID);
+ if(cohortEntry != null) {
+ LOG.debug("Aborting transaction {}", transactionID);
+
+ // We don't remove the cached cohort entry here (ie pass false) in case the Tx was
+ // aborted during replication in which case we may still commit locally if replication
+ // succeeds.
+ commitCoordinator.currentTransactionComplete(transactionID, false);
+
+ final ListenableFuture<Void> future = cohortEntry.getCohort().abort();
+ final ActorRef self = getSelf();
+
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(Void v) {
+ shardMBean.incrementAbortTransactionsCount();
+
+ if(sender != null) {
+ sender.tell(new AbortTransactionReply().toSerializable(), self);
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during abort");
+
+ if(sender != null) {
+ sender.tell(new akka.actor.Status.Failure(t), self);
+ }
+ }
+ });
+ }
+ }
+
+ private void handleCreateTransaction(Object message) {
+ if (isLeader()) {
+ createTransaction(CreateTransaction.fromSerializable(message));
+ } else if (getLeader() != null) {
+ getLeader().forward(message, getContext());
+ } else {
+ getSender().tell(new akka.actor.Status.Failure(new IllegalStateException(
+ "Could not find shard leader so transaction cannot be created. This typically happens" +
+ " when system is coming up or recovering and a leader is being elected. Try again" +
+ " later.")), getSelf());
+ }
+ }
+
+ private void handleReadDataReply(Object message) {
+ // This must be for install snapshot. Don't want to open this up and trigger
+ // deSerialization
+
+ self().tell(new CaptureSnapshotReply(ReadDataReply.getNormalizedNodeByteString(message)),
+ self());
+
+ createSnapshotTransaction = null;
+
+ // Send a PoisonPill instead of sending close transaction because we do not really need
+ // a response
+ getSender().tell(PoisonPill.getInstance(), self());
+ }
+
private void closeTransactionChain(CloseTransactionChain closeTransactionChain) {
DOMStoreTransactionChain chain =
transactionChains.remove(closeTransactionChain.getTransactionChainId());
throw new NullPointerException("schemaContext should not be null");
}
- if (transactionType
- == TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
+ if (transactionType == TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
shardMBean.incrementReadOnlyTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newReadOnlyTransaction(), getSelf(),
- schemaContext,datastoreContext, shardMBean), transactionId.toString());
+ schemaContext,datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
- } else if (transactionType
- == TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
+ } else if (transactionType == TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
shardMBean.incrementReadWriteTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newReadWriteTransaction(), getSelf(),
- schemaContext, datastoreContext, shardMBean), transactionId.toString());
+ schemaContext, datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
- } else if (transactionType
- == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
+ } else if (transactionType == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
shardMBean.incrementWriteOnlyTransactionCount();
return getContext().actorOf(
ShardTransaction.props(factory.newWriteOnlyTransaction(), getSelf(),
- schemaContext, datastoreContext, shardMBean), transactionId.toString());
+ schemaContext, datastoreContext, shardMBean,
+ transactionId.getRemoteTransactionId()), transactionId.toString());
} else {
throw new IllegalArgumentException(
"Shard="+name + ":CreateTransaction message has unidentified transaction type="
commitCohort.commit().get();
}
-
- private void commit(final ActorRef sender, Object serialized) {
- Modification modification = MutableCompositeModification
- .fromSerializable(serialized, schemaContext);
- DOMStoreThreePhaseCommitCohort cohort =
- modificationToCohort.remove(serialized);
- if (cohort == null) {
- // If there's no cached cohort then we must be applying replicated state.
- commitWithNewTransaction(serialized);
- return;
- }
-
- if(sender == null) {
- LOG.error("Commit failed. Sender cannot be null");
- return;
- }
-
- ListenableFuture<Void> future = cohort.commit();
-
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- sender.tell(new CommitTransactionReply().toSerializable(), getSelf());
- shardMBean.incrementCommittedTransactionCount();
- shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during commit");
- shardMBean.incrementFailedTransactionsCount();
- sender.tell(new akka.actor.Status.Failure(t), getSelf());
- }
- });
-
- }
-
- private void commitWithNewTransaction(Object modification) {
+ private void commitWithNewTransaction(Modification modification) {
DOMStoreWriteTransaction tx = store.newWriteOnlyTransaction();
- MutableCompositeModification.fromSerializable(modification, schemaContext).apply(tx);
+ modification.apply(tx);
try {
syncCommitTransaction(tx);
shardMBean.incrementCommittedTransactionCount();
+ shardMBean.setLastCommittedTransactionTime(System.currentTimeMillis());
} catch (InterruptedException | ExecutionException e) {
shardMBean.incrementFailedTransactionsCount();
LOG.error(e, "Failed to commit");
}
}
- private void handleForwardedCommit(ForwardedCommitTransaction message) {
- Object serializedModification =
- message.getModification().toSerializable();
-
- modificationToCohort
- .put(serializedModification, message.getCohort());
-
- if (persistent) {
- this.persistData(getSender(), "identifier",
- new CompositeModificationPayload(serializedModification));
- } else {
- this.commit(getSender(), serializedModification);
- }
- }
-
private void updateSchemaContext(UpdateSchemaContext message) {
this.schemaContext = message.getSchemaContext();
updateSchemaContext(message.getSchemaContext());
recoveryCoordinator = null;
currentLogRecoveryBatch = null;
updateJournalStats();
+
+ //notify shard manager
+ getContext().parent().tell(new ActorInitialized(), getSelf());
+
+ // Schedule a message to be periodically sent to check if the current in-progress
+ // transaction should be expired and aborted.
+ FiniteDuration period = Duration.create(transactionCommitTimeout / 3, TimeUnit.MILLISECONDS);
+ txCommitTimeoutCheckSchedule = getContext().system().scheduler().schedule(
+ period, period, getSelf(),
+ TX_COMMIT_TIMEOUT_CHECK_MESSAGE, getContext().dispatcher(), ActorRef.noSender());
}
@Override
if (data instanceof CompositeModificationPayload) {
Object modification = ((CompositeModificationPayload) data).getModification();
- if (modification != null) {
- commit(clientActor, modification);
- } else {
+ if(modification == null) {
LOG.error(
- "modification is null - this is very unexpected, clientActor = {}, identifier = {}",
- identifier, clientActor != null ? clientActor.path().toString() : null);
+ "modification is null - this is very unexpected, clientActor = {}, identifier = {}",
+ identifier, clientActor != null ? clientActor.path().toString() : null);
+ } else if(clientActor == null) {
+ // There's no clientActor to which to send a commit reply so we must be applying
+ // replicated state from the leader.
+ commitWithNewTransaction(MutableCompositeModification.fromSerializable(
+ modification, schemaContext));
+ } else {
+ // This must be the OK to commit after replication consensus.
+ finishCommit(clientActor, identifier);
}
-
} else {
LOG.error("Unknown state received {} Class loader = {} CompositeNodeMod.ClassLoader = {}",
data, data.getClass().getClassLoader(),
// so that this actor does not get block building the snapshot
createSnapshotTransaction = createTransaction(
TransactionProxy.TransactionType.READ_ONLY.ordinal(),
- "createSnapshot", "");
+ "createSnapshot" + ++createSnapshotTransactionCounter, "");
createSnapshotTransaction.tell(
new ReadData(YangInstanceIdentifier.builder().build()).toSerializable(), self());
DOMStoreWriteTransaction transaction = store.newWriteOnlyTransaction();
NormalizedNodeMessages.Node serializedNode = NormalizedNodeMessages.Node.parseFrom(snapshot);
NormalizedNode<?, ?> node = new NormalizedNodeToNodeCodec(schemaContext)
- .decode(YangInstanceIdentifier.builder().build(), serializedNode);
+ .decode(serializedNode);
// delete everything first
transaction.delete(YangInstanceIdentifier.builder().build());
}
@VisibleForTesting
- NormalizedNode<?,?> readStore(YangInstanceIdentifier id)
- throws ExecutionException, InterruptedException {
- DOMStoreReadTransaction transaction = store.newReadOnlyTransaction();
-
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> future =
- transaction.read(id);
-
- Optional<NormalizedNode<?, ?>> optional = future.get();
- NormalizedNode<?, ?> node = optional.isPresent()? optional.get() : null;
-
- transaction.close();
-
- return node;
- }
-
- @VisibleForTesting
- void writeToStore(YangInstanceIdentifier id, NormalizedNode<?,?> node)
- throws ExecutionException, InterruptedException {
- DOMStoreWriteTransaction transaction = store.newWriteOnlyTransaction();
-
- transaction.write(id, node);
-
- syncCommitTransaction(transaction);
+ InMemoryDOMDataStore getDataStore() {
+ return store;
}
@VisibleForTesting
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import akka.actor.ActorRef;
+import akka.actor.Status;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+
+/**
+ * Coordinates commits for a shard ensuring only one concurrent 3-phase commit.
+ *
+ * @author Thomas Pantelis
+ */
+public class ShardCommitCoordinator {
+
+ private static final Logger LOG = LoggerFactory.getLogger(ShardCommitCoordinator.class);
+
+ // Pre-built serialized canCommit replies; immutable, so safe to share across transactions.
+ private static final Object CAN_COMMIT_REPLY_TRUE =
+ new CanCommitTransactionReply(Boolean.TRUE).toSerializable();
+
+ private static final Object CAN_COMMIT_REPLY_FALSE =
+ new CanCommitTransactionReply(Boolean.FALSE).toSerializable();
+
+ // Cohort entries cached by transactionReady, keyed by transaction ID; entries
+ // expire after the configured access timeout.
+ private final Cache<String, CohortEntry> cohortCache;
+
+ // The entry for the single Tx commit currently in progress, if any.
+ private CohortEntry currentCohortEntry;
+
+ // Entries waiting for the current Tx commit to complete.
+ private final Queue<CohortEntry> queuedCohortEntries;
+
+ // Maximum number of entries allowed in queuedCohortEntries.
+ private final int queueCapacity;
+
+ public ShardCommitCoordinator(long cacheExpiryTimeoutInSec, int queueCapacity) {
+ cohortCache = CacheBuilder.newBuilder().expireAfterAccess(
+ cacheExpiryTimeoutInSec, TimeUnit.SECONDS).build();
+
+ this.queueCapacity = queueCapacity;
+
+ // We use a LinkedList here to avoid synchronization overhead with concurrent queue impls
+ // since this should only be accessed on the shard's dispatcher.
+ queuedCohortEntries = new LinkedList<>();
+ }
+
+ /**
+ * This method caches a cohort entry for the given transaction ID in preparation for the
+ * subsequent 3-phase commit.
+ *
+ * @param transactionID the ID of the transaction
+ * @param cohort the cohort to participate in the transaction commit
+ * @param modification the modification made by the transaction
+ */
+ public void transactionReady(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+
+ cohortCache.put(transactionID, new CohortEntry(transactionID, cohort, modification));
+ }
+
+ /**
+ * This method handles the canCommit phase for a transaction.
+ *
+ * @param canCommit the CanCommitTransaction message
+ * @param sender the actor that sent the message
+ * @param shard the transaction's shard actor
+ */
+ public void handleCanCommit(CanCommitTransaction canCommit, final ActorRef sender,
+ final ActorRef shard) {
+ String transactionID = canCommit.getTransactionID();
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("Processing canCommit for transaction {} for shard {}",
+ transactionID, shard.path());
+ }
+
+ // Lookup the cohort entry that was cached previously (or should have been) by
+ // transactionReady (via the ForwardedReadyTransaction message).
+ final CohortEntry cohortEntry = cohortCache.getIfPresent(transactionID);
+ if(cohortEntry == null) {
+ // Either canCommit was invoked before ready(shouldn't happen) or a long time passed
+ // between canCommit and ready and the entry was expired from the cache.
+ IllegalStateException ex = new IllegalStateException(
+ String.format("No cohort entry found for transaction %s", transactionID));
+ LOG.error(ex.getMessage());
+ sender.tell(new Status.Failure(ex), shard);
+ return;
+ }
+
+ cohortEntry.setCanCommitSender(sender);
+ cohortEntry.setShard(shard);
+
+ if(currentCohortEntry != null) {
+ // There's already a Tx commit in progress - attempt to queue this entry to be
+ // committed after the current Tx completes.
+ LOG.debug("Transaction {} is already in progress - queueing transaction {}",
+ currentCohortEntry.getTransactionID(), transactionID);
+
+ if(queuedCohortEntries.size() < queueCapacity) {
+ queuedCohortEntries.offer(cohortEntry);
+ } else {
+ // Queue is full - fail this Tx and drop its cached entry.
+ removeCohortEntry(transactionID);
+
+ RuntimeException ex = new RuntimeException(
+ String.format("Could not enqueue transaction %s - the maximum commit queue"+
+ " capacity %d has been reached.",
+ transactionID, queueCapacity));
+ LOG.error(ex.getMessage());
+ sender.tell(new Status.Failure(ex), shard);
+ }
+ } else {
+ // No Tx commit currently in progress - make this the current entry and proceed with
+ // canCommit.
+ cohortEntry.updateLastAccessTime();
+ currentCohortEntry = cohortEntry;
+
+ doCanCommit(cohortEntry);
+ }
+ }
+
+ // Invokes the cohort's canCommit and replies to the original sender with the result.
+ private void doCanCommit(final CohortEntry cohortEntry) {
+
+ try {
+ // We block on the future here so we don't have to worry about possibly accessing our
+ // state on a different thread outside of our dispatcher. Also, the data store
+ // currently uses a same thread executor anyway.
+ Boolean canCommit = cohortEntry.getCohort().canCommit().get();
+
+ cohortEntry.getCanCommitSender().tell(
+ canCommit ? CAN_COMMIT_REPLY_TRUE : CAN_COMMIT_REPLY_FALSE, cohortEntry.getShard());
+
+ if(!canCommit) {
+ // Remove the entry from the cache now since the Tx will be aborted.
+ removeCohortEntry(cohortEntry.getTransactionID());
+ }
+ } catch (InterruptedException | ExecutionException e) {
+ LOG.debug("An exception occurred during canCommit", e);
+
+ // Remove the entry from the cache now since the Tx will be aborted.
+ removeCohortEntry(cohortEntry.getTransactionID());
+ cohortEntry.getCanCommitSender().tell(new Status.Failure(e), cohortEntry.getShard());
+ }
+ }
+
+ /**
+ * Returns the cohort entry for the Tx commit currently in progress if the given transaction ID
+ * matches the current entry.
+ *
+ * @param transactionID the ID of the transaction
+ * @return the current CohortEntry or null if the given transaction ID does not match the
+ * current entry.
+ */
+ public CohortEntry getCohortEntryIfCurrent(String transactionID) {
+ if(isCurrentTransaction(transactionID)) {
+ return currentCohortEntry;
+ }
+
+ return null;
+ }
+
+ public CohortEntry getCurrentCohortEntry() {
+ return currentCohortEntry;
+ }
+
+ // Returns and removes the cached entry for the given transaction ID; null if absent.
+ public CohortEntry getAndRemoveCohortEntry(String transactionID) {
+ CohortEntry cohortEntry = cohortCache.getIfPresent(transactionID);
+ cohortCache.invalidate(transactionID);
+ return cohortEntry;
+ }
+
+ public void removeCohortEntry(String transactionID) {
+ cohortCache.invalidate(transactionID);
+ }
+
+ public boolean isCurrentTransaction(String transactionID) {
+ return currentCohortEntry != null &&
+ currentCohortEntry.getTransactionID().equals(transactionID);
+ }
+
+ /**
+ * This method is called when a transaction is complete, successful or not. If the
+ * given transaction ID matches the current in-progress transaction, the next cohort entry,
+ * if any, is dequeued and processed.
+ *
+ * @param transactionID the ID of the completed transaction
+ * @param removeCohortEntry if true the CohortEntry for the transaction is also removed from
+ * the cache.
+ */
+ public void currentTransactionComplete(String transactionID, boolean removeCohortEntry) {
+ if(removeCohortEntry) {
+ removeCohortEntry(transactionID);
+ }
+
+ if(isCurrentTransaction(transactionID)) {
+ // Dequeue the next cohort entry waiting in the queue.
+ currentCohortEntry = queuedCohortEntries.poll();
+ if(currentCohortEntry != null) {
+ doCanCommit(currentCohortEntry);
+ }
+ }
+ }
+
+ // Holds the per-transaction state carried across the 3-phase commit.
+ static class CohortEntry {
+ private final String transactionID;
+ private final DOMStoreThreePhaseCommitCohort cohort;
+ private final Modification modification;
+ private ActorRef canCommitSender;
+ private ActorRef shard;
+ private long lastAccessTime;
+
+ CohortEntry(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+ this.transactionID = transactionID;
+ this.cohort = cohort;
+ this.modification = modification;
+ }
+
+ void updateLastAccessTime() {
+ lastAccessTime = System.currentTimeMillis();
+ }
+
+ long getLastAccessTime() {
+ return lastAccessTime;
+ }
+
+ String getTransactionID() {
+ return transactionID;
+ }
+
+ DOMStoreThreePhaseCommitCohort getCohort() {
+ return cohort;
+ }
+
+ Modification getModification() {
+ return modification;
+ }
+
+ ActorRef getCanCommitSender() {
+ return canCommitSender;
+ }
+
+ void setCanCommitSender(ActorRef canCommitSender) {
+ this.canCommitSender = canCommitSender;
+ }
+
+ ActorRef getShard() {
+ return shard;
+ }
+
+ void setShard(ActorRef shard) {
+ this.shard = shard;
+ }
+ }
+}
import akka.persistence.RecoveryFailure;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import org.opendaylight.controller.cluster.common.actor.AbstractUntypedPersistentActor;
+import com.google.common.base.Supplier;
+import org.opendaylight.controller.cluster.common.actor.AbstractUntypedPersistentActorWithMetering;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfo;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfoMBean;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
+import org.opendaylight.controller.cluster.datastore.messages.ActorNotInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.yangtools.yang.model.api.ModuleIdentifier;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import scala.concurrent.duration.Duration;
-
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
* <li> Monitor the cluster members and store their addresses
* <ul>
*/
-public class ShardManager extends AbstractUntypedPersistentActor {
+public class ShardManager extends AbstractUntypedPersistentActorWithMetering {
protected final LoggingAdapter LOG =
Logging.getLogger(getContext().system(), this);
* @param type defines the kind of data that goes into shards created by this shard manager. Examples of type would be
* configuration or operational
*/
- private ShardManager(String type, ClusterWrapper cluster, Configuration configuration,
+ protected ShardManager(String type, ClusterWrapper cluster, Configuration configuration,
DatastoreContext datastoreContext) {
this.type = Preconditions.checkNotNull(type, "type should not be null");
// Subscribe this actor to cluster member events
cluster.subscribeToMemberEvents(getSelf());
- //createLocalShards(null);
+ createLocalShards();
}
public static Props props(final String type,
@Override
public void handleCommand(Object message) throws Exception {
if (message.getClass().equals(FindPrimary.SERIALIZABLE_CLASS)) {
- findPrimary(
- FindPrimary.fromSerializable(message));
+ findPrimary(FindPrimary.fromSerializable(message));
} else if(message instanceof FindLocalShard){
findLocalShard((FindLocalShard) message);
} else if (message instanceof UpdateSchemaContext) {
updateSchemaContext(message);
+ } else if(message instanceof ActorInitialized) {
+ onActorInitialized(message);
} else if (message instanceof ClusterEvent.MemberUp){
memberUp((ClusterEvent.MemberUp) message);
} else if(message instanceof ClusterEvent.MemberRemoved) {
}
- @Override protected void handleRecover(Object message) throws Exception {
+ // Handles ActorInitialized sent by a child Shard actor once it has finished
+ // recovering; marks the corresponding local shard as ready for requests.
+ private void onActorInitialized(Object message) {
+ final ActorRef sender = getSender();
+
+ if (sender == null) {
+ return; //why is a non-actor sending this message? Just ignore.
+ }
+
+ String actorName = sender.path().name();
+ //find shard name from actor name; actor name is stringified shardId
+ ShardIdentifier shardId = ShardIdentifier.builder().fromShardIdString(actorName).build();
+
+ if (shardId.getShardName() == null) {
+ return;
+ }
+ markShardAsInitialized(shardId.getShardName());
+ }
+
+ // Flags the named local shard as initialized; a no-op if the shard is not
+ // hosted on this member.
+ private void markShardAsInitialized(String shardName) {
+ LOG.debug("Initializing shard [{}]", shardName);
+ ShardInformation shardInformation = localShards.get(shardName);
+ if (shardInformation != null) {
+ shardInformation.setShardInitialized(true);
+ }
+ }
+ @Override
+ protected void handleRecover(Object message) throws Exception {
if(message instanceof SchemaContextModules){
SchemaContextModules msg = (SchemaContextModules) message;
knownModules.clear();
}
private void findLocalShard(FindLocalShard message) {
- ShardInformation shardInformation =
- localShards.get(message.getShardName());
+ final ShardInformation shardInformation = localShards.get(message.getShardName());
- if(shardInformation != null){
- getSender().tell(new LocalShardFound(shardInformation.getActor()), getSelf());
+ if(shardInformation == null){
+ getSender().tell(new LocalShardNotFound(message.getShardName()), getSelf());
return;
}
- getSender().tell(new LocalShardNotFound(message.getShardName()),
- getSelf());
+ sendResponse(shardInformation, new Supplier<Object>() {
+ @Override
+ public Object get() {
+ return new LocalShardFound(shardInformation.getActor());
+ }
+ });
+ }
+
+ // Replies with the supplied message only if the shard's actor exists and has
+ // finished initializing; otherwise tells the sender ActorNotInitialized so it
+ // can retry later.
+ private void sendResponse(ShardInformation shardInformation, Supplier<Object> messageSupplier) {
+ if (shardInformation.getActor() == null || !shardInformation.isShardInitialized()) {
+ getSender().tell(new ActorNotInitialized(), getSelf());
+ return;
+ }
+
+ // Built lazily so the message is only constructed when it will actually be sent.
+ getSender().tell(messageSupplier.get(), getSelf());
+ }
private void memberRemoved(ClusterEvent.MemberRemoved message) {
private void memberUp(ClusterEvent.MemberUp message) {
String memberName = message.member().roles().head();
- memberNameToAddress.put(memberName , message.member().address());
+ memberNameToAddress.put(memberName, message.member().address());
for(ShardInformation info : localShards.values()){
String shardName = info.getShardName();
persist(new SchemaContextModules(newModules), new Procedure<SchemaContextModules>() {
- @Override public void apply(SchemaContextModules param) throws Exception {
+ @Override
+ public void apply(SchemaContextModules param) throws Exception {
LOG.info("Sending new SchemaContext to Shards");
- if (localShards.size() == 0) {
- createLocalShards(schemaContext);
- } else {
- for (ShardInformation info : localShards.values()) {
+ for (ShardInformation info : localShards.values()) {
+ if(info.getActor() == null) {
+ info.setActor(getContext().actorOf(Shard.props(info.getShardId(),
+ info.getPeerAddresses(), datastoreContext, schemaContext),
+ info.getShardId().toString()));
+ } else {
info.getActor().tell(message, getSelf());
}
}
String shardName = message.getShardName();
// First see if the there is a local replica for the shard
- ShardInformation info = localShards.get(shardName);
- if(info != null) {
- ActorPath shardPath = info.getActorPath();
- if (shardPath != null) {
- getSender()
- .tell(
- new PrimaryFound(shardPath.toString()).toSerializable(),
- getSelf());
- return;
- }
+ final ShardInformation info = localShards.get(shardName);
+ if (info != null) {
+ sendResponse(info, new Supplier<Object>() {
+ @Override
+ public Object get() {
+ return new PrimaryFound(info.getActorPath().toString()).toSerializable();
+ }
+ });
+
+ return;
}
- List<String> members =
- configuration.getMembersFromShardName(shardName);
+ List<String> members = configuration.getMembersFromShardName(shardName);
if(cluster.getCurrentMemberName() != null) {
members.remove(cluster.getCurrentMemberName());
}
+ /**
+ * FIXME: Instead of sending remote shard actor path back to sender,
+ * forward FindPrimary message to remote shard manager
+ */
// There is no way for us to figure out the primary (for now) so assume
// that one of the remote nodes is a primary
for(String memberName : members) {
* runs
*
*/
- private void createLocalShards(SchemaContext schemaContext) {
+ private void createLocalShards() {
String memberName = this.cluster.getCurrentMemberName();
List<String> memberShardNames =
this.configuration.getMemberShardNames(memberName);
for(String shardName : memberShardNames){
ShardIdentifier shardId = getShardIdentifier(memberName, shardName);
Map<ShardIdentifier, String> peerAddresses = getPeerAddresses(shardName);
- ActorRef actor = getContext()
- .actorOf(Shard.props(shardId, peerAddresses, datastoreContext, schemaContext),
- shardId.toString());
localShardActorNames.add(shardId.toString());
- localShards.put(shardName, new ShardInformation(shardName, actor, peerAddresses));
+ localShards.put(shardName, new ShardInformation(shardName, shardId, peerAddresses));
}
mBean = ShardManagerInfo.createShardManagerMBean("shard-manager-" + this.type,
}
- @Override public String persistenceId() {
+ @Override
+ public String persistenceId() {
return "shard-manager-" + type;
}
- @VisibleForTesting public Collection<String> getKnownModules() {
+ @VisibleForTesting
+ Collection<String> getKnownModules() {
return knownModules;
}
private class ShardInformation {
+ private final ShardIdentifier shardId;
private final String shardName;
- private final ActorRef actor;
- private final ActorPath actorPath;
+ private ActorRef actor;
+ private ActorPath actorPath;
private final Map<ShardIdentifier, String> peerAddresses;
+ private boolean shardInitialized = false; // flag that determines if the actor is ready for business
- private ShardInformation(String shardName, ActorRef actor,
- Map<ShardIdentifier, String> peerAddresses) {
+ private ShardInformation(String shardName, ShardIdentifier shardId,
+ Map<ShardIdentifier, String> peerAddresses) {
this.shardName = shardName;
- this.actor = actor;
- this.actorPath = actor.path();
+ this.shardId = shardId;
this.peerAddresses = peerAddresses;
}
- public String getShardName() {
+ String getShardName() {
return shardName;
}
- public ActorRef getActor(){
+ ActorRef getActor(){
return actor;
}
- public ActorPath getActorPath() {
+ ActorPath getActorPath() {
return actorPath;
}
- public void updatePeerAddress(ShardIdentifier peerId, String peerAddress){
+ void setActor(ActorRef actor) {
+ this.actor = actor;
+ this.actorPath = actor.path();
+ }
+
+ ShardIdentifier getShardId() {
+ return shardId;
+ }
+
+ Map<ShardIdentifier, String> getPeerAddresses() {
+ return peerAddresses;
+ }
+
+ void updatePeerAddress(ShardIdentifier peerId, String peerAddress){
LOG.info("updatePeerAddress for peer {} with address {}", peerId,
peerAddress);
if(peerAddresses.containsKey(peerId)){
peerAddresses.put(peerId, peerAddress);
- if(LOG.isDebugEnabled()) {
- LOG.debug(
- "Sending PeerAddressResolved for peer {} with address {} to {}",
- peerId, peerAddress, actor.path());
- }
- actor
- .tell(new PeerAddressResolved(peerId, peerAddress),
- getSelf());
+ if(actor != null) {
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("Sending PeerAddressResolved for peer {} with address {} to {}",
+ peerId, peerAddress, actor.path());
+ }
+
+ actor.tell(new PeerAddressResolved(peerId, peerAddress), getSelf());
+ }
}
}
+
+ boolean isShardInitialized() {
+ return shardInitialized;
+ }
+
+ void setShardInitialized(boolean shardInitialized) {
+ this.shardInitialized = shardInitialized;
+ }
}
private static class ShardManagerCreator implements Creator<ShardManager> {
}
static class SchemaContextModules implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private final Set<String> modules;
SchemaContextModules(Set<String> modules){
private final DOMStoreReadTransaction transaction;
public ShardReadTransaction(DOMStoreReadTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.DataExists;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
-import org.opendaylight.controller.cluster.datastore.messages.MergeData;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
/**
* @author: syedbahm
* Date: 8/6/14
*/
-public class ShardReadWriteTransaction extends ShardTransaction {
+public class ShardReadWriteTransaction extends ShardWriteTransaction {
private final DOMStoreReadWriteTransaction transaction;
public ShardReadWriteTransaction(DOMStoreReadWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(transaction, shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
public void handleReceive(Object message) throws Exception {
if(ReadData.SERIALIZABLE_CLASS.equals(message.getClass())) {
readData(transaction, ReadData.fromSerializable(message));
- } else if(WriteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- writeData(transaction, WriteData.fromSerializable(message, schemaContext));
- } else if(MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
- } else if(DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction, DeleteData.fromSerializable(message));
- } else if(ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
- readyTransaction(transaction, new ReadyTransaction());
} else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
dataExists(transaction, DataExists.fromSerializable(message));
} else {
super.handleReceive(message);
}
}
-
- @Override
- protected DOMStoreTransaction getDOMStoreTransaction() {
- return transaction;
- }
}
try {
NormalizedNodeMessages.Node serializedNode = NormalizedNodeMessages.Node.parseFrom(snapshot);
NormalizedNode<?, ?> node = new NormalizedNodeToNodeCodec(schemaContext).decode(
- YangInstanceIdentifier.builder().build(), serializedNode);
+ serializedNode);
// delete everything first
resultingTx.delete(YangInstanceIdentifier.builder().build());
import akka.actor.Props;
import akka.actor.ReceiveTimeout;
import akka.japi.Creator;
-
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
-
import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.DataExists;
import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
-import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply;
-import org.opendaylight.controller.cluster.datastore.messages.MergeData;
-import org.opendaylight.controller.cluster.datastore.messages.MergeDataReply;
import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
-import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.DeleteModification;
-import org.opendaylight.controller.cluster.datastore.modification.ImmutableCompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
-import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
public abstract class ShardTransaction extends AbstractUntypedActor {
private final ActorRef shardActor;
- protected final SchemaContext schemaContext;
+ private final SchemaContext schemaContext;
private final ShardStats shardStats;
-
- private final MutableCompositeModification modification = new MutableCompositeModification();
+ private final String transactionID;
protected ShardTransaction(ActorRef shardActor, SchemaContext schemaContext,
- ShardStats shardStats) {
+ ShardStats shardStats, String transactionID) {
this.shardActor = shardActor;
this.schemaContext = schemaContext;
this.shardStats = shardStats;
+ this.transactionID = transactionID;
}
public static Props props(DOMStoreTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext,DatastoreContext datastoreContext, ShardStats shardStats) {
+ SchemaContext schemaContext,DatastoreContext datastoreContext, ShardStats shardStats,
+ String transactionID) {
return Props.create(new ShardTransactionCreator(transaction, shardActor, schemaContext,
- datastoreContext, shardStats));
+ datastoreContext, shardStats, transactionID));
}
protected abstract DOMStoreTransaction getDOMStoreTransaction();
+ protected ActorRef getShardActor() {
+ return shardActor;
+ }
+
+ protected String getTransactionID() {
+ return transactionID;
+ }
+
+ protected SchemaContext getSchemaContext() {
+ return schemaContext;
+ }
+
@Override
public void handleReceive(Object message) throws Exception {
if (message.getClass().equals(CloseTransaction.SERIALIZABLE_CLASS)) {
closeTransaction(true);
- } else if (message instanceof GetCompositedModification) {
- // This is here for testing only
- getSender().tell(new GetCompositeModificationReply(
- new ImmutableCompositeModification(modification)), getSelf());
} else if (message instanceof ReceiveTimeout) {
if(LOG.isDebugEnabled()) {
LOG.debug("Got ReceiveTimeout for inactivity - closing Tx");
}
- protected void writeData(DOMStoreWriteTransaction transaction, WriteData message) {
- modification.addModification(
- new WriteModification(message.getPath(), message.getData(),schemaContext));
- if(LOG.isDebugEnabled()) {
- LOG.debug("writeData at path : " + message.getPath().toString());
- }
- try {
- transaction.write(message.getPath(), message.getData());
- getSender().tell(new WriteDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void mergeData(DOMStoreWriteTransaction transaction, MergeData message) {
- modification.addModification(
- new MergeModification(message.getPath(), message.getData(), schemaContext));
- if(LOG.isDebugEnabled()) {
- LOG.debug("mergeData at path : " + message.getPath().toString());
- }
- try {
- transaction.merge(message.getPath(), message.getData());
- getSender().tell(new MergeDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void deleteData(DOMStoreWriteTransaction transaction, DeleteData message) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("deleteData at path : " + message.getPath().toString());
- }
- modification.addModification(new DeleteModification(message.getPath()));
- try {
- transaction.delete(message.getPath());
- getSender().tell(new DeleteDataReply().toSerializable(), getSelf());
- }catch(Exception e){
- getSender().tell(new akka.actor.Status.Failure(e), getSelf());
- }
- }
-
- protected void readyTransaction(DOMStoreWriteTransaction transaction, ReadyTransaction message) {
- DOMStoreThreePhaseCommitCohort cohort = transaction.ready();
- ActorRef cohortActor = getContext().actorOf(
- ThreePhaseCommitCohort.props(cohort, shardActor, modification, shardStats), "cohort");
- getSender()
- .tell(new ReadyTransactionReply(cohortActor.path()).toSerializable(), getSelf());
-
- }
-
private static class ShardTransactionCreator implements Creator<ShardTransaction> {
private static final long serialVersionUID = 1L;
final SchemaContext schemaContext;
final DatastoreContext datastoreContext;
final ShardStats shardStats;
+ final String transactionID;
ShardTransactionCreator(DOMStoreTransaction transaction, ActorRef shardActor,
SchemaContext schemaContext, DatastoreContext datastoreContext,
- ShardStats shardStats) {
+ ShardStats shardStats, String transactionID) {
this.transaction = transaction;
this.shardActor = shardActor;
this.shardStats = shardStats;
this.schemaContext = schemaContext;
this.datastoreContext = datastoreContext;
+ this.transactionID = transactionID;
}
@Override
ShardTransaction tx;
if(transaction instanceof DOMStoreReadWriteTransaction) {
tx = new ShardReadWriteTransaction((DOMStoreReadWriteTransaction)transaction,
- shardActor, schemaContext, shardStats);
+ shardActor, schemaContext, shardStats, transactionID);
} else if(transaction instanceof DOMStoreReadTransaction) {
tx = new ShardReadTransaction((DOMStoreReadTransaction)transaction, shardActor,
- schemaContext, shardStats);
+ schemaContext, shardStats, transactionID);
} else {
tx = new ShardWriteTransaction((DOMStoreWriteTransaction)transaction,
- shardActor, schemaContext, shardStats);
+ shardActor, schemaContext, shardStats, transactionID);
}
tx.getContext().setReceiveTimeout(datastoreContext.getShardTransactionIdleTimeout());
return tx;
}
}
-
- // These classes are in here for test purposes only
-
- static class GetCompositedModification {
- }
-
-
- static class GetCompositeModificationReply {
- private final CompositeModification modification;
-
-
- GetCompositeModificationReply(CompositeModification modification) {
- this.modification = modification;
- }
-
-
- public CompositeModification getModification() {
- return modification;
- }
- }
}
return getContext().parent();
}
- private ActorRef createTypedTransactionActor(CreateTransaction createTransaction,
- String transactionId) {
+ private ActorRef createTypedTransactionActor(CreateTransaction createTransaction) {
+ String transactionName = "shard-" + createTransaction.getTransactionId();
if(createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newReadOnlyTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newReadWriteTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newWriteOnlyTransaction(), getShardActor(),
- schemaContext, datastoreContext, shardStats), transactionId);
+ schemaContext, datastoreContext, shardStats,
+ createTransaction.getTransactionId()), transactionName);
} else {
throw new IllegalArgumentException (
"CreateTransaction message has unidentified transaction type=" +
private void createTransaction(CreateTransaction createTransaction) {
- ActorRef transactionActor = createTypedTransactionActor(createTransaction, "shard-" + createTransaction.getTransactionId());
- getSender()
- .tell(new CreateTransactionReply(transactionActor.path().toString(),createTransaction.getTransactionId()).toSerializable(),
- getSelf());
+ ActorRef transactionActor = createTypedTransactionActor(createTransaction);
+ getSender().tell(new CreateTransactionReply(transactionActor.path().toString(),
+ createTransaction.getTransactionId()).toSerializable(), getSelf());
}
public static Props props(DOMStoreTransactionChain chain, SchemaContext schemaContext,
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
-
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
+import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.MergeData;
+import org.opendaylight.controller.cluster.datastore.messages.MergeDataReply;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
+import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.DeleteModification;
+import org.opendaylight.controller.cluster.datastore.modification.ImmutableCompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
+import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
* Date: 8/6/14
*/
public class ShardWriteTransaction extends ShardTransaction {
+
+ private final MutableCompositeModification modification = new MutableCompositeModification();
private final DOMStoreWriteTransaction transaction;
public ShardWriteTransaction(DOMStoreWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats) {
- super(shardActor, schemaContext, shardStats);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
+ super(shardActor, schemaContext, shardStats, transactionID);
this.transaction = transaction;
}
+ @Override
+ protected DOMStoreTransaction getDOMStoreTransaction() {
+ return transaction;
+ }
+
@Override
public void handleReceive(Object message) throws Exception {
if(WriteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- writeData(transaction, WriteData.fromSerializable(message, schemaContext));
+ writeData(transaction, WriteData.fromSerializable(message, getSchemaContext()));
} else if(MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
+ mergeData(transaction, MergeData.fromSerializable(message, getSchemaContext()));
} else if(DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
deleteData(transaction, DeleteData.fromSerializable(message));
} else if(ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction, new ReadyTransaction());
+ } else if (message instanceof GetCompositedModification) {
+ // This is here for testing only
+ getSender().tell(new GetCompositeModificationReply(
+ new ImmutableCompositeModification(modification)), getSelf());
} else {
super.handleReceive(message);
}
}
- @Override
- protected DOMStoreTransaction getDOMStoreTransaction() {
- return transaction;
+ private void writeData(DOMStoreWriteTransaction transaction, WriteData message) {
+ modification.addModification(
+ new WriteModification(message.getPath(), message.getData(), getSchemaContext()));
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("writeData at path : " + message.getPath().toString());
+ }
+ try {
+ transaction.write(message.getPath(), message.getData());
+ getSender().tell(new WriteDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void mergeData(DOMStoreWriteTransaction transaction, MergeData message) {
+ modification.addModification(
+ new MergeModification(message.getPath(), message.getData(), getSchemaContext()));
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("mergeData at path : " + message.getPath().toString());
+ }
+ try {
+ transaction.merge(message.getPath(), message.getData());
+ getSender().tell(new MergeDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void deleteData(DOMStoreWriteTransaction transaction, DeleteData message) {
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("deleteData at path : " + message.getPath().toString());
+ }
+ modification.addModification(new DeleteModification(message.getPath()));
+ try {
+ transaction.delete(message.getPath());
+ getSender().tell(new DeleteDataReply().toSerializable(), getSelf());
+ }catch(Exception e){
+ getSender().tell(new akka.actor.Status.Failure(e), getSelf());
+ }
+ }
+
+ private void readyTransaction(DOMStoreWriteTransaction transaction, ReadyTransaction message) {
+ DOMStoreThreePhaseCommitCohort cohort = transaction.ready();
+
+ getShardActor().forward(new ForwardedReadyTransaction(getTransactionID(), cohort, modification),
+ getContext());
+ }
+
+ // These classes are in here for test purposes only
+
+ static class GetCompositedModification {
+ }
+
+ static class GetCompositeModificationReply {
+ private final CompositeModification modification;
+
+
+ GetCompositeModificationReply(CompositeModification modification) {
+ this.modification = modification;
+ }
+
+ public CompositeModification getModification() {
+ return modification;
+ }
}
}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorRef;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import akka.event.Logging;
-import akka.event.LoggingAdapter;
-import akka.japi.Creator;
-
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
-
-import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
-import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-
-public class ThreePhaseCommitCohort extends AbstractUntypedActor {
- private final DOMStoreThreePhaseCommitCohort cohort;
- private final ActorRef shardActor;
- private final CompositeModification modification;
- private final ShardStats shardStats;
-
- public ThreePhaseCommitCohort(DOMStoreThreePhaseCommitCohort cohort,
- ActorRef shardActor, CompositeModification modification, ShardStats shardStats) {
-
- this.cohort = cohort;
- this.shardActor = shardActor;
- this.modification = modification;
- this.shardStats = shardStats;
- }
-
- private final LoggingAdapter log =
- Logging.getLogger(getContext().system(), this);
-
- public static Props props(final DOMStoreThreePhaseCommitCohort cohort,
- final ActorRef shardActor, final CompositeModification modification,
- ShardStats shardStats) {
- return Props.create(new ThreePhaseCommitCohortCreator(cohort, shardActor, modification,
- shardStats));
- }
-
- @Override
- public void handleReceive(Object message) throws Exception {
- if (message.getClass()
- .equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
- canCommit(new CanCommitTransaction());
- } else if (message.getClass()
- .equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
- preCommit(new PreCommitTransaction());
- } else if (message.getClass()
- .equals(CommitTransaction.SERIALIZABLE_CLASS)) {
- commit(new CommitTransaction());
- } else if (message.getClass()
- .equals(AbortTransaction.SERIALIZABLE_CLASS)) {
- abort(new AbortTransaction());
- } else {
- unknownMessage(message);
- }
- }
-
- private void abort(AbortTransaction message) {
- final ListenableFuture<Void> future = cohort.abort();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
-
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- shardStats.incrementAbortTransactionsCount();
- sender
- .tell(new AbortTransactionReply().toSerializable(),
- self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during abort");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
- }
-
- private void commit(CommitTransaction message) {
- // Forward the commit to the shard
- if(log.isDebugEnabled()) {
- log.debug("Forward commit transaction to Shard {} ", shardActor);
- }
- shardActor.forward(new ForwardedCommitTransaction(cohort, modification),
- getContext());
-
- getContext().parent().tell(PoisonPill.getInstance(), getSelf());
-
- }
-
- private void preCommit(PreCommitTransaction message) {
- final ListenableFuture<Void> future = cohort.preCommit();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
- Futures.addCallback(future, new FutureCallback<Void>() {
- @Override
- public void onSuccess(Void v) {
- sender
- .tell(new PreCommitTransactionReply().toSerializable(),
- self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during pre-commit");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
-
- }
-
- private void canCommit(CanCommitTransaction message) {
- final ListenableFuture<Boolean> future = cohort.canCommit();
- final ActorRef sender = getSender();
- final ActorRef self = getSelf();
- Futures.addCallback(future, new FutureCallback<Boolean>() {
- @Override
- public void onSuccess(Boolean canCommit) {
- sender.tell(new CanCommitTransactionReply(canCommit)
- .toSerializable(), self);
- }
-
- @Override
- public void onFailure(Throwable t) {
- LOG.error(t, "An exception happened during canCommit");
- sender
- .tell(new akka.actor.Status.Failure(t), self);
- }
- });
- }
-
- private static class ThreePhaseCommitCohortCreator implements Creator<ThreePhaseCommitCohort> {
- final DOMStoreThreePhaseCommitCohort cohort;
- final ActorRef shardActor;
- final CompositeModification modification;
- final ShardStats shardStats;
-
- ThreePhaseCommitCohortCreator(DOMStoreThreePhaseCommitCohort cohort,
- ActorRef shardActor, CompositeModification modification, ShardStats shardStats) {
- this.cohort = cohort;
- this.shardActor = shardActor;
- this.modification = modification;
- this.shardStats = shardStats;
- }
-
- @Override
- public ThreePhaseCommitCohort create() throws Exception {
- return new ThreePhaseCommitCohort(cohort, shardActor, modification, shardStats);
- }
- }
-}
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
import akka.actor.ActorSelection;
import akka.dispatch.Futures;
import akka.dispatch.OnComplete;
-
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
-
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
import scala.concurrent.Future;
import scala.runtime.AbstractFunction1;
-
import java.util.Collections;
import java.util.List;
private static final Logger LOG = LoggerFactory.getLogger(ThreePhaseCommitCohortProxy.class);
+ private static final ListenableFuture<Void> IMMEDIATE_SUCCESS =
+ com.google.common.util.concurrent.Futures.immediateFuture(null);
+
private final ActorContext actorContext;
- private final List<Future<ActorPath>> cohortPathFutures;
- private volatile List<ActorPath> cohortPaths;
+ private final List<Future<ActorSelection>> cohortFutures;
+ private volatile List<ActorSelection> cohorts;
private final String transactionId;
public ThreePhaseCommitCohortProxy(ActorContext actorContext,
- List<Future<ActorPath>> cohortPathFutures, String transactionId) {
+ List<Future<ActorSelection>> cohortFutures, String transactionId) {
this.actorContext = actorContext;
- this.cohortPathFutures = cohortPathFutures;
+ this.cohortFutures = cohortFutures;
this.transactionId = transactionId;
}
- private Future<Void> buildCohortPathsList() {
+ private Future<Void> buildCohortList() {
- Future<Iterable<ActorPath>> combinedFutures = Futures.sequence(cohortPathFutures,
+ Future<Iterable<ActorSelection>> combinedFutures = Futures.sequence(cohortFutures,
actorContext.getActorSystem().dispatcher());
- return combinedFutures.transform(new AbstractFunction1<Iterable<ActorPath>, Void>() {
+ return combinedFutures.transform(new AbstractFunction1<Iterable<ActorSelection>, Void>() {
@Override
- public Void apply(Iterable<ActorPath> paths) {
- cohortPaths = Lists.newArrayList(paths);
+ public Void apply(Iterable<ActorSelection> actorSelections) {
+ cohorts = Lists.newArrayList(actorSelections);
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} successfully built cohort path list: {}",
- transactionId, cohortPaths);
+ transactionId, cohorts);
}
return null;
}
// extracted from ReadyTransactionReply messages by the Futures that were obtained earlier
// and passed to us from upstream processing. If any one fails then we'll fail canCommit.
- buildCohortPathsList().onComplete(new OnComplete<Void>() {
+ buildCohortList().onComplete(new OnComplete<Void>() {
@Override
public void onComplete(Throwable failure, Void notUsed) throws Throwable {
if(failure != null) {
if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {}: a cohort path Future failed: {}", transactionId, failure);
+ LOG.debug("Tx {}: a cohort Future failed: {}", transactionId, failure);
}
returnFuture.setException(failure);
} else {
// their canCommit processing. If any one fails then we'll fail canCommit.
Future<Iterable<Object>> combinedFuture =
- invokeCohorts(new CanCommitTransaction().toSerializable());
+ invokeCohorts(new CanCommitTransaction(transactionId).toSerializable());
combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
@Override
}
private Future<Iterable<Object>> invokeCohorts(Object message) {
- List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
- for(ActorPath actorPath : cohortPaths) {
+ List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohorts.size());
+ for(ActorSelection cohort : cohorts) {
if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {}: Sending {} to cohort {}", transactionId, message, actorPath);
+ LOG.debug("Tx {}: Sending {} to cohort {}", transactionId, message, cohort);
}
- ActorSelection cohort = actorContext.actorSelection(actorPath);
- futureList.add(actorContext.executeRemoteOperationAsync(cohort, message));
+ futureList.add(actorContext.executeOperationAsync(cohort, message));
}
return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
@Override
public ListenableFuture<Void> preCommit() {
- return voidOperation("preCommit", new PreCommitTransaction().toSerializable(),
- PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
+ // We don't need to do anything here - preCommit is done atomically with the commit phase
+ // by the shard.
+ return IMMEDIATE_SUCCESS;
}
@Override
// exception then that exception will supersede and suppress the original exception. But
// it's the original exception that is the root cause and of more interest to the client.
- return voidOperation("abort", new AbortTransaction().toSerializable(),
+ return voidOperation("abort", new AbortTransaction(transactionId).toSerializable(),
AbortTransactionReply.SERIALIZABLE_CLASS, false);
}
@Override
public ListenableFuture<Void> commit() {
- return voidOperation("commit", new CommitTransaction().toSerializable(),
+ return voidOperation("commit", new CommitTransaction(transactionId).toSerializable(),
CommitTransactionReply.SERIALIZABLE_CLASS, true);
}
// The cohort actor list should already be built at this point by the canCommit phase but,
// if not for some reason, we'll try to build it here.
- if(cohortPaths != null) {
+ if(cohorts != null) {
finishVoidOperation(operationName, message, expectedResponseClass, propagateException,
returnFuture);
} else {
- buildCohortPathsList().onComplete(new OnComplete<Void>() {
+ buildCohortList().onComplete(new OnComplete<Void>() {
@Override
public void onComplete(Throwable failure, Void notUsed) throws Throwable {
if(failure != null) {
}
@VisibleForTesting
- List<Future<ActorPath>> getCohortPathFutures() {
- return Collections.unmodifiableList(cohortPathFutures);
+ List<Future<ActorSelection>> getCohortFutures() {
+ return Collections.unmodifiableList(cohortFutures);
}
}
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
+import akka.actor.ActorSelection;
import akka.dispatch.Futures;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionChain;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
public class TransactionChainProxy implements DOMStoreTransactionChain{
private final ActorContext actorContext;
private final String transactionChainId;
- private volatile List<Future<ActorPath>> cohortPathFutures = Collections.emptyList();
+ private volatile List<Future<ActorSelection>> cohortFutures = Collections.emptyList();
public TransactionChainProxy(ActorContext actorContext) {
this.actorContext = actorContext;
return transactionChainId;
}
- public void onTransactionReady(List<Future<ActorPath>> cohortPathFutures){
- this.cohortPathFutures = cohortPathFutures;
+ public void onTransactionReady(List<Future<ActorSelection>> cohortFutures){
+ this.cohortFutures = cohortFutures;
}
public void waitTillCurrentTransactionReady(){
try {
Await.result(Futures
- .sequence(this.cohortPathFutures, actorContext.getActorSystem().dispatcher()),
+ .sequence(this.cohortFutures, actorContext.getActorSystem().dispatcher()),
actorContext.getOperationDuration());
} catch (Exception e) {
throw new IllegalStateException("Failed when waiting for transaction on a chain to become ready", e);
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorPath;
import akka.actor.ActorSelection;
import akka.dispatch.OnComplete;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.SettableFuture;
+import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
if(remoteTransactionActorsMB.get()) {
for(ActorSelection actor : remoteTransactionActors) {
LOG.trace("Sending CloseTransaction to {}", actor);
- actorContext.sendRemoteOperationAsync(actor,
+ actorContext.sendOperationAsync(actor,
new CloseTransaction().toSerializable());
}
}
LOG.debug("Tx {} Trying to get {} transactions ready for commit", identifier,
remoteTransactionPaths.size());
}
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
LOG.debug("Tx {} Readying transaction for shard {}", identifier,
transactionContext.getShardName());
}
- cohortPathFutures.add(transactionContext.readyTransaction());
+ cohortFutures.add(transactionContext.readyTransaction());
}
if(transactionChainProxy != null){
- transactionChainProxy.onTransactionReady(cohortPathFutures);
+ transactionChainProxy.onTransactionReady(cohortFutures);
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures,
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures,
identifier.toString());
}
}
try {
- Object response = actorContext.executeShardOperation(shardName,
- new CreateTransaction(identifier.toString(), this.transactionType.ordinal(),
- getTransactionChainId()).toSerializable());
+ Optional<ActorSelection> primaryShard = actorContext.findPrimaryShard(shardName);
+ if (!primaryShard.isPresent()) {
+ throw new PrimaryNotFoundException("Primary could not be found for shard " + shardName);
+ }
+
+ Object response = actorContext.executeOperation(primaryShard.get(),
+ new CreateTransaction(identifier.toString(), this.transactionType.ordinal(),
+ getTransactionChainId()).toSerializable());
if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
CreateTransactionReply reply =
CreateTransactionReply.fromSerializable(response);
void closeTransaction();
- Future<ActorPath> readyTransaction();
+ Future<ActorSelection> readyTransaction();
void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data);
return actor;
}
- private String getResolvedCohortPath(String cohortPath) {
- return actorContext.resolvePath(actorPath, cohortPath);
- }
-
@Override
public void closeTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} closeTransaction called", identifier);
}
- actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
+ actorContext.sendOperationAsync(getActor(), new CloseTransaction().toSerializable());
}
@Override
- public Future<ActorPath> readyTransaction() {
+ public Future<ActorSelection> readyTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction called with {} previous recorded operations pending",
identifier, recordedOperationFutures.size());
}
// Send the ReadyTransaction message to the Tx actor.
- final Future<Object> replyFuture = actorContext.executeRemoteOperationAsync(getActor(),
+ final Future<Object> replyFuture = actorContext.executeOperationAsync(getActor(),
new ReadyTransaction().toSerializable());
// Combine all the previously recorded put/merge/delete operation reply Futures and the
// Transform the combined Future into a Future that returns the cohort actor path from
// the ReadyTransactionReply. That's the end result of the ready operation.
- return combinedFutures.transform(new AbstractFunction1<Iterable<Object>, ActorPath>() {
+ return combinedFutures.transform(new AbstractFunction1<Iterable<Object>, ActorSelection>() {
@Override
- public ActorPath apply(Iterable<Object> notUsed) {
+ public ActorSelection apply(Iterable<Object> notUsed) {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction: pending recorded operations succeeded",
identifier);
if(serializedReadyReply.getClass().equals(
ReadyTransactionReply.SERIALIZABLE_CLASS)) {
ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
- actorContext.getActorSystem(), serializedReadyReply);
+ serializedReadyReply);
- String resolvedCohortPath = getResolvedCohortPath(
- reply.getCohortPath().toString());
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Tx {} readyTransaction: resolved cohort path {}",
- identifier, resolvedCohortPath);
- }
- return actorContext.actorFor(resolvedCohortPath);
+ return actorContext.actorSelection(reply.getCohortPath());
} else {
// Throwing an exception here will fail the Future.
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} deleteData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
- new DeleteData(path).toSerializable() ));
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
+ new DeleteData(path).toSerializable()));
}
@Override
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} mergeData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
new MergeData(path, data, schemaContext).toSerializable()));
}
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} writeData called path = {}", identifier, path);
}
- recordedOperationFutures.add(actorContext.executeRemoteOperationAsync(getActor(),
+ recordedOperationFutures.add(actorContext.executeOperationAsync(getActor(),
new WriteData(path, data, schemaContext).toSerializable()));
}
}
};
- Future<Object> readFuture = actorContext.executeRemoteOperationAsync(getActor(),
+ Future<Object> readFuture = actorContext.executeOperationAsync(getActor(),
new ReadData(path).toSerializable());
readFuture.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
}
}
};
- Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ Future<Object> future = actorContext.executeOperationAsync(getActor(),
new DataExists(path).toSerializable());
future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
}
}
@Override
- public Future<ActorPath> readyTransaction() {
+ public Future<ActorSelection> readyTransaction() {
if(LOG.isDebugEnabled()) {
LOG.debug("Tx {} readyTransaction called", identifier);
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.exceptions;
+
+public class NotInitializedException extends RuntimeException {
+ public NotInitializedException(String message) {
+ super(message);
+ }
+}
import com.google.common.base.Preconditions;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
public class ShardIdentifier {
private final String shardName;
private final String memberName;
private final String type;
+ // "format" and "pattern" must be kept in sync: both encode "<member>-shard-<shard>-<type>"
+ private final String format = "%s-shard-%s-%s";
+ private static final Pattern pattern = Pattern.compile("(\\S+)-shard-(\\S+)-(\\S+)");
public ShardIdentifier(String shardName, String memberName, String type) {
}
@Override public String toString() {
- StringBuilder builder = new StringBuilder();
- builder.append(memberName).append("-shard-").append(shardName).append("-").append(type);
- return builder.toString();
+ // Ensure the output of toString() matches the pattern declared above.
+ return new StringBuilder(memberName)
+ .append("-shard-")
+ .append(shardName)
+ .append("-")
+ .append(type)
+ .toString();
}
public static Builder builder(){
return new Builder();
}
+ public String getShardName() {
+ return shardName;
+ }
+
+ public String getMemberName() {
+ return memberName;
+ }
+
+ public String getType() {
+ return type;
+ }
+
public static class Builder {
private String shardName;
private String memberName;
return this;
}
+ public Builder fromShardIdString(String shardId){
+ Matcher matcher = pattern.matcher(shardId);
+
+ if (matcher.matches()) {
+ memberName = matcher.group(1);
+ shardName = matcher.group(2);
+ type = matcher.group(3);
+ }
+ return this;
+ }
}
}
public class ShardTransactionIdentifier {
private final String remoteTransactionId;
- public ShardTransactionIdentifier(String remoteTransactionId) {
- this.remoteTransactionId = Preconditions.checkNotNull(remoteTransactionId, "remoteTransactionId should not be null");
+ private ShardTransactionIdentifier(String remoteTransactionId) {
+ this.remoteTransactionId = Preconditions.checkNotNull(remoteTransactionId,
+ "remoteTransactionId should not be null");
}
public static Builder builder(){
return new Builder();
}
+ public String getRemoteTransactionId() {
+ return remoteTransactionId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) {
}
@Override public String toString() {
- final StringBuilder sb =
- new StringBuilder();
+ final StringBuilder sb = new StringBuilder();
sb.append("shard-").append(remoteTransactionId);
return sb.toString();
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class AbortTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransaction.class;
+ public static final Class<ThreePhaseCommitCohortMessages.AbortTransaction> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.AbortTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public AbortTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder().
+ setTransactionId(transactionID).build();
+ }
+
+ public static AbortTransaction fromSerializable(Object message) {
+ return new AbortTransaction(((ThreePhaseCommitCohortMessages.AbortTransaction)message).
+ getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class AbortTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransactionReply.class;
+ public static final Class<ThreePhaseCommitCohortMessages.AbortTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.AbortTransactionReply.class;
-
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.AbortTransactionReply.newBuilder().build();
- }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.AbortTransactionReply.newBuilder().build();
+ }
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import java.io.Serializable;
+
+public class ActorInitialized implements Serializable {
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import java.io.Serializable;
+
+public class ActorNotInitialized implements Serializable {
+}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CanCommitTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransaction.class;
+ public static final Class<?> SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public CanCommitTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().
+ setTransactionId(transactionID).build();
+ }
+
+ public static CanCommitTransaction fromSerializable(Object message) {
+ return new CanCommitTransaction(((ThreePhaseCommitCohortMessages.CanCommitTransaction)message).
+ getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CanCommitTransactionReply implements SerializableMessage {
- public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransactionReply.class;
- private final Boolean canCommit;
+ public static Class<ThreePhaseCommitCohortMessages.CanCommitTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CanCommitTransactionReply.class;
- public CanCommitTransactionReply(Boolean canCommit) {
- this.canCommit = canCommit;
- }
+ private final Boolean canCommit;
- public Boolean getCanCommit() {
- return canCommit;
- }
+ public CanCommitTransactionReply(Boolean canCommit) {
+ this.canCommit = canCommit;
+ }
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CanCommitTransactionReply.newBuilder().setCanCommit(canCommit).build();
- }
+ public Boolean getCanCommit() {
+ return canCommit;
+ }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CanCommitTransactionReply.newBuilder().setCanCommit(canCommit).build();
+ }
- public static CanCommitTransactionReply fromSerializable(Object message) {
- return new CanCommitTransactionReply(((ThreePhaseCommitCohortMessages.CanCommitTransactionReply)message).getCanCommit());
- }
+ public static CanCommitTransactionReply fromSerializable(Object message) {
+ return new CanCommitTransactionReply(
+ ((ThreePhaseCommitCohortMessages.CanCommitTransactionReply) message).getCanCommit());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CommitTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransaction.class;
+ public static final Class<ThreePhaseCommitCohortMessages.CommitTransaction> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CommitTransaction.class;
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CommitTransaction.newBuilder().build();
- }
+ private final String transactionID;
+
+ public CommitTransaction(String transactionID) {
+ this.transactionID = transactionID;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CommitTransaction.newBuilder().setTransactionId(
+ transactionID).build();
+ }
+
+ public static CommitTransaction fromSerializable(Object message) {
+ return new CommitTransaction(((ThreePhaseCommitCohortMessages.
+ CommitTransaction)message).getTransactionId());
+ }
}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CommitTransactionReply implements SerializableMessage {
+ public static final Class<ThreePhaseCommitCohortMessages.CommitTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.CommitTransactionReply.class;
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransactionReply.class;
-
- @Override
- public Object toSerializable() {
- return ThreePhaseCommitCohortMessages.CommitTransactionReply.newBuilder().build();
- }
+ @Override
+ public Object toSerializable() {
+ return ThreePhaseCommitCohortMessages.CommitTransactionReply.newBuilder().build();
+ }
}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
-
-public class CreateTransactionChain implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChain.class;
-
- @Override
- public Object toSerializable() {
- return ShardTransactionChainMessages.CreateTransactionChain.newBuilder().build();
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import akka.actor.ActorPath;
-import akka.actor.ActorSystem;
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
-
-public class CreateTransactionChainReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChainReply.class;
- private final ActorPath transactionChainPath;
-
- public CreateTransactionChainReply(ActorPath transactionChainPath) {
- this.transactionChainPath = transactionChainPath;
- }
-
- public ActorPath getTransactionChainPath() {
- return transactionChainPath;
- }
-
- @Override
- public ShardTransactionChainMessages.CreateTransactionChainReply toSerializable() {
- return ShardTransactionChainMessages.CreateTransactionChainReply.newBuilder()
- .setTransactionChainPath(transactionChainPath.toString()).build();
- }
-
- public static CreateTransactionChainReply fromSerializable(ActorSystem actorSystem,Object serializable){
- ShardTransactionChainMessages.CreateTransactionChainReply o = (ShardTransactionChainMessages.CreateTransactionChainReply) serializable;
- return new CreateTransactionChainReply(
- actorSystem.actorFor(o.getTransactionChainPath()).path());
- }
-
-}
package org.opendaylight.controller.cluster.datastore.messages;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
import org.opendaylight.controller.protobuff.messages.datachange.notification.DataChangeListenerMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
public class DataChanged implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS =
+ public static final Class<DataChangeListenerMessages.DataChanged> SERIALIZABLE_CLASS =
DataChangeListenerMessages.DataChanged.class;
+
final private SchemaContext schemaContext;
private final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>>
change;
NormalizedNode<?, ?> normalizedNode) {
return new NormalizedNodeToNodeCodec(schemaContext)
- .encode(YangInstanceIdentifier.builder().build(), normalizedNode)
+ .encode(normalizedNode)
.getNormalizedNode();
}
removedPathInstanceIds.add(InstanceIdentifierUtils.toSerializable(id));
}
return new Iterable<NormalizedNodeMessages.InstanceIdentifier>() {
+ @Override
public Iterator<NormalizedNodeMessages.InstanceIdentifier> iterator() {
return removedPathInstanceIds.iterator();
}
builder.setInstanceIdentifierPath(instanceIdentifier)
.setNormalizedNode(normalizedNodeToNodeCodec
- .encode(entry.getKey(), entry.getValue())
+ .encode(entry.getValue())
.getNormalizedNode());
nodeMapBuilder.addMapEntries(builder.build());
}
static class DataChangedEvent implements
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- private final SchemaContext schemaContext;
private Map<YangInstanceIdentifier, NormalizedNode<?, ?>> createdData;
private final NormalizedNodeToNodeCodec nodeCodec;
private Map<YangInstanceIdentifier, NormalizedNode<?, ?>> updatedData;
private Set<YangInstanceIdentifier> removedPathIds;
DataChangedEvent(SchemaContext schemaContext) {
- this.schemaContext = schemaContext;
nodeCodec = new NormalizedNodeToNodeCodec(schemaContext);
}
YangInstanceIdentifier id = InstanceIdentifierUtils
.fromSerializable(nodeMapEntry.getInstanceIdentifierPath());
mapEntries.put(id,
- nodeCodec.decode(id, nodeMapEntry.getNormalizedNode()));
+ nodeCodec.decode(nodeMapEntry.getNormalizedNode()));
}
return mapEntries;
}
DataChangedEvent setOriginalSubtree(NormalizedNodeMessages.Node node,
YangInstanceIdentifier instanceIdentifierPath) {
- originalSubTree = nodeCodec.decode(instanceIdentifierPath, node);
+ originalSubTree = nodeCodec.decode(node);
return this;
}
DataChangedEvent setUpdatedSubtree(NormalizedNodeMessages.Node node,
YangInstanceIdentifier instanceIdentifierPath) {
- updatedSubTree = nodeCodec.decode(instanceIdentifierPath, node);
+ updatedSubTree = nodeCodec.decode(node);
return this;
}
package org.opendaylight.controller.cluster.datastore.messages;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore.messages;
-
-import org.opendaylight.controller.cluster.datastore.modification.Modification;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-
-public class ForwardedCommitTransaction {
- private final DOMStoreThreePhaseCommitCohort cohort;
- private final Modification modification;
-
- public ForwardedCommitTransaction(DOMStoreThreePhaseCommitCohort cohort, Modification modification){
- this.cohort = cohort;
- this.modification = modification;
- }
-
- public DOMStoreThreePhaseCommitCohort getCohort() {
- return cohort;
- }
-
- public Modification getModification() {
- return modification;
- }
-}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+
+/**
+ * A ReadyTransaction message that the ShardTransaction forwards to its local Shard.
+ *
+ * @author Thomas Pantelis
+ */
+public class ForwardedReadyTransaction {
+ private final String transactionID;
+ private final DOMStoreThreePhaseCommitCohort cohort;
+ private final Modification modification;
+
+ public ForwardedReadyTransaction(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
+ Modification modification) {
+ this.transactionID = transactionID;
+ this.cohort = cohort;
+ this.modification = modification;
+ }
+
+ public String getTransactionID() {
+ return transactionID;
+ }
+
+ public DOMStoreThreePhaseCommitCohort getCohort() {
+ return cohort;
+ }
+
+ public Modification getModification() {
+ return modification;
+ }
+}
package org.opendaylight.controller.cluster.datastore.messages;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Decoded;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Encoded;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
public class MergeData extends ModifyData{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.MergeData.class;
+ public static final Class<ShardTransactionMessages.MergeData> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.MergeData.class;
public MergeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data,
SchemaContext context) {
super(path, data, context);
}
- @Override public Object toSerializable() {
-
- NormalizedNodeMessages.Node normalizedNode =
- new NormalizedNodeToNodeCodec(schemaContext).encode(path, data)
- .getNormalizedNode();
+ @Override
+ public Object toSerializable() {
+ Encoded encoded = new NormalizedNodeToNodeCodec(schemaContext).encode(path, data);
return ShardTransactionMessages.MergeData.newBuilder()
- .setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path))
- .setNormalizedNode(normalizedNode).build();
+ .setInstanceIdentifierPathArguments(encoded.getEncodedPath())
+ .setNormalizedNode(encoded.getEncodedNode().getNormalizedNode()).build();
}
public static MergeData fromSerializable(Object serializable, SchemaContext schemaContext){
ShardTransactionMessages.MergeData o = (ShardTransactionMessages.MergeData) serializable;
- YangInstanceIdentifier identifier = InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments());
-
- NormalizedNode<?, ?> normalizedNode =
- new NormalizedNodeToNodeCodec(schemaContext)
- .decode(identifier, o.getNormalizedNode());
-
- return new MergeData(identifier, normalizedNode, schemaContext);
+ Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(
+ o.getInstanceIdentifierPathArguments(), o.getNormalizedNode());
+ return new MergeData(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
}
}
package org.opendaylight.controller.cluster.datastore.messages;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-public class ReadDataReply implements SerializableMessage{
-
- private final NormalizedNode<?, ?> normalizedNode;
- private final SchemaContext schemaContext;
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.ReadDataReply.class;
- public ReadDataReply(SchemaContext context,NormalizedNode<?, ?> normalizedNode){
-
- this.normalizedNode = normalizedNode;
- this.schemaContext = context;
- }
-
- public NormalizedNode<?, ?> getNormalizedNode() {
- return normalizedNode;
- }
-
- public Object toSerializable(){
- if(normalizedNode != null) {
- return ShardTransactionMessages.ReadDataReply.newBuilder()
- .setNormalizedNode(new NormalizedNodeToNodeCodec(schemaContext)
- .encode(YangInstanceIdentifier.builder().build(), normalizedNode).getNormalizedNode()
- ).build();
- }else{
- return ShardTransactionMessages.ReadDataReply.newBuilder().build();
+public class ReadDataReply implements SerializableMessage {
+ public static final Class<ShardTransactionMessages.ReadDataReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.ReadDataReply.class;
+ private final NormalizedNode<?, ?> normalizedNode;
+ private final SchemaContext schemaContext;
+
+ public ReadDataReply(SchemaContext context,NormalizedNode<?, ?> normalizedNode){
+
+ this.normalizedNode = normalizedNode;
+ this.schemaContext = context;
+ }
+
+ public NormalizedNode<?, ?> getNormalizedNode() {
+ return normalizedNode;
}
- }
+ @Override
+ public Object toSerializable(){
+ if(normalizedNode != null) {
+ return ShardTransactionMessages.ReadDataReply.newBuilder()
+ .setNormalizedNode(new NormalizedNodeToNodeCodec(schemaContext)
+ .encode(normalizedNode).getNormalizedNode()).build();
+ } else {
+ return ShardTransactionMessages.ReadDataReply.newBuilder().build();
- public static ReadDataReply fromSerializable(SchemaContext schemaContext,YangInstanceIdentifier id,Object serializable){
- ShardTransactionMessages.ReadDataReply o = (ShardTransactionMessages.ReadDataReply) serializable;
- return new ReadDataReply(schemaContext,new NormalizedNodeToNodeCodec(schemaContext).decode(id, o.getNormalizedNode()));
- }
+ }
+ }
+
+ public static ReadDataReply fromSerializable(SchemaContext schemaContext,
+ YangInstanceIdentifier id, Object serializable) {
+ ShardTransactionMessages.ReadDataReply o = (ShardTransactionMessages.ReadDataReply) serializable;
+ return new ReadDataReply(schemaContext, new NormalizedNodeToNodeCodec(schemaContext).decode(
+ o.getNormalizedNode()));
+ }
- public static ByteString getNormalizedNodeByteString(Object serializable){
- ShardTransactionMessages.ReadDataReply o = (ShardTransactionMessages.ReadDataReply) serializable;
- return ((ShardTransactionMessages.ReadDataReply) serializable).getNormalizedNode().toByteString();
- }
+ public static ByteString getNormalizedNodeByteString(Object serializable){
+ ShardTransactionMessages.ReadDataReply o = (ShardTransactionMessages.ReadDataReply) serializable;
+ return ((ShardTransactionMessages.ReadDataReply) serializable).getNormalizedNode().toByteString();
+ }
}
package org.opendaylight.controller.cluster.datastore.messages;
-import akka.actor.ActorPath;
-import akka.actor.ActorSystem;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class ReadyTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.ReadyTransactionReply.class;
- private final ActorPath cohortPath;
+ public static final Class<ShardTransactionMessages.ReadyTransactionReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.ReadyTransactionReply.class;
- public ReadyTransactionReply(ActorPath cohortPath) {
+ private final String cohortPath;
- this.cohortPath = cohortPath;
- }
+ public ReadyTransactionReply(String cohortPath) {
- public ActorPath getCohortPath() {
- return cohortPath;
- }
+ this.cohortPath = cohortPath;
+ }
- @Override
- public ShardTransactionMessages.ReadyTransactionReply toSerializable() {
- return ShardTransactionMessages.ReadyTransactionReply.newBuilder()
- .setActorPath(cohortPath.toString()).build();
- }
+ public String getCohortPath() {
+ return cohortPath;
+ }
- public static ReadyTransactionReply fromSerializable(ActorSystem actorSystem,Object serializable){
- ShardTransactionMessages.ReadyTransactionReply o = (ShardTransactionMessages.ReadyTransactionReply) serializable;
- return new ReadyTransactionReply(
- actorSystem.actorFor(o.getActorPath()).path());
- }
+ @Override
+ public ShardTransactionMessages.ReadyTransactionReply toSerializable() {
+ return ShardTransactionMessages.ReadyTransactionReply.newBuilder().
+ setActorPath(cohortPath).build();
+ }
+
+ public static ReadyTransactionReply fromSerializable(Object serializable) {
+ ShardTransactionMessages.ReadyTransactionReply o =
+ (ShardTransactionMessages.ReadyTransactionReply) serializable;
+
+ return new ReadyTransactionReply(o.getActorPath());
+ }
}
import akka.actor.ActorPath;
import akka.actor.ActorSystem;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
import org.opendaylight.controller.protobuff.messages.registration.ListenerRegistrationMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
public class RegisterChangeListener implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.RegisterChangeListener.class;
+ public static final Class<ListenerRegistrationMessages.RegisterChangeListener> SERIALIZABLE_CLASS =
+ ListenerRegistrationMessages.RegisterChangeListener.class;
+
private final YangInstanceIdentifier path;
private final ActorPath dataChangeListenerPath;
private final AsyncDataBroker.DataChangeScope scope;
package org.opendaylight.controller.cluster.datastore.messages;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Decoded;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Encoded;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-public class WriteData extends ModifyData{
+public class WriteData extends ModifyData {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.WriteData.class;
+ public static final Class<ShardTransactionMessages.WriteData> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.WriteData.class;
- public WriteData(YangInstanceIdentifier path, NormalizedNode<?, ?> data, SchemaContext schemaContext) {
- super(path, data, schemaContext);
- }
-
- @Override public Object toSerializable() {
+ public WriteData(YangInstanceIdentifier path, NormalizedNode<?, ?> data, SchemaContext schemaContext) {
+ super(path, data, schemaContext);
+ }
- NormalizedNodeMessages.Node normalizedNode =
- new NormalizedNodeToNodeCodec(schemaContext).encode(path, data)
- .getNormalizedNode();
+ @Override
+ public Object toSerializable() {
+ Encoded encoded = new NormalizedNodeToNodeCodec(schemaContext).encode(path, data);
return ShardTransactionMessages.WriteData.newBuilder()
- .setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path))
- .setNormalizedNode(normalizedNode).build();
-
+ .setInstanceIdentifierPathArguments(encoded.getEncodedPath())
+ .setNormalizedNode(encoded.getEncodedNode().getNormalizedNode()).build();
}
public static WriteData fromSerializable(Object serializable, SchemaContext schemaContext){
ShardTransactionMessages.WriteData o = (ShardTransactionMessages.WriteData) serializable;
- YangInstanceIdentifier identifier = InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments());
-
- NormalizedNode<?, ?> normalizedNode =
- new NormalizedNodeToNodeCodec(schemaContext)
- .decode(identifier, o.getNormalizedNode());
-
- return new WriteData(identifier, normalizedNode, schemaContext);
+ Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(
+ o.getInstanceIdentifierPathArguments(), o.getNormalizedNode());
+ return new WriteData(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
}
-
}
protected AbstractModification(YangInstanceIdentifier path) {
this.path = path;
}
+
+ public YangInstanceIdentifier getPath() {
+ return path;
+ }
}
package org.opendaylight.controller.cluster.datastore.modification;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
* DeleteModification store all the parameters required to delete a path from the data tree
*/
public class DeleteModification extends AbstractModification {
- public DeleteModification(YangInstanceIdentifier path) {
- super(path);
- }
+ private static final long serialVersionUID = 1L;
- @Override
- public void apply(DOMStoreWriteTransaction transaction) {
- transaction.delete(path);
- }
+ public DeleteModification(YangInstanceIdentifier path) {
+ super(path);
+ }
+
+ @Override
+ public void apply(DOMStoreWriteTransaction transaction) {
+ transaction.delete(path);
+ }
- @Override public Object toSerializable() {
- return PersistentMessages.Modification.newBuilder()
- .setType(this.getClass().toString())
- .setPath(InstanceIdentifierUtils.toSerializable(this.path))
- .build();
+ @Override
+ public Object toSerializable() {
+ return PersistentMessages.Modification.newBuilder().setType(this.getClass().toString())
+ .setPath(InstanceIdentifierUtils.toSerializable(this.path)).build();
}
- public static DeleteModification fromSerializable(Object serializable){
+ public static DeleteModification fromSerializable(Object serializable) {
PersistentMessages.Modification o = (PersistentMessages.Modification) serializable;
return new DeleteModification(InstanceIdentifierUtils.fromSerializable(o.getPath()));
}
package org.opendaylight.controller.cluster.datastore.modification;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Decoded;
import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
/**
* MergeModification stores all the parameters required to merge data into the specified path
*/
-public class MergeModification extends AbstractModification {
- private final NormalizedNode data;
- private final SchemaContext schemaContext;
-
+public class MergeModification extends WriteModification {
public MergeModification(YangInstanceIdentifier path, NormalizedNode data,
SchemaContext schemaContext) {
- super(path);
- this.data = data;
- this.schemaContext = schemaContext;
+ super(path, data, schemaContext);
}
@Override
transaction.merge(path, data);
}
- @Override public Object toSerializable() {
- NormalizedNodeMessages.Container encode =
- new NormalizedNodeToNodeCodec(schemaContext).encode(
- path, data);
-
- return PersistentMessages.Modification.newBuilder()
- .setType(this.getClass().toString())
- .setPath(InstanceIdentifierUtils.toSerializable(this.path))
- .setData(encode.getNormalizedNode())
- .build();
-
- }
-
- public static MergeModification fromSerializable(
- Object serializable,
- SchemaContext schemaContext) {
+ public static MergeModification fromSerializable(Object serializable, SchemaContext schemaContext) {
PersistentMessages.Modification o = (PersistentMessages.Modification) serializable;
-
- YangInstanceIdentifier path = InstanceIdentifierUtils.fromSerializable(o.getPath());
- NormalizedNode data = new NormalizedNodeToNodeCodec(schemaContext).decode(
- path, o.getData());
-
- return new MergeModification(path, data, schemaContext);
+ Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(o.getPath(), o.getData());
+ return new MergeModification(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
}
-
}
package org.opendaylight.controller.cluster.datastore.modification;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
-import org.opendaylight.controller.cluster.datastore.utils.InstanceIdentifierUtils;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Decoded;
+import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec.Encoded;
import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
*/
public class WriteModification extends AbstractModification {
- private final NormalizedNode data;
+ protected final NormalizedNode data;
private final SchemaContext schemaContext;
public WriteModification(YangInstanceIdentifier path, NormalizedNode data, SchemaContext schemaContext) {
- super(path);
- this.data = data;
+ super(path);
+ this.data = data;
this.schemaContext = schemaContext;
}
- @Override
- public void apply(DOMStoreWriteTransaction transaction) {
- transaction.write(path, data);
- }
+ @Override
+ public void apply(DOMStoreWriteTransaction transaction) {
+ transaction.write(path, data);
+ }
- @Override public Object toSerializable() {
- NormalizedNodeMessages.Container encode =
- new NormalizedNodeToNodeCodec(schemaContext).encode(
- path, data);
+ public NormalizedNode getData() {
+ return data;
+ }
+ @Override
+ public Object toSerializable() {
+ Encoded encoded = new NormalizedNodeToNodeCodec(schemaContext).encode(path, data);
return PersistentMessages.Modification.newBuilder()
- .setType(this.getClass().toString())
- .setPath(InstanceIdentifierUtils.toSerializable(this.path))
- .setData(encode.getNormalizedNode())
- .build();
-
+ .setType(this.getClass().toString())
+ .setPath(encoded.getEncodedPath())
+ .setData(encoded.getEncodedNode().getNormalizedNode())
+ .build();
}
- public static WriteModification fromSerializable(
- Object serializable,
- SchemaContext schemaContext) {
+ public static WriteModification fromSerializable(Object serializable, SchemaContext schemaContext) {
PersistentMessages.Modification o = (PersistentMessages.Modification) serializable;
-
- YangInstanceIdentifier path = InstanceIdentifierUtils.fromSerializable(o.getPath());
- NormalizedNode data = new NormalizedNodeToNodeCodec(schemaContext).decode(
- path, o.getData());
-
- return new WriteModification(path, data, schemaContext);
+ Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(o.getPath(), o.getData());
+ return new WriteModification(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
}
}
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
-import akka.pattern.Patterns;
import akka.util.Timeout;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.Configuration;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
+import org.opendaylight.controller.cluster.datastore.exceptions.NotInitializedException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import org.opendaylight.controller.cluster.datastore.messages.ActorNotInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
}
/**
- * Finds the primary for a given shard
+ * Finds the primary shard for the given shard name
*
* @param shardName
* @return
*/
- public ActorSelection findPrimary(String shardName) {
- String path = findPrimaryPath(shardName);
- return actorSystem.actorSelection(path);
+ public Optional<ActorSelection> findPrimaryShard(String shardName) {
+ String path = findPrimaryPathOrNull(shardName);
+ if (path == null){
+ return Optional.absent();
+ }
+ return Optional.of(actorSystem.actorSelection(path));
}
/**
* @return a reference to a local shard actor which represents the shard
* specified by the shardName
*/
- public ActorRef findLocalShard(String shardName) {
- Object result = executeLocalOperation(shardManager,
- new FindLocalShard(shardName));
+ public Optional<ActorRef> findLocalShard(String shardName) {
+ Object result = executeOperation(shardManager, new FindLocalShard(shardName));
if (result instanceof LocalShardFound) {
LocalShardFound found = (LocalShardFound) result;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Local shard found {}", found.getPath());
- }
- return found.getPath();
+ LOG.debug("Local shard found {}", found.getPath());
+ return Optional.of(found.getPath());
}
- return null;
+ return Optional.absent();
}
- public String findPrimaryPath(String shardName) {
- Object result = executeLocalOperation(shardManager,
- new FindPrimary(shardName).toSerializable());
+ private String findPrimaryPathOrNull(String shardName) {
+ Object result = executeOperation(shardManager, new FindPrimary(shardName).toSerializable());
if (result.getClass().equals(PrimaryFound.SERIALIZABLE_CLASS)) {
PrimaryFound found = PrimaryFound.fromSerializable(result);
- if(LOG.isDebugEnabled()) {
- LOG.debug("Primary found {}", found.getPrimaryPath());
- }
+ LOG.debug("Primary found {}", found.getPrimaryPath());
return found.getPrimaryPath();
+
+ } else if (result.getClass().equals(ActorNotInitialized.class)){
+ throw new NotInitializedException(
+ String.format("Found primary shard[%s] but its not initialized yet. Please try again later", shardName)
+ );
+
+ } else {
+ return null;
}
- throw new PrimaryNotFoundException("Could not find primary for shardName " + shardName);
}
* @param message
* @return The response of the operation
*/
- public Object executeLocalOperation(ActorRef actor, Object message) {
- Future<Object> future = ask(actor, message, operationTimeout);
+ public Object executeOperation(ActorRef actor, Object message) {
+ Future<Object> future = executeOperationAsync(actor, message, operationTimeout);
try {
return Await.result(future, operationDuration);
} catch (Exception e) {
- throw new TimeoutException("Sending message " + message.getClass().toString() + " to actor " + actor.toString() + " failed" , e);
+ throw new TimeoutException("Sending message " + message.getClass().toString() +
+ " to actor " + actor.toString() + " failed. Try again later.", e);
}
}
+ public Future<Object> executeOperationAsync(ActorRef actor, Object message, Timeout timeout) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
+ return ask(actor, message, timeout);
+ }
+
/**
* Execute an operation on a remote actor and wait for it's response
*
* @param message
* @return
*/
- public Object executeRemoteOperation(ActorSelection actor, Object message) {
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Sending remote message {} to {}", message.getClass().toString(),
- actor.toString());
- }
- Future<Object> future = ask(actor, message, operationTimeout);
+ public Object executeOperation(ActorSelection actor, Object message) {
+ Future<Object> future = executeOperationAsync(actor, message);
try {
return Await.result(future, operationDuration);
} catch (Exception e) {
throw new TimeoutException("Sending message " + message.getClass().toString() +
- " to actor " + actor.toString() + " failed" , e);
+ " to actor " + actor.toString() + " failed. Try again later.", e);
}
}
* @param message the message to send
* @return a Future containing the eventual result
*/
- public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message) {
+ public Future<Object> executeOperationAsync(ActorSelection actor, Object message) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
- if(LOG.isDebugEnabled()) {
- LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
- }
return ask(actor, message, operationTimeout);
}
* @param actor the ActorSelection
* @param message the message to send
*/
- public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
- actor.tell(message, ActorRef.noSender());
- }
-
- public void sendShardOperationAsync(String shardName, Object message) {
- ActorSelection primary = findPrimary(shardName);
-
- primary.tell(message, ActorRef.noSender());
- }
-
-
- /**
- * Execute an operation on the primary for a given shard
- * <p>
- * This method first finds the primary for a given shard ,then sends
- * the message to the remote shard and waits for a response
- * </p>
- *
- * @param shardName
- * @param message
- * @return
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException if the message to the remote shard times out
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException if the primary shard is not found
- */
- public Object executeShardOperation(String shardName, Object message) {
- ActorSelection primary = findPrimary(shardName);
-
- return executeRemoteOperation(primary, message);
- }
-
- /**
- * Execute an operation on the the local shard only
- * <p>
- * This method first finds the address of the local shard if any. It then
- * executes the operation on it.
- * </p>
- *
- * @param shardName the name of the shard on which the operation needs to be executed
- * @param message the message that needs to be sent to the shard
- * @return the message that was returned by the local actor on which the
- * the operation was executed. If a local shard was not found then
- * null is returned
- * @throws org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException
- * if the operation does not complete in a specified time duration
- */
- public Object executeLocalShardOperation(String shardName, Object message) {
- ActorRef local = findLocalShard(shardName);
-
- if(local != null) {
- return executeLocalOperation(local, message);
- }
-
- return null;
- }
+ public void sendOperationAsync(ActorSelection actor, Object message) {
+ Preconditions.checkArgument(actor != null, "actor must not be null");
+ Preconditions.checkArgument(message != null, "message must not be null");
+ LOG.debug("Sending message {} to {}", message.getClass().toString(), actor.toString());
- /**
- * Execute an operation on the the local shard only asynchronously
- *
- * <p>
- * This method first finds the address of the local shard if any. It then
- * executes the operation on it.
- * </p>
- *
- * @param shardName the name of the shard on which the operation needs to be executed
- * @param message the message that needs to be sent to the shard
- * @param timeout the amount of time that this method should wait for a response before timing out
- * @return null if the shard could not be located else a future on which the caller can wait
- *
- */
- public Future executeLocalShardOperationAsync(String shardName, Object message, Timeout timeout) {
- ActorRef local = findLocalShard(shardName);
- if(local == null){
- return null;
- }
- return Patterns.ask(local, message, timeout);
+ actor.tell(message, ActorRef.noSender());
}
-
-
public void shutdown() {
shardManager.tell(PoisonPill.getInstance(), null);
actorSystem.shutdown();
}
- /**
- * @deprecated Need to stop using this method. There are ways to send a
- * remote ActorRef as a string which should be used instead of this hack
- *
- * @param primaryPath
- * @param localPathOfRemoteActor
- * @return
- */
- @Deprecated
- public String resolvePath(final String primaryPath,
- final String localPathOfRemoteActor) {
- StringBuilder builder = new StringBuilder();
- String[] primaryPathElements = primaryPath.split("/");
- builder.append(primaryPathElements[0]).append("//")
- .append(primaryPathElements[1]).append(primaryPathElements[2]);
- String[] remotePathElements = localPathOfRemoteActor.split("/");
- for (int i = 3; i < remotePathElements.length; i++) {
- builder.append("/").append(remotePathElements[i]);
- }
-
- return builder.toString();
-
- }
-
- public ActorPath actorFor(String path){
- return actorSystem.actorFor(path).path();
- }
-
public String getCurrentMemberName(){
return clusterWrapper.getCurrentMemberName();
}
*/
public void broadcast(Object message){
for(String shardName : configuration.getAllShardNames()){
- try {
- sendShardOperationAsync(shardName, message);
- } catch(Exception e){
- LOG.warn("broadcast failed to send message " + message.getClass().getSimpleName() + " to shard " + shardName, e);
+
+ Optional<ActorSelection> primary = findPrimaryShard(shardName);
+ if (primary.isPresent()) {
+ primary.get().tell(message, ActorRef.noSender());
+ } else {
+ LOG.warn("broadcast failed to send message {} to shard {}. Primary not found",
+ message.getClass().getSimpleName(), shardName);
}
}
}
+++ /dev/null
-package org.opendaylight.controller.cluster.datastore.utils;
-
-import org.opendaylight.controller.cluster.datastore.node.utils.NodeIdentifierFactory;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * @author: syedbahm
- */
-public class InstanceIdentifierUtils {
-
- protected static final Logger logger = LoggerFactory
- .getLogger(InstanceIdentifierUtils.class);
-
- public static String getParentPath(String currentElementPath) {
-
- StringBuilder parentPath = new StringBuilder();
-
- if (currentElementPath != null) {
- String[] parentPaths = currentElementPath.split("/");
- if (parentPaths.length > 2) {
- for (int i = 0; i < parentPaths.length - 1; i++) {
- if (parentPaths[i].length() > 0) {
- parentPath.append( "/");
- parentPath.append( parentPaths[i]);
- }
- }
- }
- }
- return parentPath.toString();
- }
-
- @Deprecated
- public static YangInstanceIdentifier from(String path) {
- String[] ids = path.split("/");
-
- List<YangInstanceIdentifier.PathArgument> pathArguments =
- new ArrayList<>();
- for (String nodeId : ids) {
- if (!"".equals(nodeId)) {
- pathArguments
- .add(NodeIdentifierFactory.getArgument(nodeId));
- }
- }
- final YangInstanceIdentifier instanceIdentifier =
- YangInstanceIdentifier.create(pathArguments);
- return instanceIdentifier;
- }
-
- /**
- * @deprecated Use {@link org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils} instead
- * @param path
- * @return
- */
- @Deprecated
- public static NormalizedNodeMessages.InstanceIdentifier toSerializable(YangInstanceIdentifier path){
- return org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils.toSerializable(path);
- }
-
- /**
- * @deprecated Use {@link org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils} instead
- * @param path
- * @return
- */
- @Deprecated
- public static YangInstanceIdentifier fromSerializable(NormalizedNodeMessages.InstanceIdentifier path){
- return org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils.fromSerializable(path);
- }
-}
props = new ConfigProperties();
}
- DatastoreContext datastoreContext = new DatastoreContext("DistributedConfigDatastore",
- InMemoryDOMDataStoreConfigProperties.create(
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder()
+ .dataStoreMXBeanType("DistributedConfigDatastore")
+ .dataStoreProperties(InMemoryDOMDataStoreConfigProperties.create(
props.getMaxShardDataChangeExecutorPoolSize().getValue().intValue(),
props.getMaxShardDataChangeExecutorQueueSize().getValue().intValue(),
props.getMaxShardDataChangeListenerQueueSize().getValue().intValue(),
- props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()),
- Duration.create(props.getShardTransactionIdleTimeoutInMinutes().getValue(),
- TimeUnit.MINUTES),
- props.getOperationTimeoutInSeconds().getValue(),
- props.getShardJournalRecoveryLogBatchSize().getValue().intValue(),
- props.getShardSnapshotBatchCount().getValue().intValue(),
- props.getShardHearbeatIntervalInMillis().getValue());
+ props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()))
+ .shardTransactionIdleTimeout(Duration.create(
+ props.getShardTransactionIdleTimeoutInMinutes().getValue(), TimeUnit.MINUTES))
+ .operationTimeoutInSeconds(props.getOperationTimeoutInSeconds().getValue())
+ .shardJournalRecoveryLogBatchSize(props.getShardJournalRecoveryLogBatchSize().
+ getValue().intValue())
+ .shardSnapshotBatchCount(props.getShardSnapshotBatchCount().getValue().intValue())
+ .shardHeartbeatIntervalInMillis(props.getShardHearbeatIntervalInMillis().getValue())
+ .shardTransactionCommitTimeoutInSeconds(
+ props.getShardTransactionCommitTimeoutInSeconds().getValue().intValue())
+ .shardTransactionCommitQueueCapacity(
+ props.getShardTransactionCommitQueueCapacity().getValue().intValue())
+ .build();
return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(),
datastoreContext, bundleContext);
props = new OperationalProperties();
}
- DatastoreContext datastoreContext = new DatastoreContext("DistributedOperationalDatastore",
- InMemoryDOMDataStoreConfigProperties.create(
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder()
+ .dataStoreMXBeanType("DistributedOperationalDatastore")
+ .dataStoreProperties(InMemoryDOMDataStoreConfigProperties.create(
props.getMaxShardDataChangeExecutorPoolSize().getValue().intValue(),
props.getMaxShardDataChangeExecutorQueueSize().getValue().intValue(),
props.getMaxShardDataChangeListenerQueueSize().getValue().intValue(),
- props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()),
- Duration.create(props.getShardTransactionIdleTimeoutInMinutes().getValue(),
- TimeUnit.MINUTES),
- props.getOperationTimeoutInSeconds().getValue(),
- props.getShardJournalRecoveryLogBatchSize().getValue().intValue(),
- props.getShardSnapshotBatchCount().getValue().intValue(),
- props.getShardHearbeatIntervalInMillis().getValue());
+ props.getMaxShardDataStoreExecutorQueueSize().getValue().intValue()))
+ .shardTransactionIdleTimeout(Duration.create(
+ props.getShardTransactionIdleTimeoutInMinutes().getValue(), TimeUnit.MINUTES))
+ .operationTimeoutInSeconds(props.getOperationTimeoutInSeconds().getValue())
+ .shardJournalRecoveryLogBatchSize(props.getShardJournalRecoveryLogBatchSize().
+ getValue().intValue())
+ .shardSnapshotBatchCount(props.getShardSnapshotBatchCount().getValue().intValue())
+ .shardHeartbeatIntervalInMillis(props.getShardHearbeatIntervalInMillis().getValue())
+ .shardTransactionCommitTimeoutInSeconds(
+ props.getShardTransactionCommitTimeoutInSeconds().getValue().intValue())
+ .shardTransactionCommitQueueCapacity(
+ props.getShardTransactionCommitQueueCapacity().getValue().intValue())
+ .build();
return DistributedDataStoreFactory.createInstance("operational",
getOperationalSchemaServiceDependency(), datastoreContext, bundleContext);
leaf max-shard-data-change-listener-queue-size {
default 1000;
type non-zero-uint32-type;
- description "The maximum queue size for each shard's data store data change listeners.";
+ description "The maximum queue size for each shard's data store data change listener.";
}
leaf max-shard-data-store-executor-queue-size {
description "The maximum number of journal log entries to batch on recovery for a shard before committing to the data store.";
}
+ leaf shard-transaction-commit-timeout-in-seconds {
+ default 30;
+ type non-zero-uint32-type;
+ description "The maximum amount of time a shard transaction three-phase commit can be idle without receiving the next messages before it aborts the transaction";
+ }
+
+ leaf shard-transaction-commit-queue-capacity {
+ default 20000;
+ type non-zero-uint32-type;
+ description "The maximum allowed capacity for each shard's transaction commit queue.";
+ }
+
leaf enable-metric-capture {
default false;
type boolean;
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorPath;
-import akka.actor.ActorRef;
-import akka.actor.ActorSelection;
-import akka.actor.Props;
-import akka.event.Logging;
-import akka.testkit.JavaTestKit;
-import org.junit.Test;
-import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
-import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
-import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
-import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
-import org.opendaylight.controller.cluster.datastore.messages.WriteData;
-import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
-import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
-import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.FiniteDuration;
-
-import java.util.Collections;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-
-public class BasicIntegrationTest extends AbstractActorTest {
-
- @Test
- public void integrationTest() throws Exception{
- // System.setProperty("shard.persistent", "true");
- // This test will
- // - create a Shard
- // - initiate a transaction
- // - write something
- // - read the transaction for commit
- // - commit the transaction
-
-
- new JavaTestKit(getSystem()) {{
- final ShardIdentifier identifier =
- ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config").build();
-
- final SchemaContext schemaContext = TestModel.createTestContext();
- DatastoreContext datastoreContext = new DatastoreContext();
-
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext());
- final ActorRef shard = getSystem().actorOf(props);
-
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- shard.tell(new UpdateSchemaContext(schemaContext), getRef());
-
-
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(shard.path().toString())
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
-
- assertEquals(true, result);
-
- // Create a transaction on the shard
- shard.tell(new CreateTransaction("txn-1", TransactionProxy.TransactionType.WRITE_ONLY.ordinal() ).toSerializable(), getRef());
-
- final ActorSelection transaction =
- new ExpectMsg<ActorSelection>(duration("3 seconds"), "CreateTransactionReply") {
- @Override
- protected ActorSelection match(Object in) {
- if (CreateTransactionReply.SERIALIZABLE_CLASS.equals(in.getClass())) {
- CreateTransactionReply reply = CreateTransactionReply.fromSerializable(in);
- return getSystem()
- .actorSelection(reply
- .getTransactionPath());
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertNotNull(transaction);
-
- System.out.println("Successfully created transaction");
-
- // 3. Write some data
- transaction.tell(new WriteData(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.TEST_QNAME), schemaContext).toSerializable(),
- getRef());
-
- Boolean writeDone = new ExpectMsg<Boolean>(duration("3 seconds"), "WriteDataReply") {
- @Override
- protected Boolean match(Object in) {
- if (in.getClass().equals(WriteDataReply.SERIALIZABLE_CLASS)) {
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(writeDone);
-
- System.out.println("Successfully wrote data");
-
- // 4. Ready the transaction for commit
-
- transaction.tell(new ReadyTransaction().toSerializable(), getRef());
-
- final ActorSelection cohort =
- new ExpectMsg<ActorSelection>(duration("3 seconds"), "ReadyTransactionReply") {
- @Override
- protected ActorSelection match(Object in) {
- if (in.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)) {
- ActorPath cohortPath =
- ReadyTransactionReply.fromSerializable(getSystem(),in)
- .getCohortPath();
- return getSystem()
- .actorSelection(cohortPath);
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertNotNull(cohort);
-
- System.out.println("Successfully readied the transaction");
-
- // 5. PreCommit the transaction
-
- cohort.tell(new PreCommitTransaction().toSerializable(), getRef());
-
- Boolean preCommitDone =
- new ExpectMsg<Boolean>(duration("3 seconds"), "PreCommitTransactionReply") {
- @Override
- protected Boolean match(Object in) {
- if (in.getClass().equals(PreCommitTransactionReply.SERIALIZABLE_CLASS)) {
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(preCommitDone);
-
- System.out.println("Successfully pre-committed the transaction");
-
- // 6. Commit the transaction
- cohort.tell(new CommitTransaction().toSerializable(), getRef());
-
- // FIXME : Add assertions that the commit worked and that the cohort and transaction actors were terminated
-
- System.out.println("TODO : Check Successfully committed the transaction");
- }
-
-
- };
- }
-
- private ActorRef watchActor(ActorSelection actor) {
- Future<ActorRef> future = actor
- .resolveOne(FiniteDuration.apply(100, "milliseconds"));
-
- try {
- ActorRef actorRef = Await.result(future,
- FiniteDuration.apply(100, "milliseconds"));
-
- watch(actorRef);
-
- return actorRef;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- }
- };
-
-
- }
-}
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)), new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
Assert.assertNotNull(messages);
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)),new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
assertNotNull(messages);
ActorContext
testContext = new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)),new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
assertNotNull(messages);
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
+import akka.actor.DeadLetter;
import akka.actor.Props;
import akka.testkit.JavaTestKit;
import org.junit.Test;
+import org.mockito.Mockito;
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.controller.cluster.datastore.messages.DataChangedReply;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
import org.opendaylight.controller.md.cluster.datastore.model.CompositeModel;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
public class DataChangeListenerTest extends AbstractActorTest {
- private static class MockDataChangedEvent implements AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> createdData = new HashMap<>();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> updatedData = new HashMap<>();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> originalData = new HashMap<>();
-
-
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getCreatedData() {
- createdData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return createdData;
- }
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getUpdatedData() {
- updatedData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return updatedData;
-
- }
-
- @Override
- public Set<YangInstanceIdentifier> getRemovedPaths() {
- Set<YangInstanceIdentifier>ids = new HashSet();
- ids.add( CompositeModel.TEST_PATH);
- return ids;
- }
-
- @Override
- public Map<YangInstanceIdentifier, NormalizedNode<?, ?>> getOriginalData() {
- originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- return originalData;
- }
-
- @Override public NormalizedNode<?, ?> getOriginalSubtree() {
-
-
- return originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- }
-
- @Override public NormalizedNode<?, ?> getUpdatedSubtree() {
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @Test
+ public void testDataChangedWhenNotificationsAreEnabled(){
+ new JavaTestKit(getSystem()) {{
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
+ final ActorRef subject = getSystem().actorOf(props, "testDataChangedNotificationsEnabled");
- //fixme: need to have some valid data here
- return originalData.put(CompositeModel.FAMILY_PATH, CompositeModel.createFamily());
- }
- }
+ // Let the DataChangeListener know that notifications should be enabled
+ subject.tell(new EnableNotification(true), getRef());
- private class MockDataChangeListener implements AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- private boolean gotIt = false;
- private AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change;
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ getRef());
- @Override public void onDataChanged(
- AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- gotIt = true;this.change=change;
- }
+ expectMsgClass(DataChangedReply.class);
- public boolean gotIt() {
- return gotIt;
- }
- public AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> getChange(){
- return change;
- }
+ Mockito.verify(mockListener).onDataChanged(mockChangeEvent);
+ }};
}
+ @SuppressWarnings({ "rawtypes", "unchecked" })
@Test
- public void testDataChangedWhenNotificationsAreEnabled(){
+ public void testDataChangedWhenNotificationsAreDisabled(){
new JavaTestKit(getSystem()) {{
- final MockDataChangeListener listener = new MockDataChangeListener();
- final Props props = DataChangeListener.props(listener);
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
final ActorRef subject =
- getSystem().actorOf(props, "testDataChangedNotificationsEnabled");
+ getSystem().actorOf(props, "testDataChangedNotificationsDisabled");
+
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ getRef());
new Within(duration("1 seconds")) {
@Override
protected void run() {
-
- // Let the DataChangeListener know that notifications should
- // be enabled
- subject.tell(new EnableNotification(true), getRef());
-
- subject.tell(
- new DataChanged(CompositeModel.createTestContext(),new MockDataChangedEvent()),
- getRef());
-
- final Boolean out = new ExpectMsg<Boolean>(duration("800 millis"), "dataChanged") {
- // do not put code outside this method, will run afterwards
- @Override
- protected Boolean match(Object in) {
- if (in != null && in.getClass().equals(DataChangedReply.class)) {
-
- return true;
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
-
- assertTrue(out);
- assertTrue(listener.gotIt());
- assertNotNull(listener.getChange().getCreatedData());
-
expectNoMsg();
+
+ Mockito.verify(mockListener, Mockito.never()).onDataChanged(
+ Mockito.any(AsyncDataChangeEvent.class));
}
};
}};
}
+ @SuppressWarnings({ "rawtypes", "unchecked" })
@Test
- public void testDataChangedWhenNotificationsAreDisabled(){
+ public void testDataChangedWithNoSender(){
new JavaTestKit(getSystem()) {{
- final MockDataChangeListener listener = new MockDataChangeListener();
- final Props props = DataChangeListener.props(listener);
- final ActorRef subject =
- getSystem().actorOf(props, "testDataChangedNotificationsDisabled");
+ final AsyncDataChangeEvent mockChangeEvent = Mockito.mock(AsyncDataChangeEvent.class);
+ final AsyncDataChangeListener mockListener = Mockito.mock(AsyncDataChangeListener.class);
+ final Props props = DataChangeListener.props(mockListener);
+ final ActorRef subject = getSystem().actorOf(props, "testDataChangedWithNoSender");
+ // Let the DataChangeListener know that notifications should be enabled
+ subject.tell(new EnableNotification(true), ActorRef.noSender());
+
+ subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
+ ActorRef.noSender());
+
+ getSystem().eventStream().subscribe(getRef(), DeadLetter.class);
new Within(duration("1 seconds")) {
@Override
protected void run() {
-
- subject.tell(
- new DataChanged(CompositeModel.createTestContext(),new MockDataChangedEvent()),
- getRef());
-
expectNoMsg();
}
};
package org.opendaylight.controller.cluster.datastore;
+import akka.actor.ActorRef;
import akka.actor.ActorSystem;
-import akka.event.Logging;
-import akka.testkit.JavaTestKit;
-
+import akka.actor.PoisonPill;
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.ListenableFuture;
-
-import junit.framework.Assert;
-
-import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Before;
+import com.google.common.util.concurrent.Uninterruptibles;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.md.cluster.datastore.model.PeopleModel;
import org.opendaylight.controller.md.cluster.datastore.model.SchemaContextHelper;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.concurrent.ExecutionException;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
+public class DistributedDataStoreIntegrationTest extends AbstractActorTest {
-public class DistributedDataStoreIntegrationTest {
+ @Test
+ public void testWriteTransactionWithSingleShard() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionIntegrationTest", "test-1");
- private static ActorSystem system;
+ testWriteTransaction(dataStore, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- @Before
- public void setUp() throws IOException {
- File journal = new File("journal");
+ testWriteTransaction(dataStore, TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
- if(journal.exists()) {
- FileUtils.deleteDirectory(journal);
- }
+ cleanup(dataStore);
+ }};
+ }
+ @Test
+ public void testWriteTransactionWithMultipleShards() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("testWriteTransactionWithMultipleShards", "cars-1", "people-1");
- System.setProperty("shard.persistent", "false");
- system = ActorSystem.create("test");
- }
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- @After
- public void tearDown() {
- JavaTestKit.shutdownActorSystem(system);
- system = null;
- }
+ YangInstanceIdentifier nodePath1 = CarsModel.BASE_PATH;
+ NormalizedNode<?, ?> nodeToWrite1 = CarsModel.emptyContainer();
+ writeTx.write(nodePath1, nodeToWrite1);
- protected ActorSystem getSystem() {
- return system;
- }
+ YangInstanceIdentifier nodePath2 = PeopleModel.BASE_PATH;
+ NormalizedNode<?, ?> nodeToWrite2 = PeopleModel.emptyContainer();
+ writeTx.write(nodePath2, nodeToWrite2);
- @Test
- public void integrationTest() throws Exception {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
- ShardStrategyFactory.setConfiguration(configuration);
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- new JavaTestKit(getSystem()) {
- {
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration,
- new DatastoreContext());
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(nodePath1).get();
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite1, optional.get());
- distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
+ optional = readTx.read(nodePath2).get();
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite2, optional.get());
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from("akka://test/user/shardmanager-config/member-1-shard-test-1-config")
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
+ cleanup(dataStore);
+ }};
+ }
- assertEquals(true, result);
+ @Test
+ public void testReadWriteTransaction() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("testReadWriteTransaction", "test-1");
- DOMStoreReadWriteTransaction transaction =
- distributedDataStore.newReadWriteTransaction();
+ // 1. Create a read-write Tx
- transaction
- .write(TestModel.TEST_PATH, ImmutableNodes
- .containerNode(TestModel.TEST_QNAME));
+ DOMStoreReadWriteTransaction readWriteTx = dataStore.newReadWriteTransaction();
+ assertNotNull("newReadWriteTransaction returned null", readWriteTx);
- ListenableFuture<Optional<NormalizedNode<?, ?>>>
- future =
- transaction.read(TestModel.TEST_PATH);
+ // 2. Write some data
- Optional<NormalizedNode<?, ?>> optional =
- future.get();
+ YangInstanceIdentifier nodePath = TestModel.TEST_PATH;
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ readWriteTx.write(nodePath, nodeToWrite );
- Assert.assertTrue("Node not found", optional.isPresent());
+ // 3. Read the data from Tx
- NormalizedNode<?, ?> normalizedNode =
- optional.get();
+ Boolean exists = readWriteTx.exists(nodePath).checkedGet(5, TimeUnit.SECONDS);
+ assertEquals("exists", true, exists);
- assertEquals(TestModel.TEST_QNAME,
- normalizedNode.getNodeType());
+ Optional<NormalizedNode<?, ?>> optional = readWriteTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
- DOMStoreThreePhaseCommitCohort ready =
- transaction.ready();
+ // 4. Ready the Tx for commit
- ListenableFuture<Boolean> canCommit =
- ready.canCommit();
+ DOMStoreThreePhaseCommitCohort cohort = readWriteTx.ready();
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 5. Commit the Tx
- ListenableFuture<Void> preCommit =
- ready.preCommit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- preCommit.get(5, TimeUnit.SECONDS);
+ // 6. Verify the data in the store
- ListenableFuture<Void> commit = ready.commit();
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
- commit.get(5, TimeUnit.SECONDS);
- } catch (ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ optional = readTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
+ cleanup(dataStore);
+ }};
}
@Test
- public void transactionChainIntegrationTest() throws Exception {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
- ShardStrategyFactory.setConfiguration(configuration);
+ public void testTransactionAbort() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionAbortIntegrationTest", "test-1");
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
+ writeTx.write(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- new JavaTestKit(getSystem()) {
- {
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration,
- new DatastoreContext());
+ cohort.canCommit().get(5, TimeUnit.SECONDS);
- distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
+ cohort.abort().get(5, TimeUnit.SECONDS);
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from("akka://test/user/shardmanager-config/member-1-shard-test-1-config")
- .message("Switching from state Candidate to Leader")
- .occurrences(1).exec();
+ testWriteTransaction(dataStore, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME));
- assertEquals(true, result);
-
- DOMStoreTransactionChain transactionChain =
- distributedDataStore.createTransactionChain();
-
- DOMStoreReadWriteTransaction transaction =
- transactionChain.newReadWriteTransaction();
+ cleanup(dataStore);
+ }};
+ }
- transaction
- .write(TestModel.TEST_PATH, ImmutableNodes
- .containerNode(TestModel.TEST_QNAME));
+ @Test
+ public void testTransactionChain() throws Exception{
+ System.setProperty("shard.persistent", "true");
+ new IntegrationTestKit(getSystem()) {{
+ DistributedDataStore dataStore =
+ setupDistributedDataStore("transactionChainIntegrationTest", "test-1");
- ListenableFuture<Optional<NormalizedNode<?, ?>>>
- future =
- transaction.read(TestModel.TEST_PATH);
+ // 1. Create a Tx chain and write-only Tx
- Optional<NormalizedNode<?, ?>> optional =
- future.get();
+ DOMStoreTransactionChain txChain = dataStore.createTransactionChain();
- Assert.assertTrue("Node not found", optional.isPresent());
+ DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- NormalizedNode<?, ?> normalizedNode =
- optional.get();
+ // 2. Write some data
- assertEquals(TestModel.TEST_QNAME,
- normalizedNode.getNodeType());
+ NormalizedNode<?, ?> containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ writeTx.write(TestModel.TEST_PATH, containerNode);
- DOMStoreThreePhaseCommitCohort ready =
- transaction.ready();
+ // 3. Ready the Tx for commit
- ListenableFuture<Boolean> canCommit =
- ready.canCommit();
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 4. Commit the Tx
- ListenableFuture<Void> preCommit =
- ready.preCommit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- preCommit.get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- ListenableFuture<Void> commit = ready.commit();
+ DOMStoreReadTransaction readTx = txChain.newReadOnlyTransaction();
- commit.get(5, TimeUnit.SECONDS);
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", containerNode, optional.get());
- transactionChain.close();
- } catch (ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ txChain.close();
+ cleanup(dataStore);
+ }};
}
+ class IntegrationTestKit extends ShardTestKit {
- //FIXME : Disabling test because it's flaky
- //@Test
- public void integrationTestWithMultiShardConfiguration()
- throws ExecutionException, InterruptedException, TimeoutException {
- final Configuration configuration = new ConfigurationImpl("module-shards.conf", "modules.conf");
-
- ShardStrategyFactory.setConfiguration(configuration);
-
- new JavaTestKit(getSystem()) {
- {
+ IntegrationTestKit(ActorSystem actorSystem) {
+ super(actorSystem);
+ }
- new Within(duration("10 seconds")) {
- @Override
- protected void run() {
- try {
- final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration, null);
+ DistributedDataStore setupDistributedDataStore(String typeName, String... shardNames) {
+ MockClusterWrapper cluster = new MockClusterWrapper();
+ Configuration config = new ConfigurationImpl("module-shards.conf", "modules.conf");
+ ShardStrategyFactory.setConfiguration(config);
+
+ DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
+ DistributedDataStore dataStore = new DistributedDataStore(getSystem(), typeName, cluster,
+ config, datastoreContext);
+
+ SchemaContext schemaContext = SchemaContextHelper.full();
+ dataStore.onGlobalContextUpdated(schemaContext);
+
+ for(String shardName: shardNames) {
+ ActorRef shard = null;
+ for(int i = 0; i < 20 * 5 && shard == null; i++) {
+ Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
+ Optional<ActorRef> shardReply = dataStore.getActorContext().findLocalShard(shardName);
+ if(shardReply.isPresent()) {
+ shard = shardReply.get();
+ }
+ }
- distributedDataStore.onGlobalContextUpdated(
- SchemaContextHelper.full());
+ assertNotNull("Shard was not created", shard);
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(
- Logging.Info.class
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(
- "akka://test/user/shardmanager-config/member-1-shard-cars-1-config")
- .message(
- "Switching from state Candidate to Leader")
- .occurrences(1)
- .exec();
+ System.out.println("!!!!!!shard: "+shard.path().toString());
+ waitUntilLeader(shard);
+ }
- Thread.sleep(1000);
+ return dataStore;
+ }
+ void testWriteTransaction(DistributedDataStore dataStore, YangInstanceIdentifier nodePath,
+ NormalizedNode<?, ?> nodeToWrite) throws Exception {
- DOMStoreReadWriteTransaction transaction =
- distributedDataStore.newReadWriteTransaction();
+ // 1. Create a write-only Tx
- transaction.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
- transaction.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());
+ DOMStoreWriteTransaction writeTx = dataStore.newWriteOnlyTransaction();
+ assertNotNull("newWriteOnlyTransaction returned null", writeTx);
- DOMStoreThreePhaseCommitCohort ready = transaction.ready();
+ // 2. Write some data
- ListenableFuture<Boolean> canCommit = ready.canCommit();
+ writeTx.write(nodePath, nodeToWrite);
- assertTrue(canCommit.get(5, TimeUnit.SECONDS));
+ // 3. Ready the Tx for commit
- ListenableFuture<Void> preCommit = ready.preCommit();
+ DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- preCommit.get(5, TimeUnit.SECONDS);
+ // 4. Commit the Tx
- ListenableFuture<Void> commit = ready.commit();
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
- commit.get(5, TimeUnit.SECONDS);
+ // 5. Verify the data in the store
- assertEquals(true, result);
- } catch(ExecutionException | TimeoutException | InterruptedException e){
- fail(e.getMessage());
- }
- }
- };
- }
- };
+ DOMStoreReadTransaction readTx = dataStore.newReadOnlyTransaction();
+ Optional<NormalizedNode<?, ?>> optional = readTx.read(nodePath).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", nodeToWrite, optional.get());
+ }
+ void cleanup(DistributedDataStore dataStore) {
+ dataStore.getActorContext().getShardManager().tell(PoisonPill.getInstance(), null);
+ }
}
}
import akka.dispatch.ExecutionContexts;
import akka.dispatch.Futures;
import akka.util.Timeout;
+import com.google.common.base.Optional;
import com.google.common.util.concurrent.MoreExecutors;
import org.junit.After;
import org.junit.Before;
import scala.concurrent.ExecutionContextExecutor;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-
import java.util.concurrent.TimeUnit;
-
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNull;
import static org.junit.Assert.assertNotNull;
new DistributedDataStore(actorSystem, "config",
mock(ClusterWrapper.class), mock(Configuration.class),
- new DatastoreContext());
+ DatastoreContext.newBuilder().build());
verify(actorSystem).actorOf(any(Props.class), eq("shardmanager-config"));
}
ListenerRegistration registration =
distributedDataStore.registerChangeListener(TestModel.TEST_PATH, new AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>() {
- @Override
- public void onDataChanged(AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
- throw new UnsupportedOperationException("onDataChanged");
- }
- }, AsyncDataBroker.DataChangeScope.BASE);
+ @Override
+ public void onDataChanged(AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
+ throw new UnsupportedOperationException("onDataChanged");
+ }
+ }, AsyncDataBroker.DataChangeScope.BASE);
// Since we do not expect the shard to be local registration will return a NoOpRegistration
assertTrue(registration instanceof NoOpDataChangeListenerRegistration);
Future future = mock(Future.class);
when(actorContext.getOperationDuration()).thenReturn(FiniteDuration.apply(5, TimeUnit.SECONDS));
when(actorContext.getActorSystem()).thenReturn(getSystem());
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(future);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(future);
ListenerRegistration registration =
distributedDataStore.registerChangeListener(TestModel.TEST_PATH,
when(actorSystem.dispatcher()).thenReturn(executor);
when(actorSystem.actorOf(any(Props.class))).thenReturn(doNothingActorRef);
when(actorContext.getActorSystem()).thenReturn(actorSystem);
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(f);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(f);
when(actorContext.actorSelection(any(ActorPath.class))).thenReturn(actorSelection);
ListenerRegistration registration =
when(actorSystem.dispatcher()).thenReturn(executor);
when(actorSystem.actorOf(any(Props.class))).thenReturn(doNothingActorRef);
when(actorContext.getActorSystem()).thenReturn(actorSystem);
+ when(actorContext.findLocalShard(anyString())).thenReturn(Optional.of(doNothingActorRef));
when(actorContext
- .executeLocalShardOperationAsync(anyString(), anyObject(), any(Timeout.class))).thenReturn(f);
+ .executeOperationAsync(eq(doNothingActorRef), anyObject(), any(Timeout.class))).thenReturn(f);
when(actorContext.actorSelection(any(ActorPath.class))).thenReturn(actorSelection);
ListenerRegistration registration =
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
import akka.actor.Props;
-import akka.dispatch.Futures;
-import akka.japi.Procedure;
-import akka.persistence.PersistentConfirmation;
-import akka.persistence.PersistentId;
-import akka.persistence.PersistentImpl;
-import akka.persistence.PersistentRepr;
-import akka.persistence.journal.japi.AsyncWriteJournal;
+import akka.persistence.RecoveryCompleted;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
+import akka.japi.Creator;
import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigValueFactory;
-import org.junit.AfterClass;
+import org.junit.After;
import org.junit.Before;
-import org.junit.BeforeClass;
import org.junit.Test;
+import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.ActorInitialized;
+import org.opendaylight.controller.cluster.datastore.messages.ActorNotInitialized;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryNotFound;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
+import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
+import org.opendaylight.controller.cluster.datastore.utils.InMemoryJournal;
import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.yangtools.yang.model.api.ModuleIdentifier;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import scala.concurrent.Future;
-
import java.net.URI;
import java.util.Collection;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-
-import static junit.framework.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-public class ShardManagerTest {
- private static ActorSystem system;
+public class ShardManagerTest extends AbstractActorTest {
+ private static int ID_COUNTER = 1;
- @BeforeClass
- public static void setUpClass() {
- Map<String, String> myJournal = new HashMap<>();
- myJournal.put("class", "org.opendaylight.controller.cluster.datastore.ShardManagerTest$MyJournal");
- myJournal.put("plugin-dispatcher", "akka.actor.default-dispatcher");
- Config config = ConfigFactory.load()
- .withValue("akka.persistence.journal.plugin",
- ConfigValueFactory.fromAnyRef("my-journal"))
- .withValue("my-journal", ConfigValueFactory.fromMap(myJournal));
+ private final String shardMrgIDSuffix = "config" + ID_COUNTER++;
+ private final String shardMgrID = "shard-manager-" + shardMrgIDSuffix;
- MyJournal.clear();
+ private static ActorRef mockShardActor;
+
+ @Before
+ public void setUp() {
+ InMemoryJournal.clear();
- system = ActorSystem.create("test", config);
+ if(mockShardActor == null) {
+ String name = new ShardIdentifier(Shard.DEFAULT_NAME, "member-1","config").toString();
+ mockShardActor = getSystem().actorOf(Props.create(DoNothingActor.class), name);
+ }
}
- @AfterClass
- public static void tearDown() {
- JavaTestKit.shutdownActorSystem(system);
- system = null;
+ @After
+ public void tearDown() {
+ InMemoryJournal.clear();
}
- @Before
- public void setUpTest(){
- MyJournal.clear();
+ private Props newShardMgrProps() {
+ return ShardManager.props(shardMrgIDSuffix, new MockClusterWrapper(), new MockConfiguration(),
+ DatastoreContext.newBuilder().build());
}
@Test
public void testOnReceiveFindPrimaryForNonExistentShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- new JavaTestKit(system) {
- {
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
- final ActorRef subject = getSystem().actorOf(props);
+ shardManager.tell(new FindPrimary("non-existent").toSerializable(), getRef());
- subject.tell(new FindPrimary("inventory").toSerializable(), getRef());
-
- expectMsgEquals(duration("2 seconds"),
- new PrimaryNotFound("inventory").toSerializable());
- }};
+ expectMsgEquals(duration("5 seconds"),
+ new PrimaryNotFound("non-existent").toSerializable());
+ }};
}
@Test
public void testOnReceiveFindPrimaryForExistentShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ shardManager.tell(new ActorInitialized(), mockShardActor);
- final ActorRef subject = getSystem().actorOf(props);
+ shardManager.tell(new FindPrimary(Shard.DEFAULT_NAME).toSerializable(), getRef());
- subject.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ expectMsgClass(duration("5 seconds"), PrimaryFound.SERIALIZABLE_CLASS);
+ }};
+ }
+
+ @Test
+ public void testOnReceiveFindPrimaryForNotInitialzedShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- subject.tell(new FindPrimary(Shard.DEFAULT_NAME).toSerializable(), getRef());
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
- expectMsgClass(duration("1 seconds"), PrimaryFound.SERIALIZABLE_CLASS);
+ shardManager.tell(new FindPrimary(Shard.DEFAULT_NAME).toSerializable(), getRef());
+
+ expectMsgClass(duration("5 seconds"), ActorNotInitialized.class);
}};
}
@Test
public void testOnReceiveFindLocalShardForNonExistentShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
-
- final ActorRef subject = getSystem().actorOf(props);
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
- subject.tell(new FindLocalShard("inventory"), getRef());
+ shardManager.tell(new FindLocalShard("non-existent"), getRef());
- final String out = new ExpectMsg<String>(duration("3 seconds"), "find local") {
- @Override
- protected String match(Object in) {
- if (in instanceof LocalShardNotFound) {
- return ((LocalShardNotFound) in).getShardName();
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
+ LocalShardNotFound notFound = expectMsgClass(duration("5 seconds"), LocalShardNotFound.class);
- assertEquals("inventory", out);
+ assertEquals("getShardName", "non-existent", notFound.getShardName());
}};
}
@Test
public void testOnReceiveFindLocalShardForExistentShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- final MockClusterWrapper mockClusterWrapper = new MockClusterWrapper();
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ shardManager.tell(new ActorInitialized(), mockShardActor);
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", mockClusterWrapper,
- new MockConfiguration(), new DatastoreContext());
+ shardManager.tell(new FindLocalShard(Shard.DEFAULT_NAME), getRef());
- final ActorRef subject = getSystem().actorOf(props);
+ LocalShardFound found = expectMsgClass(duration("5 seconds"), LocalShardFound.class);
- subject.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ assertTrue("Found path contains " + found.getPath().path().toString(),
+ found.getPath().path().toString().contains("member-1-shard-default-config"));
+ }};
+ }
+
+ @Test
+ public void testOnReceiveFindLocalShardForNotInitializedShard() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- subject.tell(new FindLocalShard(Shard.DEFAULT_NAME), getRef());
+ shardManager.tell(new UpdateSchemaContext(TestModel.createTestContext()), getRef());
+ //shardManager.tell(new ActorInitialized(), mockShardActor);
- final ActorRef out = new ExpectMsg<ActorRef>(duration("3 seconds"), "find local") {
- @Override
- protected ActorRef match(Object in) {
- if (in instanceof LocalShardFound) {
- return ((LocalShardFound) in).getPath();
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
+ shardManager.tell(new FindLocalShard(Shard.DEFAULT_NAME), getRef());
- assertTrue(out.path().toString(),
- out.path().toString().contains("member-1-shard-default-config"));
+ expectMsgClass(duration("5 seconds"), ActorNotInitialized.class);
}};
}
@Test
public void testOnReceiveMemberUp() throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ MockClusterWrapper.sendMemberUp(shardManager, "member-2", getRef().path().toString());
- final ActorRef subject = getSystem().actorOf(props);
-
- MockClusterWrapper.sendMemberUp(subject, "member-2", getRef().path().toString());
-
- subject.tell(new FindPrimary("astronauts").toSerializable(), getRef());
-
- final String out = new ExpectMsg<String>(duration("3 seconds"), "primary found") {
- // do not put code outside this method, will run afterwards
- @Override
- protected String match(Object in) {
- if (in.getClass().equals(PrimaryFound.SERIALIZABLE_CLASS)) {
- PrimaryFound f = PrimaryFound.fromSerializable(in);
- return f.getPrimaryPath();
- } else {
- throw noMatch();
- }
- }
- }.get(); // this extracts the received message
+ shardManager.tell(new FindPrimary("astronauts").toSerializable(), getRef());
- assertTrue(out, out.contains("member-2-shard-astronauts-config"));
+ PrimaryFound found = PrimaryFound.fromSerializable(expectMsgClass(duration("5 seconds"),
+ PrimaryFound.SERIALIZABLE_CLASS));
+ String path = found.getPrimaryPath();
+ assertTrue("Found path contains " + path, path.contains("member-2-shard-astronauts-config"));
}};
}
@Test
public void testOnReceiveMemberDown() throws Exception {
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
+ new JavaTestKit(getSystem()) {{
+ final ActorRef shardManager = getSystem().actorOf(newShardMgrProps());
- final ActorRef subject = getSystem().actorOf(props);
+ MockClusterWrapper.sendMemberUp(shardManager, "member-2", getRef().path().toString());
- MockClusterWrapper.sendMemberUp(subject, "member-2", getRef().path().toString());
+ shardManager.tell(new FindPrimary("astronauts").toSerializable(), getRef());
- subject.tell(new FindPrimary("astronauts").toSerializable(), getRef());
+ expectMsgClass(duration("5 seconds"), PrimaryFound.SERIALIZABLE_CLASS);
- expectMsgClass(duration("3 seconds"), PrimaryFound.SERIALIZABLE_CLASS);
+ MockClusterWrapper.sendMemberRemoved(shardManager, "member-2", getRef().path().toString());
- MockClusterWrapper.sendMemberRemoved(subject, "member-2", getRef().path().toString());
+ shardManager.tell(new FindPrimary("astronauts").toSerializable(), getRef());
- subject.tell(new FindPrimary("astronauts").toSerializable(), getRef());
-
- expectMsgClass(duration("1 seconds"), PrimaryNotFound.SERIALIZABLE_CLASS);
+ expectMsgClass(duration("5 seconds"), PrimaryNotFound.SERIALIZABLE_CLASS);
}};
}
@Test
- public void testOnRecoveryJournalIsEmptied(){
- MyJournal.addToJournal(1L, new ShardManager.SchemaContextModules(
- ImmutableSet.of("foo")));
-
- assertEquals(1, MyJournal.get().size());
-
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
-
- final ActorRef subject = getSystem().actorOf(props);
-
- // Send message to check that ShardManager is ready
- subject.tell(new FindPrimary("unknown").toSerializable(), getRef());
-
- expectMsgClass(duration("3 seconds"), PrimaryNotFound.SERIALIZABLE_CLASS);
-
- assertEquals(0, MyJournal.get().size());
+ public void testOnRecoveryJournalIsCleaned() {
+ InMemoryJournal.addEntry(shardMgrID, 1L, new ShardManager.SchemaContextModules(
+ ImmutableSet.of("foo")));
+ InMemoryJournal.addEntry(shardMgrID, 2L, new ShardManager.SchemaContextModules(
+ ImmutableSet.of("bar")));
+ InMemoryJournal.addDeleteMessagesCompleteLatch(shardMgrID);
+
+ new JavaTestKit(getSystem()) {{
+ TestActorRef<TestShardManager> shardManager = TestActorRef.create(getSystem(),
+ Props.create(new TestShardManagerCreator(shardMrgIDSuffix)));
+
+ shardManager.underlyingActor().waitForRecoveryComplete();
+ InMemoryJournal.waitForDeleteMessagesComplete(shardMgrID);
+
+ // Journal entries up to the last one should've been deleted
+ Map<Long, Object> journal = InMemoryJournal.get(shardMgrID);
+ synchronized (journal) {
+ assertEquals("Journal size", 1, journal.size());
+ assertEquals("Journal entry seq #", Long.valueOf(2), journal.keySet().iterator().next());
+ }
}};
}
@Test
public void testOnRecoveryPreviouslyKnownModulesAreDiscovered() throws Exception {
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
- final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
+ final ImmutableSet<String> persistedModules = ImmutableSet.of("foo", "bar");
+ InMemoryJournal.addEntry(shardMgrID, 1L, new ShardManager.SchemaContextModules(
+ persistedModules));
+ new JavaTestKit(getSystem()) {{
+ TestActorRef<TestShardManager> shardManager = TestActorRef.create(getSystem(),
+ Props.create(new TestShardManagerCreator(shardMrgIDSuffix)));
- subject.underlyingActor().onReceiveRecover(new ShardManager.SchemaContextModules(ImmutableSet.of("foo")));
+ shardManager.underlyingActor().waitForRecoveryComplete();
- Collection<String> knownModules = subject.underlyingActor().getKnownModules();
+ Collection<String> knownModules = shardManager.underlyingActor().getKnownModules();
- assertTrue(knownModules.contains("foo"));
+ assertEquals("getKnownModules", persistedModules, Sets.newHashSet(knownModules));
}};
}
@Test
public void testOnUpdateSchemaContextUpdateKnownModulesIfTheyContainASuperSetOfTheKnownModules()
- throws Exception {
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
- final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
-
- Collection<String> knownModules = subject.underlyingActor().getKnownModules();
+ throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final TestActorRef<ShardManager> shardManager =
+ TestActorRef.create(getSystem(), newShardMgrProps());
- assertEquals(0, knownModules.size());
-
- SchemaContext schemaContext = mock(SchemaContext.class);
- Set<ModuleIdentifier> moduleIdentifierSet = new HashSet<>();
+ assertEquals("getKnownModules size", 0, shardManager.underlyingActor().getKnownModules().size());
ModuleIdentifier foo = mock(ModuleIdentifier.class);
when(foo.getNamespace()).thenReturn(new URI("foo"));
+ Set<ModuleIdentifier> moduleIdentifierSet = new HashSet<>();
moduleIdentifierSet.add(foo);
+ SchemaContext schemaContext = mock(SchemaContext.class);
when(schemaContext.getAllModuleIdentifiers()).thenReturn(moduleIdentifierSet);
- subject.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
-
- assertTrue(knownModules.contains("foo"));
+ shardManager.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
- assertEquals(1, knownModules.size());
+ assertEquals("getKnownModules", Sets.newHashSet("foo"),
+ Sets.newHashSet(shardManager.underlyingActor().getKnownModules()));
ModuleIdentifier bar = mock(ModuleIdentifier.class);
when(bar.getNamespace()).thenReturn(new URI("bar"));
moduleIdentifierSet.add(bar);
- subject.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
-
- assertTrue(knownModules.contains("bar"));
-
- assertEquals(2, knownModules.size());
+ shardManager.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
+ assertEquals("getKnownModules", Sets.newHashSet("foo", "bar"),
+ Sets.newHashSet(shardManager.underlyingActor().getKnownModules()));
}};
-
}
-
@Test
public void testOnUpdateSchemaContextDoNotUpdateKnownModulesIfTheyDoNotContainASuperSetOfKnownModules()
- throws Exception {
- new JavaTestKit(system) {{
- final Props props = ShardManager
- .props("config", new MockClusterWrapper(),
- new MockConfiguration(), new DatastoreContext());
- final TestActorRef<ShardManager> subject =
- TestActorRef.create(system, props);
-
- Collection<String> knownModules = subject.underlyingActor().getKnownModules();
-
- assertEquals(0, knownModules.size());
+ throws Exception {
+ new JavaTestKit(getSystem()) {{
+ final TestActorRef<ShardManager> shardManager =
+ TestActorRef.create(getSystem(), newShardMgrProps());
SchemaContext schemaContext = mock(SchemaContext.class);
Set<ModuleIdentifier> moduleIdentifierSet = new HashSet<>();
when(schemaContext.getAllModuleIdentifiers()).thenReturn(moduleIdentifierSet);
- subject.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
-
- assertTrue(knownModules.contains("foo"));
+ shardManager.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
- assertEquals(1, knownModules.size());
+ assertEquals("getKnownModules", Sets.newHashSet("foo"),
+ Sets.newHashSet(shardManager.underlyingActor().getKnownModules()));
//Create a completely different SchemaContext with only the bar module in it
- schemaContext = mock(SchemaContext.class);
- moduleIdentifierSet = new HashSet<>();
+ //schemaContext = mock(SchemaContext.class);
+ moduleIdentifierSet.clear();
ModuleIdentifier bar = mock(ModuleIdentifier.class);
when(bar.getNamespace()).thenReturn(new URI("bar"));
moduleIdentifierSet.add(bar);
- subject.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
+ shardManager.underlyingActor().onReceiveCommand(new UpdateSchemaContext(schemaContext));
- assertFalse(knownModules.contains("bar"));
-
- assertEquals(1, knownModules.size());
+ assertEquals("getKnownModules", Sets.newHashSet("foo"),
+ Sets.newHashSet(shardManager.underlyingActor().getKnownModules()));
}};
-
- }
-
-
- private void sleep(long period){
- Uninterruptibles.sleepUninterruptibly(period, TimeUnit.MILLISECONDS);
}
- public static class MyJournal extends AsyncWriteJournal {
-
- private static Map<Long, Object> journal = Maps.newTreeMap();
- public static void addToJournal(Long sequenceNr, Object value){
- journal.put(sequenceNr, value);
- }
+ private static class TestShardManager extends ShardManager {
+ private final CountDownLatch recoveryComplete = new CountDownLatch(1);
- public static Map<Long, Object> get(){
- return journal;
+ TestShardManager(String shardMrgIDSuffix) {
+ super(shardMrgIDSuffix, new MockClusterWrapper(), new MockConfiguration(),
+ DatastoreContext.newBuilder().build());
}
- public static void clear(){
- journal.clear();
- }
-
- @Override public Future<Void> doAsyncReplayMessages(final String persistenceId, long fromSequenceNr, long toSequenceNr, long max,
- final Procedure<PersistentRepr> replayCallback) {
- if(journal.size() == 0){
- return Futures.successful(null);
- }
- return Futures.future(new Callable<Void>() {
- @Override
- public Void call() throws Exception {
- for (Map.Entry<Long, Object> entry : journal.entrySet()) {
- PersistentRepr persistentMessage =
- new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId,
- false, null, null);
- replayCallback.apply(persistentMessage);
- }
- return null;
+ @Override
+ public void handleRecover(Object message) throws Exception {
+ try {
+ super.handleRecover(message);
+ } finally {
+ if(message instanceof RecoveryCompleted) {
+ recoveryComplete.countDown();
}
- }, context().dispatcher());
+ }
}
- @Override public Future<Long> doAsyncReadHighestSequenceNr(String s, long l) {
- return Futures.successful(-1L);
+ void waitForRecoveryComplete() {
+ assertEquals("Recovery complete", true,
+ Uninterruptibles.awaitUninterruptibly(recoveryComplete, 5, TimeUnit.SECONDS));
}
+ }
- @Override public Future<Void> doAsyncWriteMessages(
- final Iterable<PersistentRepr> persistentReprs) {
- return Futures.future(new Callable<Void>() {
- @Override
- public Void call() throws Exception {
- for (PersistentRepr repr : persistentReprs){
- if(repr.payload() instanceof ShardManager.SchemaContextModules) {
- journal.put(repr.sequenceNr(), repr.payload());
- }
- }
- return null;
- }
- }, context().dispatcher());
- }
+ @SuppressWarnings("serial")
+ static class TestShardManagerCreator implements Creator<TestShardManager> {
+ String shardMrgIDSuffix;
- @Override public Future<Void> doAsyncWriteConfirmations(
- Iterable<PersistentConfirmation> persistentConfirmations) {
- return Futures.successful(null);
+ TestShardManagerCreator(String shardMrgIDSuffix) {
+ this.shardMrgIDSuffix = shardMrgIDSuffix;
}
- @Override public Future<Void> doAsyncDeleteMessages(Iterable<PersistentId> persistentIds,
- boolean b) {
- clear();
- return Futures.successful(null);
+ @Override
+ public TestShardManager create() throws Exception {
+ return new TestShardManager(shardMrgIDSuffix);
}
- @Override public Future<Void> doAsyncDeleteMessagesTo(String s, long l, boolean b) {
- clear();
- return Futures.successful(null);
- }
}
}
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
import akka.actor.Props;
-import akka.event.Logging;
+import akka.dispatch.Dispatchers;
+import akka.dispatch.OnComplete;
import akka.japi.Creator;
+import akka.pattern.Patterns;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
+import akka.util.Timeout;
+import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.mockito.InOrder;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.EnableNotification;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolved;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import scala.concurrent.duration.Duration;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.inOrder;
public class ShardTest extends AbstractActorTest {
- private static final DatastoreContext DATA_STORE_CONTEXT =
- new DatastoreContext("", null, Duration.create(10, TimeUnit.MINUTES), 5, 3, 5000, 500);
-
private static final SchemaContext SCHEMA_CONTEXT = TestModel.createTestContext();
private static final ShardIdentifier IDENTIFIER = ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
+ private static final AtomicInteger NEXT_SHARD_NUM = new AtomicInteger();
+
+ private static String shardName() {
+ return "shard" + NEXT_SHARD_NUM.getAndIncrement();
+ }
+
+ private DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
+ shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).build();
+
@Before
public void setUp() {
System.setProperty("shard.persistent", "false");
private Props newShardProps() {
return Shard.props(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT);
+ dataStoreContext, SCHEMA_CONTEXT);
}
@Test
Props props = Shard.props(identifier,
Collections.<ShardIdentifier, String>singletonMap(identifier, null),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT);
+ dataStoreContext, SCHEMA_CONTEXT);
final ActorRef subject = getSystem().actorOf(props, "testPeerAddressResolved");
new Within(duration("3 seconds")) {
NormalizedNodeToNodeCodec codec =
new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT);
- ref.underlyingActor().writeToStore(TestModel.TEST_PATH, ImmutableNodes.containerNode(
- TestModel.TEST_QNAME));
+ writeToStore(ref, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
YangInstanceIdentifier root = YangInstanceIdentifier.builder().build();
- NormalizedNode<?,?> expected = ref.underlyingActor().readStore(root);
+ NormalizedNode<?,?> expected = readStore(ref, root);
- NormalizedNodeMessages.Container encode = codec.encode(root, expected);
+ NormalizedNodeMessages.Container encode = codec.encode(expected);
ApplySnapshot applySnapshot = new ApplySnapshot(Snapshot.create(
encode.getNormalizedNode().toByteString().toByteArray(),
ref.underlyingActor().onReceiveCommand(applySnapshot);
- NormalizedNode<?,?> actual = ref.underlyingActor().readStore(root);
+ NormalizedNode<?,?> actual = readStore(ref, root);
assertEquals(expected, actual);
}
shard.underlyingActor().onReceiveCommand(applyState);
- NormalizedNode<?,?> actual = shard.underlyingActor().readStore(TestModel.TEST_PATH);
+ NormalizedNode<?,?> actual = readStore(shard, TestModel.TEST_PATH);
assertEquals("Applied state", node, actual);
}
InMemorySnapshotStore.addSnapshot(IDENTIFIER.toString(), Snapshot.create(
new NormalizedNodeToNodeCodec(SCHEMA_CONTEXT).encode(
- YangInstanceIdentifier.builder().build(), root).
+ root).
getNormalizedNode().toByteString().toByteArray(),
Collections.<ReplicatedLogEntry>emptyList(), 0, 1, -1, -1));
@Override
public Shard create() throws Exception {
return new Shard(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
- DATA_STORE_CONTEXT, SCHEMA_CONTEXT) {
+ dataStoreContext, SCHEMA_CONTEXT) {
@Override
protected void onRecoveryComplete() {
try {
// Verify data in the data store.
- NormalizedNode<?, ?> outerList = shard.underlyingActor().readStore(TestModel.OUTER_LIST_PATH);
+ NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
outerList.getValue() instanceof Iterable);
return new CompositeModificationPayload(compMod.toSerializable());
}
- @SuppressWarnings("unchecked")
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
+ InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
+ MutableCompositeModification modification) {
+ return setupMockWriteTransaction(cohortName, dataStore, path, data, modification, null);
+ }
+
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
+ InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
+ MutableCompositeModification modification,
+ final Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit) {
+
+ DOMStoreWriteTransaction tx = dataStore.newWriteOnlyTransaction();
+ tx.write(path, data);
+ final DOMStoreThreePhaseCommitCohort realCohort = tx.ready();
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, cohortName);
+
+ doAnswer(new Answer<ListenableFuture<Boolean>>() {
+ @Override
+ public ListenableFuture<Boolean> answer(InvocationOnMock invocation) {
+ return realCohort.canCommit();
+ }
+ }).when(cohort).canCommit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ if(preCommit != null) {
+ return preCommit.apply(realCohort);
+ } else {
+ return realCohort.preCommit();
+ }
+ }
+ }).when(cohort).preCommit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ return realCohort.commit();
+ }
+ }).when(cohort).commit();
+
+ doAnswer(new Answer<ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ return realCohort.abort();
+ }
+ }).when(cohort).abort();
+
+ modification.addModification(new WriteModification(path, data, SCHEMA_CONTEXT));
+
+ return cohort;
+ }
+
+ @SuppressWarnings({ "unchecked" })
@Test
- public void testForwardedCommitTransactionWithPersistence() throws IOException {
+ public void testConcurrentThreePhaseCommits() throws Throwable {
System.setProperty("shard.persistent", "true");
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ // Setup 3 simulated transactions with mock cohorts backed by real cohorts.
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
+ TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
+ modification2);
+
+ String transactionID3 = "tx3";
+ MutableCompositeModification modification3 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore,
+ YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1),
+ modification3);
+
+ long timeoutSec = 5;
+ final FiniteDuration duration = FiniteDuration.create(timeoutSec, TimeUnit.SECONDS);
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction message for the first Tx that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
+
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the ForwardedReadyTransaction for the next 2 Tx's.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message for the next 2 Tx's. These should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture1 = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ Future<Object> canCommitFuture2 = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID3).toSerializable(), timeout);
+
+ // Send the CommitTransaction message for the first Tx. After it completes, it should
+ // trigger the 2nd Tx to proceed which should in turn then trigger the 3rd.
+
+ shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Wait for the next 2 Tx's to complete.
+
+ final AtomicReference<Throwable> caughtEx = new AtomicReference<>();
+ final CountDownLatch commitLatch = new CountDownLatch(2);
+
+ class OnFutureComplete extends OnComplete<Object> {
+ private final Class<?> expRespType;
+
+ OnFutureComplete(Class<?> expRespType) {
+ this.expRespType = expRespType;
+ }
+
+ @Override
+ public void onComplete(Throwable error, Object resp) {
+ if(error != null) {
+ System.out.println(new java.util.Date()+": "+getClass().getSimpleName() + " failure: "+error);
+ caughtEx.set(new AssertionError(getClass().getSimpleName() + " failure", error));
+ } else {
+ try {
+ assertEquals("Commit response type", expRespType, resp.getClass());
+ onSuccess(resp);
+ } catch (Exception e) {
+ caughtEx.set(e);
+ }
+ }
+ }
+
+ void onSuccess(Object resp) throws Exception {
+ }
+ }
+
+ class OnCommitFutureComplete extends OnFutureComplete {
+ OnCommitFutureComplete() {
+ super(CommitTransactionReply.SERIALIZABLE_CLASS);
+ }
+
+ @Override
+ public void onComplete(Throwable error, Object resp) {
+ super.onComplete(error, resp);
+ commitLatch.countDown();
+ }
+ }
+
+ class OnCanCommitFutureComplete extends OnFutureComplete {
+ private final String transactionID;
+ OnCanCommitFutureComplete(String transactionID) {
+ super(CanCommitTransactionReply.SERIALIZABLE_CLASS);
+ this.transactionID = transactionID;
+ }
+
+ @Override
+ void onSuccess(Object resp) throws Exception {
+ CanCommitTransactionReply canCommitReply =
+ CanCommitTransactionReply.fromSerializable(resp);
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ Future<Object> commitFuture = Patterns.ask(shard,
+ new CommitTransaction(transactionID).toSerializable(), timeout);
+ commitFuture.onComplete(new OnCommitFutureComplete(), getSystem().dispatcher());
+ }
+ }
+
+ canCommitFuture1.onComplete(new OnCanCommitFutureComplete(transactionID2),
+ getSystem().dispatcher());
+
+ canCommitFuture2.onComplete(new OnCanCommitFutureComplete(transactionID3),
+ getSystem().dispatcher());
+
+ boolean done = commitLatch.await(timeoutSec, TimeUnit.SECONDS);
+
+ if(caughtEx.get() != null) {
+ throw caughtEx.get();
+ }
+
+ assertEquals("Commits complete", true, done);
+
+ InOrder inOrder = inOrder(cohort1, cohort2, cohort3);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort1).preCommit();
+ inOrder.verify(cohort1).commit();
+ inOrder.verify(cohort2).canCommit();
+ inOrder.verify(cohort2).preCommit();
+ inOrder.verify(cohort2).commit();
+ inOrder.verify(cohort3).canCommit();
+ inOrder.verify(cohort3).preCommit();
+ inOrder.verify(cohort3).commit();
+
+ // Verify data in the data store.
+
+ NormalizedNode<?, ?> outerList = readStore(shard, TestModel.OUTER_LIST_PATH);
+ assertNotNull(TestModel.OUTER_LIST_QNAME.getLocalName() + " not found", outerList);
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " value is not Iterable",
+ outerList.getValue() instanceof Iterable);
+ Object entry = ((Iterable<Object>)outerList.getValue()).iterator().next();
+ assertTrue(TestModel.OUTER_LIST_QNAME.getLocalName() + " entry is not MapEntryNode",
+ entry instanceof MapEntryNode);
+ MapEntryNode mapEntry = (MapEntryNode)entry;
+ Optional<DataContainerChild<? extends PathArgument, ?>> idLeaf =
+ mapEntry.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.ID_QNAME));
+ assertTrue("Missing leaf " + TestModel.ID_QNAME.getLocalName(), idLeaf.isPresent());
+ assertEquals(TestModel.ID_QNAME.getLocalName() + " value", 1, idLeaf.get().getValue());
+
+ assertEquals("Last log index", 2, shard.underlyingActor().getShardMBean().getLastLogIndex());
+ }};
+ }
+
+ @Test
+ public void testCommitPhaseFailure() throws Throwable {
new ShardTestKit(getSystem()) {{
- TestActorRef<Shard> shard = TestActorRef.create(getSystem(), newShardProps());
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
waitUntilLeader(shard);
- NormalizedNode<?, ?> node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ // Setup 2 simulated transactions with mock cohorts. The first one fails in the
+ // commit phase.
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(cohort1).preCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort1).commit();
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class);
- doReturn(Futures.immediateFuture(null)).when(cohort).commit();
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ // Send the CommitTransaction message for the first Tx. This should send back an error
+ // and trigger the 2nd Tx to proceed.
+
+ shard.tell(new CommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+
+ // Wait for the 2nd Tx to complete the canCommit phase.
+
+ final CountDownLatch latch = new CountDownLatch(1);
+ canCommitFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable t, Object resp) {
+ latch.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS));
+
+ InOrder inOrder = inOrder(cohort1, cohort2);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort1).preCommit();
+ inOrder.verify(cohort1).commit();
+ inOrder.verify(cohort2).canCommit();
+ }};
+ }
+
+ @Test
+ public void testPreCommitPhaseFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ String transactionID = "tx1";
MutableCompositeModification modification = new MutableCompositeModification();
- modification.addModification(new WriteModification(TestModel.TEST_PATH, node,
- SCHEMA_CONTEXT));
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort).canCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).preCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+
+ // Simulate the ForwardedReadyTransaction message that would be sent
+ // by the ShardTransaction.
- shard.tell(new ForwardedCommitTransaction(cohort, modification), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- expectMsgClass(duration("5 seconds"), CommitTransactionReply.SERIALIZABLE_CLASS);
+ // Send the CanCommitTransaction message.
- verify(cohort).commit();
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
- assertEquals("Last log index", 0, shard.underlyingActor().getShardMBean().getLastLogIndex());
+ // Send the CommitTransaction message. This should send back an error
+ // for preCommit failure.
+
+ shard.tell(new CommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+
+ InOrder inOrder = inOrder(cohort);
+ inOrder.verify(cohort).canCommit();
+ inOrder.verify(cohort).preCommit();
+ }};
+ }
+
+ @Test
+ public void testCanCommitPhaseFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ String transactionID = "tx1";
+ MutableCompositeModification modification = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock"))).when(cohort).canCommit();
+
+ // Simulate the ForwardedReadyTransaction message that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message.
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testAbortBeforeFinishCommit() throws Throwable {
+ System.setProperty("shard.persistent", "true");
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ final String transactionID = "tx1";
+ final CountDownLatch abortComplete = new CountDownLatch(1);
+ Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit =
+ new Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>>() {
+ @Override
+ public ListenableFuture<Void> apply(final DOMStoreThreePhaseCommitCohort cohort) {
+ ListenableFuture<Void> preCommitFuture = cohort.preCommit();
+
+ Future<Object> abortFuture = Patterns.ask(shard,
+ new AbortTransaction(transactionID).toSerializable(), timeout);
+ abortFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable e, Object resp) {
+ abortComplete.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ return preCommitFuture;
+ }
+ };
+
+ MutableCompositeModification modification = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME),
+ modification, preCommit);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ Future<Object> commitFuture = Patterns.ask(shard,
+ new CommitTransaction(transactionID).toSerializable(), timeout);
+
+ assertEquals("Abort complete", true, abortComplete.await(5, TimeUnit.SECONDS));
+
+ Await.result(commitFuture, duration);
+
+ NormalizedNode<?, ?> node = readStore(shard, TestModel.TEST_PATH);
+ assertNotNull(TestModel.TEST_QNAME.getLocalName() + " not found", node);
+ }};
+ }
+
+ @Test
+ public void testTransactionCommitTimeout() throws Throwable {
+ dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitTimeoutInSeconds(1).build();
+
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ writeToStore(shard, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+ writeToStore(shard, TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
+
+ // Create 1st Tx - will timeout
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1).build(),
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1),
+ modification1);
+
+ // Create 2nd Tx
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ YangInstanceIdentifier listNodePath = YangInstanceIdentifier.builder(TestModel.OUTER_LIST_PATH)
+ .nodeWithKey(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2).build();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
+ listNodePath,
+ ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 2),
+ modification2);
+
+ // Ready the Tx's
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit 1st Tx. We don't send the commit so it should timeout.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit the 2nd Tx - it should complete after the 1st Tx times out.
+
+ shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // Commit the 2nd Tx.
+
+ shard.tell(new CommitTransaction(transactionID2).toSerializable(), getRef());
+ expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+ NormalizedNode<?, ?> node = readStore(shard, listNodePath);
+ assertNotNull(listNodePath + " not found", node);
+ }};
+ }
+
+ @Test
+ public void testTransactionCommitQueueCapacityExceeded() throws Throwable {
+ dataStoreContext = DatastoreContext.newBuilder().shardTransactionCommitQueueCapacity(1).build();
+
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ final FiniteDuration duration = duration("5 seconds");
+
+ InMemoryDOMDataStore dataStore = shard.underlyingActor().getDataStore();
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = setupMockWriteTransaction("cohort1", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification1);
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = setupMockWriteTransaction("cohort2", dataStore,
+ TestModel.OUTER_LIST_PATH,
+ ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build(),
+ modification2);
+
+ String transactionID3 = "tx3";
+ MutableCompositeModification modification3 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort3 = setupMockWriteTransaction("cohort3", dataStore,
+ TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), modification3);
+
+ // Ready the Tx's
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit 1st Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+ // canCommit the 2nd Tx - it should get queued.
+
+ shard.tell(new CanCommitTransaction(transactionID2).toSerializable(), getRef());
+
+ // canCommit the 3rd Tx - should exceed queue capacity and fail.
+
+ shard.tell(new CanCommitTransaction(transactionID3).toSerializable(), getRef());
+ expectMsgClass(duration, akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testCanCommitBeforeReadyFailure() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ shard.tell(new CanCommitTransaction("tx").toSerializable(), getRef());
+ expectMsgClass(duration("5 seconds"), akka.actor.Status.Failure.class);
+ }};
+ }
+
+ @Test
+ public void testAbortTransaction() throws Throwable {
+ new ShardTestKit(getSystem()) {{
+ final TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ newShardProps().withDispatcher(Dispatchers.DefaultDispatcherId()), shardName());
+
+ waitUntilLeader(shard);
+
+ // Setup 2 simulated transactions with mock cohorts. The first one will be aborted.
+
+ String transactionID1 = "tx1";
+ MutableCompositeModification modification1 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort1 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort1");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(cohort1).abort();
+
+ String transactionID2 = "tx2";
+ MutableCompositeModification modification2 = new MutableCompositeModification();
+ DOMStoreThreePhaseCommitCohort cohort2 = mock(DOMStoreThreePhaseCommitCohort.class, "cohort2");
+ doReturn(Futures.immediateFuture(Boolean.TRUE)).when(cohort2).canCommit();
+
+ FiniteDuration duration = duration("5 seconds");
+ final Timeout timeout = new Timeout(duration);
+
+ // Simulate the ForwardedReadyTransaction messages that would be sent
+ // by the ShardTransaction.
+
+ shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2), getRef());
+ expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
+
+ // Send the CanCommitTransaction message for the first Tx.
+
+ shard.tell(new CanCommitTransaction(transactionID1).toSerializable(), getRef());
+ CanCommitTransactionReply canCommitReply = CanCommitTransactionReply.fromSerializable(
+ expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS));
+ assertEquals("Can commit", true, canCommitReply.getCanCommit());
+
+ // Send the CanCommitTransaction message for the 2nd Tx. This should get queued and
+ // processed after the first Tx completes.
+
+ Future<Object> canCommitFuture = Patterns.ask(shard,
+ new CanCommitTransaction(transactionID2).toSerializable(), timeout);
+
+ // Send the AbortTransaction message for the first Tx. This should trigger the 2nd
+ // Tx to proceed.
+
+ shard.tell(new AbortTransaction(transactionID1).toSerializable(), getRef());
+ expectMsgClass(duration, AbortTransactionReply.SERIALIZABLE_CLASS);
+
+ // Wait for the 2nd Tx to complete the canCommit phase.
+
+ final CountDownLatch latch = new CountDownLatch(1);
+ canCommitFuture.onComplete(new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable t, Object resp) {
+ latch.countDown();
+ }
+ }, getSystem().dispatcher());
+
+ assertEquals("2nd CanCommit complete", true, latch.await(5, TimeUnit.SECONDS));
+
+ InOrder inOrder = inOrder(cohort1, cohort2);
+ inOrder.verify(cohort1).canCommit();
+ inOrder.verify(cohort2).canCommit();
}};
}
@Test
public void testCreateSnapshot() throws IOException, InterruptedException {
new ShardTestKit(getSystem()) {{
- final ActorRef subject = getSystem().actorOf(newShardProps(), "testCreateSnapshot");
+ final AtomicReference<CountDownLatch> latch = new AtomicReference<>(new CountDownLatch(1));
+ Creator<Shard> creator = new Creator<Shard>() {
+ @Override
+ public Shard create() throws Exception {
+ return new Shard(IDENTIFIER, Collections.<ShardIdentifier,String>emptyMap(),
+ dataStoreContext, SCHEMA_CONTEXT) {
+ @Override
+ public void saveSnapshot(Object snapshot) {
+ super.saveSnapshot(snapshot);
+ latch.get().countDown();
+ }
+ };
+ }
+ };
- waitUntilLeader(subject);
+ TestActorRef<Shard> shard = TestActorRef.create(getSystem(),
+ Props.create(new DelegatingShardCreator(creator)), "testCreateSnapshot");
- subject.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ waitUntilLeader(shard);
+
+ shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
- waitForLogMessage(Logging.Info.class, subject, "CaptureSnapshotReply received by actor");
+ assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
- subject.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
+ latch.set(new CountDownLatch(1));
+ shard.tell(new CaptureSnapshot(-1,-1,-1,-1), getRef());
- waitForLogMessage(Logging.Info.class, subject, "CaptureSnapshotReply received by actor");
+ assertEquals("Snapshot saved", true, latch.get().await(5, TimeUnit.SECONDS));
}};
}
};
}
- private static final class DelegatingShardCreator implements Creator<Shard> {
- private final Creator<Shard> delegate;
+ private NormalizedNode<?,?> readStore(TestActorRef<Shard> shard, YangInstanceIdentifier id)
+ throws ExecutionException, InterruptedException {
+ DOMStoreReadTransaction transaction = shard.underlyingActor().getDataStore().newReadOnlyTransaction();
- DelegatingShardCreator(Creator<Shard> delegate) {
- this.delegate = delegate;
- }
+ CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> future =
+ transaction.read(id);
- @Override
- public Shard create() throws Exception {
- return delegate.create();
- }
+ Optional<NormalizedNode<?, ?>> optional = future.get();
+ NormalizedNode<?, ?> node = optional.isPresent()? optional.get() : null;
+
+ transaction.close();
+
+ return node;
}
- private static class ShardTestKit extends JavaTestKit {
+ private void writeToStore(TestActorRef<Shard> shard, YangInstanceIdentifier id, NormalizedNode<?,?> node)
+ throws ExecutionException, InterruptedException {
+ DOMStoreWriteTransaction transaction = shard.underlyingActor().getDataStore().newWriteOnlyTransaction();
- private ShardTestKit(ActorSystem actorSystem) {
- super(actorSystem);
- }
+ transaction.write(id, node);
- protected void waitForLogMessage(final Class logLevel, ActorRef subject, String logMessage){
- // Wait for a specific log message to show up
- final boolean result =
- new JavaTestKit.EventFilter<Boolean>(logLevel
- ) {
- @Override
- protected Boolean run() {
- return true;
- }
- }.from(subject.path().toString())
- .message(logMessage)
- .occurrences(1).exec();
+ DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
+ commitCohort.preCommit().get();
+ commitCohort.commit().get();
+ }
- Assert.assertEquals(true, result);
+ private static final class DelegatingShardCreator implements Creator<Shard> {
+ private final Creator<Shard> delegate;
+ DelegatingShardCreator(Creator<Shard> delegate) {
+ this.delegate = delegate;
}
- protected void waitUntilLeader(ActorRef subject) {
- waitForLogMessage(Logging.Info.class, subject,
- "Switching from state Candidate to Leader");
+ @Override
+ public Shard create() throws Exception {
+ return delegate.create();
}
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore;
+
+import java.util.concurrent.TimeUnit;
+import org.junit.Assert;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeader;
+import org.opendaylight.controller.cluster.raft.client.messages.FindLeaderReply;
+import com.google.common.util.concurrent.Uninterruptibles;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.pattern.Patterns;
+import akka.testkit.JavaTestKit;
+import akka.util.Timeout;
+
+class ShardTestKit extends JavaTestKit {
+
+ ShardTestKit(ActorSystem actorSystem) {
+ super(actorSystem);
+ }
+
+ protected void waitForLogMessage(final Class logLevel, ActorRef subject, String logMessage){
+ // Wait for a specific log message to show up
+ final boolean result =
+ new JavaTestKit.EventFilter<Boolean>(logLevel
+ ) {
+ @Override
+ protected Boolean run() {
+ return true;
+ }
+ }.from(subject.path().toString())
+ .message(logMessage)
+ .occurrences(1).exec();
+
+ Assert.assertEquals(true, result);
+
+ }
+
+ protected void waitUntilLeader(ActorRef shard) {
+ for(int i = 0; i < 20 * 5; i++) {
+ Future<Object> future = Patterns.ask(shard, new FindLeader(), new Timeout(5, TimeUnit.SECONDS));
+ try {
+ FindLeaderReply resp = (FindLeaderReply)Await.result(future, Duration.create(5, TimeUnit.SECONDS));
+ if(resp.getLeaderActor() != null) {
+ return;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
+ }
+
+ Assert.fail("Leader not found for shard " + shard.path());
+ }
+}
\ No newline at end of file
import akka.actor.ActorRef;
import akka.actor.Props;
import akka.testkit.TestActorRef;
-
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
-
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
-
import java.util.Collections;
import java.util.concurrent.TimeUnit;
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("operational").build();
- private final DatastoreContext datastoreContext = new DatastoreContext();
+ private final DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
}
private ActorRef createShard(){
- return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, new DatastoreContext(), TestModel.createTestContext()));
+ return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext,
+ TestModel.createTestContext()));
}
@Test(expected = ReadFailedException.class)
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props, "testNegativeMergeTransactionReady");
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
import akka.actor.Terminated;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
-
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
-
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException;
import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.duration.Duration;
-
import java.util.Collections;
import java.util.concurrent.TimeUnit;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- private DatastoreContext datastoreContext = new DatastoreContext();
+ private DatastoreContext datastoreContext = DatastoreContext.newBuilder().build();
private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
private ActorRef createShard(){
return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER,
- Collections.EMPTY_MAP, new DatastoreContext(), TestModel.createTestContext()));
+ Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()));
}
@Test
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testReadData");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testReadDataWhenDataNotFound");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testDataExistsPositive");
new Within(duration("1 seconds")) {
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject = getSystem().actorOf(props, "testDataExistsNegative");
new Within(duration("1 seconds")) {
private void assertModification(final ActorRef subject,
final Class<? extends Modification> modificationType) {
new JavaTestKit(getSystem()) {{
- new Within(duration("1 seconds")) {
+ new Within(duration("3 seconds")) {
@Override
protected void run() {
subject
- .tell(new ShardTransaction.GetCompositedModification(),
+ .tell(new ShardWriteTransaction.GetCompositedModification(),
getRef());
final CompositeModification compositeModification =
- new ExpectMsg<CompositeModification>(duration("1 seconds"), "match hint") {
+ new ExpectMsg<CompositeModification>(duration("3 seconds"), "match hint") {
// do not put code outside this method, will run afterwards
@Override
protected CompositeModification match(Object in) {
- if (in instanceof ShardTransaction.GetCompositeModificationReply) {
- return ((ShardTransaction.GetCompositeModificationReply) in)
+ if (in instanceof ShardWriteTransaction.GetCompositeModificationReply) {
+ return ((ShardWriteTransaction.GetCompositeModificationReply) in)
.getModification();
} else {
throw noMatch();
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testWriteData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testMergeData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testDeleteData");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testReadyTransaction");
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testCloseTransaction");
public void testNegativePerformingWriteOperationOnReadTransaction() throws Exception {
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final TestActorRef subject = TestActorRef.apply(props,getSystem());
subject.receive(new DeleteData(TestModel.TEST_PATH).toSerializable(), ActorRef.noSender());
@Test
public void testShardTransactionInactivity() {
- datastoreContext = new DatastoreContext("Test",
- InMemoryDOMDataStoreConfigProperties.getDefault(),
- Duration.create(500, TimeUnit.MILLISECONDS), 5, 1000, 1000, 500);
+ datastoreContext = DatastoreContext.newBuilder().shardTransactionIdleTimeout(
+ Duration.create(500, TimeUnit.MILLISECONDS)).build();
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats);
+ testSchemaContext, datastoreContext, shardStats, "txn");
final ActorRef subject =
getSystem().actorOf(props, "testShardTransactionInactivity");
+++ /dev/null
-/*
- *
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- *
- */
-
-package org.opendaylight.controller.cluster.datastore;
-
-import akka.actor.ActorRef;
-import akka.actor.Props;
-import akka.testkit.TestActorRef;
-import akka.util.Timeout;
-
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
-import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
-import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
-import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
-import org.opendaylight.controller.cluster.datastore.modification.Modification;
-import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
-import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
-import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
-import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
-import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
-import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.Duration;
-import scala.concurrent.duration.FiniteDuration;
-
-import java.util.Collections;
-import java.util.concurrent.TimeUnit;
-
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.when;
-
-
-public class ThreePhaseCommitCohortFailureTest extends AbstractActorTest {
-
- private static ListeningExecutorService storeExecutor =
- MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor());
-
- private static final InMemoryDOMDataStore store =
- new InMemoryDOMDataStore("OPER", storeExecutor,
- MoreExecutors.sameThreadExecutor());
-
- private static final SchemaContext testSchemaContext =
- TestModel.createTestContext();
-
- private static final ShardIdentifier SHARD_IDENTIFIER =
- ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config").build();
-
- private final DatastoreContext datastoreContext = new DatastoreContext();
-
- private final ShardStats shardStats = new ShardStats(SHARD_IDENTIFIER.toString(), "DataStore");
-
- @BeforeClass
- public static void staticSetup() {
- store.onGlobalContextUpdated(testSchemaContext);
- }
-
- private final FiniteDuration ASK_RESULT_DURATION = Duration.create(5000, TimeUnit.MILLISECONDS);
-
- private ActorRef createShard(){
- return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()));
- }
-
- @Test(expected = TestException.class)
- public void testNegativeAbortResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativeAbortResultsInException");
-
- when(mockCohort.abort()).thenReturn(
- Futures.<Void>immediateFailedFuture(new TestException()));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder()
- .build(), 3000);
- assertTrue(future.isCompleted());
-
- Await.result(future, ASK_RESULT_DURATION);
- }
-
-
- @Test(expected = OptimisticLockFailedException.class)
- public void testNegativeCanCommitResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativeCanCommitResultsInException");
-
- when(mockCohort.canCommit()).thenReturn(
- Futures
- .<Boolean>immediateFailedFuture(
- new OptimisticLockFailedException("some exception")));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder()
- .build(), 3000);
-
-
- Await.result(future, ASK_RESULT_DURATION);
-
- }
-
-
- @Test(expected = TestException.class)
- public void testNegativePreCommitResultsInException() throws Exception {
-
- final ActorRef shard = createShard();
- final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
- .mock(DOMStoreThreePhaseCommitCohort.class);
- final CompositeModification mockComposite =
- Mockito.mock(CompositeModification.class);
- final Props props =
- ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite, shardStats);
-
- final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
- .create(getSystem(), props,
- "testNegativePreCommitResultsInException");
-
- when(mockCohort.preCommit()).thenReturn(
- Futures
- .<Void>immediateFailedFuture(
- new TestException()));
-
- Future<Object> future =
- akka.pattern.Patterns.ask(subject,
- ThreePhaseCommitCohortMessages.PreCommitTransaction.newBuilder()
- .build(), 3000);
-
- Await.result(future, ASK_RESULT_DURATION);
-
- }
-
- @Test(expected = TestException.class)
- public void testNegativeCommitResultsInException() throws Exception {
-
- final TestActorRef<Shard> subject = TestActorRef.create(getSystem(),
- Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()),
- "testNegativeCommitResultsInException");
-
- final ActorRef shardTransaction =
- getSystem().actorOf(ShardTransaction.props(store.newReadWriteTransaction(), subject,
- testSchemaContext, datastoreContext, shardStats));
-
- ShardTransactionMessages.WriteData writeData =
- ShardTransactionMessages.WriteData.newBuilder()
- .setInstanceIdentifierPathArguments(
- NormalizedNodeMessages.InstanceIdentifier.newBuilder()
- .build()).setNormalizedNode(
- NormalizedNodeMessages.Node.newBuilder().build()
-
- ).build();
-
- Timeout askTimeout = new Timeout(ASK_RESULT_DURATION);
-
- //This is done so that Modification list is updated which is used during commit
- Future<Object> future = akka.pattern.Patterns.ask(shardTransaction, writeData, askTimeout);
-
- //ready transaction creates the cohort so that we get into the
- //block where in commmit is done
- ShardTransactionMessages.ReadyTransaction readyTransaction =
- ShardTransactionMessages.ReadyTransaction.newBuilder().build();
-
- future = akka.pattern.Patterns.ask(shardTransaction, readyTransaction, askTimeout);
-
- //but when the message is sent it will have the MockCommit object
- //so that we can simulate throwing of exception
- ForwardedCommitTransaction mockForwardCommitTransaction =
- Mockito.mock(ForwardedCommitTransaction.class);
- DOMStoreThreePhaseCommitCohort mockThreePhaseCommitTransaction =
- Mockito.mock(DOMStoreThreePhaseCommitCohort.class);
- when(mockForwardCommitTransaction.getCohort())
- .thenReturn(mockThreePhaseCommitTransaction);
- when(mockThreePhaseCommitTransaction.commit()).thenReturn(Futures
- .<Void>immediateFailedFuture(
- new TestException()));
- Modification mockModification = Mockito.mock(
- Modification.class);
- when(mockForwardCommitTransaction.getModification())
- .thenReturn(mockModification);
-
- when(mockModification.toSerializable()).thenReturn(
- PersistentMessages.CompositeModification.newBuilder().build());
-
- future = akka.pattern.Patterns.ask(subject, mockForwardCommitTransaction, askTimeout);
- Await.result(future, ASK_RESULT_DURATION);
- }
-
- private class TestException extends Exception {
- }
-}
import akka.actor.ActorSelection;
import akka.actor.Props;
import akka.dispatch.Futures;
-
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.isA;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.times;
-
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.opendaylight.controller.cluster.datastore.messages.SerializableMessage;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
-
import scala.concurrent.Future;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.isA;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
public class ThreePhaseCommitCohortProxyTest extends AbstractActorTest {
@SuppressWarnings("serial")
doReturn(getSystem()).when(actorContext).getActorSystem();
}
- private Future<ActorPath> newCohortPath() {
+ private Future<ActorSelection> newCohort() {
ActorPath path = getSystem().actorOf(Props.create(DoNothingActor.class)).path();
- doReturn(mock(ActorSelection.class)).when(actorContext).actorSelection(path);
- return Futures.successful(path);
+ ActorSelection actorSelection = getSystem().actorSelection(path);
+ return Futures.successful(actorSelection);
}
private final ThreePhaseCommitCohortProxy setupProxy(int nCohorts) throws Exception {
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
for(int i = 1; i <= nCohorts; i++) {
- cohortPathFutures.add(newCohortPath());
+ cohortFutures.add(newCohort());
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures, "txn-1");
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures, "txn-1");
}
private ThreePhaseCommitCohortProxy setupProxyWithFailedCohortPath()
throws Exception {
- List<Future<ActorPath>> cohortPathFutures = Lists.newArrayList();
- cohortPathFutures.add(newCohortPath());
- cohortPathFutures.add(Futures.<ActorPath>failed(new TestException()));
+ List<Future<ActorSelection>> cohortFutures = Lists.newArrayList();
+ cohortFutures.add(newCohort());
+ cohortFutures.add(Futures.<ActorSelection>failed(new TestException()));
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPathFutures, "txn-1");
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures, "txn-1");
}
private void setupMockActorContext(Class<?> requestType, Object... responses) {
.successful(((SerializableMessage) responses[i]).toSerializable()));
}
- stubber.when(actorContext).executeRemoteOperationAsync(any(ActorSelection.class),
+ stubber.when(actorContext).executeOperationAsync(any(ActorSelection.class),
isA(requestType));
}
private void verifyCohortInvocations(int nCohorts, Class<?> requestType) {
- verify(actorContext, times(nCohorts)).executeRemoteOperationAsync(
+ verify(actorContext, times(nCohorts)).executeOperationAsync(
any(ActorSelection.class), isA(requestType));
}
@Test
public void testPreCommit() throws Exception {
+ // Precommit is currently a no-op
ThreePhaseCommitCohortProxy proxy = setupProxy(1);
setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
new PreCommitTransactionReply());
proxy.preCommit().get(5, TimeUnit.SECONDS);
-
- verifyCohortInvocations(1, PreCommitTransaction.SERIALIZABLE_CLASS);
- }
-
- @Test(expected = ExecutionException.class)
- public void testPreCommitWithFailure() throws Exception {
- ThreePhaseCommitCohortProxy proxy = setupProxy(2);
-
- setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
- new PreCommitTransactionReply(), new RuntimeException("mock"));
-
- proxy.preCommit().get(5, TimeUnit.SECONDS);
}
@Test
proxy.commit().get(5, TimeUnit.SECONDS);
verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
- verifyCohortInvocations(2, PreCommitTransaction.SERIALIZABLE_CLASS);
verifyCohortInvocations(2, CommitTransaction.SERIALIZABLE_CLASS);
}
}
package org.opendaylight.controller.cluster.datastore;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
-import akka.actor.ActorPath;
+import com.google.common.util.concurrent.CheckedFuture;
+
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.Props;
import akka.dispatch.Futures;
-
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
-
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
-import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
-
import org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
+
import java.util.List;
import java.util.concurrent.TimeUnit;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.eq;
-import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.isA;
import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
@SuppressWarnings("resource")
public class TransactionProxyTest extends AbstractActorTest {
return argThat(matcher);
}
- private Future<Object> readyTxReply(ActorPath path) {
+ private Future<Object> readyTxReply(String path) {
return Futures.successful((Object)new ReadyTransactionReply(path).toSerializable());
}
ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
doReturn(getSystem().actorSelection(actorRef.path())).
when(mockActorContext).actorSelection(actorRef.path().toString());
+
+ doReturn(Optional.of(getSystem().actorSelection(actorRef.path()))).
+ when(mockActorContext).findPrimaryShard(eq(DefaultShardStrategy.DEFAULT_SHARD));
+
doReturn(createTransactionReply(actorRef)).when(mockActorContext).
- executeShardOperation(eq(DefaultShardStrategy.DEFAULT_SHARD),
+ executeOperation(eq(getSystem().actorSelection(actorRef.path())),
eqCreateTransaction(memberName, type));
- doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(
- anyString(), eq(actorRef.path().toString()));
- doReturn(actorRef.path()).when(mockActorContext).actorFor(actorRef.path().toString());
-
return actorRef;
}
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
Optional<NormalizedNode<?, ?>> readOptional = transactionProxy.read(
NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
readOptional = transactionProxy.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
private void testExceptionOnInitialCreateTransaction(Exception exToThrow, Invoker invoker)
throws Throwable {
+ ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
- doThrow(exToThrow).when(mockActorContext).executeShardOperation(
- anyString(), any());
+ if (exToThrow instanceof PrimaryNotFoundException) {
+ doReturn(Optional.absent()).when(mockActorContext).findPrimaryShard(anyString());
+ } else {
+ doReturn(Optional.of(getSystem().actorSelection(actorRef.path()))).
+ when(mockActorContext).findPrimaryShard(anyString());
+ }
+ doThrow(exToThrow).when(mockActorContext).executeOperation(any(ActorSelection.class), any());
- TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
- READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext, READ_ONLY);
propagateReadFailedExceptionCause(invoker.invoke(transactionProxy));
}
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
+ executeOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
try {
propagateReadFailedExceptionCause(transactionProxy.read(TestModel.TEST_PATH));
} finally {
- verify(mockActorContext, times(0)).executeRemoteOperationAsync(
+ verify(mockActorContext, times(0)).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
}
}
NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(expectedNode));
- doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
- doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
Boolean exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
assertEquals("Exists response", false, exists);
- doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
setupActorContextWithInitialCreateTransaction(READ_ONLY);
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(any(ActorSelection.class), any());
+ executeOperationAsync(any(ActorSelection.class), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
READ_ONLY);
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
+ executeOperationAsync(eq(actorSelection(actorRef)), eqDeleteData());
- doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
try {
propagateReadFailedExceptionCause(transactionProxy.exists(TestModel.TEST_PATH));
} finally {
- verify(mockActorContext, times(0)).executeRemoteOperationAsync(
+ verify(mockActorContext, times(0)).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
}
}
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDataExists());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.merge(TestModel.TEST_PATH, nodeToWrite);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
public void testDelete() throws Exception {
ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- doReturn(deleteDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(deleteDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDeleteData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.delete(TestModel.TEST_PATH);
- verify(mockActorContext).executeRemoteOperationAsync(
+ verify(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqDeleteData());
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
DeleteDataReply.SERIALIZABLE_CLASS);
}
- private void verifyCohortPathFutures(ThreePhaseCommitCohortProxy proxy,
- Object... expReplies) throws Exception {
+ private void verifyCohortFutures(ThreePhaseCommitCohortProxy proxy,
+ Object... expReplies) throws Exception {
assertEquals("getReadyOperationFutures size", expReplies.length,
- proxy.getCohortPathFutures().size());
+ proxy.getCohortFutures().size());
int i = 0;
- for( Future<ActorPath> future: proxy.getCohortPathFutures()) {
+ for( Future<ActorSelection> future: proxy.getCohortFutures()) {
assertNotNull("Ready operation Future is null", future);
Object expReply = expReplies[i++];
- if(expReply instanceof ActorPath) {
- ActorPath actual = Await.result(future, Duration.create(5, TimeUnit.SECONDS));
- assertEquals("Cohort actor path", expReply, actual);
+ if(expReply instanceof ActorSelection) {
+ ActorSelection actual = Await.result(future, Duration.create(5, TimeUnit.SECONDS));
+ assertEquals("Cohort actor path", (ActorSelection) expReply, actual);
} else {
// Expecting exception.
try {
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readyTxReply(actorRef.path().toString())).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
WriteDataReply.SERIALIZABLE_CLASS);
- verifyCohortPathFutures(proxy, actorRef.path());
+ verifyCohortFutures(proxy, getSystem().actorSelection(actorRef.path()));
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
+ executeOperationAsync(eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
- doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readyTxReply(actorRef.path().toString())).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
MergeDataReply.SERIALIZABLE_CLASS, TestException.class);
- verifyCohortPathFutures(proxy, TestException.class);
+ verifyCohortFutures(proxy, TestException.class);
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(mergeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(mergeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
doReturn(Futures.failed(new TestException())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)),
+ executeOperationAsync(eq(actorSelection(actorRef)),
isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
MergeDataReply.SERIALIZABLE_CLASS);
- verifyCohortPathFutures(proxy, TestException.class);
+ verifyCohortFutures(proxy, TestException.class);
}
@Test
public void testReadyWithInitialCreateTransactionFailure() throws Exception {
- doThrow(new PrimaryNotFoundException("mock")).when(mockActorContext).executeShardOperation(
- anyString(), any());
+ doReturn(Optional.absent()).when(mockActorContext).findPrimaryShard(anyString());
+// doThrow(new PrimaryNotFoundException("mock")).when(mockActorContext).executeShardOperation(
+// anyString(), any());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
WRITE_ONLY);
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- verifyCohortPathFutures(proxy, PrimaryNotFoundException.class);
+ verifyCohortFutures(proxy, PrimaryNotFoundException.class);
}
@SuppressWarnings("unchecked")
NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- doReturn(writeDataReply()).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(writeDataReply()).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
doReturn(Futures.successful(new Object())).when(mockActorContext).
- executeRemoteOperationAsync(eq(actorSelection(actorRef)),
+ executeOperationAsync(eq(actorSelection(actorRef)),
isA(ReadyTransaction.SERIALIZABLE_CLASS));
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- verifyCohortPathFutures(proxy, IllegalArgumentException.class);
+ verifyCohortFutures(proxy, IllegalArgumentException.class);
}
@Test
public void testClose() throws Exception{
ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ doReturn(readDataReply(null)).when(mockActorContext).executeOperationAsync(
eq(actorSelection(actorRef)), eqReadData());
TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
transactionProxy.close();
- verify(mockActorContext).sendRemoteOperationAsync(
+ verify(mockActorContext).sendOperationAsync(
eq(actorSelection(actorRef)), isA(CloseTransaction.SERIALIZABLE_CLASS));
}
}
assertEquals("member-1-shard-inventory-config", id.toString());
}
+ @Test
+ public void testFromShardIdString(){
+ String shardIdStr = "member-1-shard-inventory-config";
+
+ ShardIdentifier id = ShardIdentifier.builder().fromShardIdString(shardIdStr).build();
+ assertEquals("member-1", id.getMemberName());
+ assertEquals("inventory", id.getShardName());
+ assertEquals("config", id.getType());
+ }
}
package org.opendaylight.controller.cluster.datastore.messages;
-import junit.framework.Assert;
+import org.junit.Assert;
import org.junit.Test;
-import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
-import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
public class MergeDataTest {
@Test
- public void testBasic(){
- MergeData mergeData = new MergeData(TestModel.TEST_PATH, ImmutableNodes
- .containerNode(TestModel.TEST_QNAME),
- TestModel.createTestContext());
-
- MergeData output = MergeData
- .fromSerializable(mergeData.toSerializable(),
- TestModel.createTestContext());
-
- }
-
- @Test
- public void testNormalizedNodeEncodeDecode(){
- NormalizedNode<?, ?> expected =
- ImmutableNodes.containerNode(TestModel.TEST_QNAME);
-
-
- NormalizedNodeMessages.Container node =
- new NormalizedNodeToNodeCodec(TestModel.createTestContext())
- .encode(TestModel.TEST_PATH,
- expected);
-
- String parentPath = node.getParentPath();
-
- NormalizedNodeMessages.Node normalizedNode =
- node.getNormalizedNode();
-
- NormalizedNode<?,?> actual = new NormalizedNodeToNodeCodec(TestModel.createTestContext()).decode(TestModel.TEST_PATH,
- normalizedNode);
-
-
- Assert.assertEquals(expected, actual);
+ public void testSerialization() {
+ SchemaContext schemaContext = TestModel.createTestContext();
+ MergeData expected = new MergeData(TestModel.TEST_PATH, ImmutableNodes
+ .containerNode(TestModel.TEST_QNAME), schemaContext);
+
+ MergeData actual = MergeData.fromSerializable(expected.toSerializable(), schemaContext);
+ Assert.assertEquals("getPath", expected.getPath(), actual.getPath());
+ Assert.assertEquals("getData", expected.getData(), actual.getData());
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+/**
+ * Unit tests for WriteData.
+ *
+ * @author Thomas Pantelis
+ */
+public class WriteDataTest {
+
+ @Test
+ public void testSerialization() {
+ SchemaContext schemaContext = TestModel.createTestContext();
+ WriteData expected = new WriteData(TestModel.TEST_PATH, ImmutableNodes
+ .containerNode(TestModel.TEST_QNAME), schemaContext);
+
+ WriteData actual = WriteData.fromSerializable(expected.toSerializable(), schemaContext);
+ Assert.assertEquals("getPath", expected.getPath(), actual.getPath());
+ Assert.assertEquals("getData", expected.getData(), actual.getData());
+ }
+}
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
public class MergeModificationTest extends AbstractModificationTest{
- @Test
- public void testApply() throws Exception {
- //TODO : Need to write a better test for this
+ @Test
+ public void testApply() throws Exception {
+ //TODO : Need to write a better test for this
- //Write something into the datastore
- DOMStoreReadWriteTransaction writeTransaction = store.newReadWriteTransaction();
- MergeModification writeModification = new MergeModification(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), TestModel.createTestContext());
- writeModification.apply(writeTransaction);
- commitTransaction(writeTransaction);
+ //Write something into the datastore
+ DOMStoreReadWriteTransaction writeTransaction = store.newReadWriteTransaction();
+ MergeModification writeModification = new MergeModification(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), TestModel.createTestContext());
+ writeModification.apply(writeTransaction);
+ commitTransaction(writeTransaction);
- //Check if it's in the datastore
- Optional<NormalizedNode<?,?>> data = readData(TestModel.TEST_PATH);
- Assert.assertTrue(data.isPresent());
+ //Check if it's in the datastore
+ Optional<NormalizedNode<?,?>> data = readData(TestModel.TEST_PATH);
+ Assert.assertTrue(data.isPresent());
- }
+ }
+
+ @Test
+ public void testSerialization() {
+ SchemaContext schemaContext = TestModel.createTestContext();
+ NormalizedNode<?, ?> node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ MergeModification mergeModification = new MergeModification(TestModel.TEST_PATH,
+ node, schemaContext);
+
+ Object serialized = mergeModification.toSerializable();
+
+ MergeModification newModification = MergeModification.fromSerializable(serialized, schemaContext);
+
+ Assert.assertEquals("getPath", TestModel.TEST_PATH, newModification.getPath());
+ Assert.assertEquals("getData", node, newModification.getData());
+ }
}
MutableCompositeModification compositeModification = new MutableCompositeModification();
compositeModification.addModification(new WriteModification(TestModel.TEST_PATH,
ImmutableNodes.containerNode(TestModel.TEST_QNAME), TestModel.createTestContext()));
-
- assertNotEquals(compositeModification.toSerializable(), compositeModification.toSerializable());
-
+ Object one = compositeModification.toSerializable();
+ try{Thread.sleep(10);}catch(Exception err){}
+ Object two = compositeModification.toSerializable();
+ assertNotEquals(one,two);
}
}
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
public class WriteModificationTest extends AbstractModificationTest{
- @Test
- public void testApply() throws Exception {
- //Write something into the datastore
- DOMStoreReadWriteTransaction writeTransaction = store.newReadWriteTransaction();
- WriteModification writeModification = new WriteModification(TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME), TestModel.createTestContext());
- writeModification.apply(writeTransaction);
- commitTransaction(writeTransaction);
+ @Test
+ public void testApply() throws Exception {
+ //Write something into the datastore
+ DOMStoreReadWriteTransaction writeTransaction = store.newReadWriteTransaction();
+ WriteModification writeModification = new WriteModification(TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME), TestModel.createTestContext());
+ writeModification.apply(writeTransaction);
+ commitTransaction(writeTransaction);
- //Check if it's in the datastore
- Optional<NormalizedNode<?,?>> data = readData(TestModel.TEST_PATH);
- Assert.assertTrue(data.isPresent());
+ //Check if it's in the datastore
+ Optional<NormalizedNode<?,?>> data = readData(TestModel.TEST_PATH);
+ Assert.assertTrue(data.isPresent());
+ }
- }
+ @Test
+ public void testSerialization() {
+ SchemaContext schemaContext = TestModel.createTestContext();
+ NormalizedNode<?, ?> node = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ WriteModification writeModification = new WriteModification(TestModel.TEST_PATH,
+ node, schemaContext);
+
+ Object serialized = writeModification.toSerializable();
+
+ WriteModification newModification = WriteModification.fromSerializable(serialized, schemaContext);
+
+ Assert.assertEquals("getPath", TestModel.TEST_PATH, newModification.getPath());
+ Assert.assertEquals("getData", node, newModification.getData());
+ }
}
package org.opendaylight.controller.cluster.datastore.utils;
-import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
-import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.japi.Creator;
import akka.testkit.JavaTestKit;
+import com.google.common.base.Optional;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.AbstractActorTest;
import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardNotFound;
-
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
+
+import java.util.concurrent.TimeUnit;
+
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
public class ActorContextTest extends AbstractActorTest{
- @Test
- public void testResolvePathForRemoteActor(){
- ActorContext actorContext =
- new ActorContext(mock(ActorSystem.class), mock(ActorRef.class),mock(
- ClusterWrapper.class),
- mock(Configuration.class));
-
- String actual = actorContext.resolvePath(
- "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard",
- "akka://system/user/shardmanager/shard/transaction");
-
- String expected = "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard/transaction";
-
- assertEquals(expected, actual);
- }
-
- @Test
- public void testResolvePathForLocalActor(){
- ActorContext actorContext =
- new ActorContext(getSystem(), mock(ActorRef.class), mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- String actual = actorContext.resolvePath(
- "akka://system/user/shardmanager/shard",
- "akka://system/user/shardmanager/shard/transaction");
-
- String expected = "akka://system/user/shardmanager/shard/transaction";
-
- assertEquals(expected, actual);
-
- System.out.println(actorContext
- .actorFor("akka://system/user/shardmanager/shard/transaction"));
- }
-
private static class MockShardManager extends UntypedActor {
}
}
- @Test
- public void testExecuteLocalShardOperationWithShardFound(){
- new JavaTestKit(getSystem()) {{
-
- new Within(duration("1 seconds")) {
- @Override
- protected void run() {
-
- ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
-
- ActorRef shardManagerActorRef = getSystem()
- .actorOf(MockShardManager.props(true, shardActorRef));
-
- ActorContext actorContext =
- new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- Object out = actorContext.executeLocalShardOperation("default", "hello");
-
- assertEquals("hello", out);
-
-
- expectNoMsg();
- }
- };
- }};
-
- }
-
- @Test
- public void testExecuteLocalShardOperationWithShardNotFound(){
- new JavaTestKit(getSystem()) {{
-
- new Within(duration("1 seconds")) {
- @Override
- protected void run() {
-
- ActorRef shardManagerActorRef = getSystem()
- .actorOf(MockShardManager.props(false, null));
-
- ActorContext actorContext =
- new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
- mock(Configuration.class));
-
- Object out = actorContext.executeLocalShardOperation("default", "hello");
-
- assertNull(out);
-
-
- expectNoMsg();
- }
- };
- }};
-
- }
-
-
@Test
public void testFindLocalShardWithShardFound(){
new JavaTestKit(getSystem()) {{
new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
mock(Configuration.class));
- Object out = actorContext.findLocalShard("default");
+ Optional<ActorRef> out = actorContext.findLocalShard("default");
- assertEquals(shardActorRef, out);
+ assertEquals(shardActorRef, out.get());
expectNoMsg();
new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
mock(Configuration.class));
- Object out = actorContext.findLocalShard("default");
-
- assertNull(out);
-
-
+ Optional<ActorRef> out = actorContext.findLocalShard("default");
+ assertTrue(!out.isPresent());
expectNoMsg();
}
};
ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
- Object out = actorContext.executeRemoteOperation(actor, "hello");
+ Object out = actorContext.executeOperation(actor, "hello");
assertEquals("hello", out);
ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
- Future<Object> future = actorContext.executeRemoteOperationAsync(actor, "hello");
+ Future<Object> future = actorContext.executeOperationAsync(actor, "hello");
try {
Object result = Await.result(future, Duration.create(3, TimeUnit.SECONDS));
*/
package org.opendaylight.controller.cluster.datastore.utils;
+import static org.junit.Assert.assertEquals;
+import java.util.Collections;
+import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
import com.google.common.collect.Maps;
+import com.google.common.util.concurrent.Uninterruptibles;
import scala.concurrent.Future;
import akka.dispatch.Futures;
import akka.japi.Procedure;
public class InMemoryJournal extends AsyncWriteJournal {
- private static Map<String, Map<Long, Object>> journals = new ConcurrentHashMap<>();
+ private static final Map<String, Map<Long, Object>> journals = new ConcurrentHashMap<>();
+
+ private static final Map<String, CountDownLatch> deleteMessagesCompleteLatches = new ConcurrentHashMap<>();
public static void addEntry(String persistenceId, long sequenceNr, Object data) {
Map<Long, Object> journal = journals.get(persistenceId);
journals.put(persistenceId, journal);
}
- journal.put(sequenceNr, data);
+ synchronized (journal) {
+ journal.put(sequenceNr, data);
+ }
}
public static void clear() {
journals.clear();
}
+ public static Map<Long, Object> get(String persistenceId) {
+ Map<Long, Object> journal = journals.get(persistenceId);
+ return journal != null ? journal : Collections.<Long, Object>emptyMap();
+ }
+
+ public static void waitForDeleteMessagesComplete(String persistenceId) {
+ assertEquals("Recovery complete", true, Uninterruptibles.awaitUninterruptibly(
+ deleteMessagesCompleteLatches.get(persistenceId), 5, TimeUnit.SECONDS));
+ }
+
+ public static void addDeleteMessagesCompleteLatch(String persistenceId) {
+ deleteMessagesCompleteLatches.put(persistenceId, new CountDownLatch(1));
+ }
+
@Override
public Future<Void> doAsyncReplayMessages(final String persistenceId, long fromSequenceNr,
long toSequenceNr, long max, final Procedure<PersistentRepr> replayCallback) {
return null;
}
- for (Map.Entry<Long,Object> entry : journal.entrySet()) {
- PersistentRepr persistentMessage =
- new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId, false, null, null);
- replayCallback.apply(persistentMessage);
+ synchronized (journal) {
+ for (Map.Entry<Long,Object> entry : journal.entrySet()) {
+ PersistentRepr persistentMessage =
+ new PersistentImpl(entry.getValue(), entry.getKey(), persistenceId,
+ false, null, null);
+ replayCallback.apply(persistentMessage);
+ }
}
return null;
@Override
public Future<Long> doAsyncReadHighestSequenceNr(String persistenceId, long fromSequenceNr) {
- return Futures.successful(new Long(0));
+ return Futures.successful(-1L);
}
@Override
- public Future<Void> doAsyncWriteMessages(Iterable<PersistentRepr> messages) {
- return Futures.successful(null);
+ public Future<Void> doAsyncWriteMessages(final Iterable<PersistentRepr> messages) {
+ return Futures.future(new Callable<Void>() {
+ @Override
+ public Void call() throws Exception {
+ for (PersistentRepr repr : messages) {
+ Map<Long, Object> journal = journals.get(repr.persistenceId());
+ if(journal == null) {
+ journal = Maps.newLinkedHashMap();
+ journals.put(repr.persistenceId(), journal);
+ }
+
+ synchronized (journal) {
+ journal.put(repr.sequenceNr(), repr.payload());
+ }
+ }
+ return null;
+ }
+ }, context().dispatcher());
}
@Override
@Override
public Future<Void> doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent) {
+ Map<Long, Object> journal = journals.get(persistenceId);
+ if(journal != null) {
+ synchronized (journal) {
+ Iterator<Long> iter = journal.keySet().iterator();
+ while(iter.hasNext()) {
+ Long n = iter.next();
+ if(n <= toSequenceNr) {
+ iter.remove();
+ }
+ }
+ }
+ }
+
+ CountDownLatch latch = deleteMessagesCompleteLatches.get(persistenceId);
+ if(latch != null) {
+ latch.countDown();
+ }
+
return Futures.successful(null);
}
}
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
+import com.google.common.base.Optional;
public class MockActorContext extends ActorContext {
super(actorSystem, shardManager, new MockClusterWrapper(), new MockConfiguration());
}
-
- @Override public Object executeShardOperation(String shardName,
- Object message) {
- return executeShardOperationResponse;
- }
-
- @Override public Object executeRemoteOperation(ActorSelection actor,
- Object message) {
+ @Override public Object executeOperation(ActorSelection actor,
+ Object message) {
return executeRemoteOperationResponse;
}
- @Override public ActorSelection findPrimary(String shardName) {
- return null;
+ @Override public Optional<ActorSelection> findPrimaryShard(String shardName) {
+ return Optional.absent();
}
public void setExecuteShardOperationResponse(Object response){
}
@Override
- public Object executeLocalOperation(ActorRef actor,
- Object message) {
+ public Object executeOperation(ActorRef actor,
+ Object message) {
return this.executeLocalOperationResponse;
}
- @Override
- public Object executeLocalShardOperation(String shardName,
- Object message) {
- return this.executeLocalShardOperationResponse;
- }
}
ActorContext testContext = new ActorContext(actorSystem, actorSystem.actorOf(
Props.create(DoNothingActor.class)), new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
- .executeLocalOperation(actorRef, "messages");
+ .executeOperation(actorRef, "messages");
Assert.assertNotNull(messages);
Set<QName> childAugmentations = new HashSet<>();
childAugmentations.add(AUG_QNAME);
final YangInstanceIdentifier.AugmentationIdentifier augmentationIdentifier =
- new YangInstanceIdentifier.AugmentationIdentifier(null, childAugmentations);
+ new YangInstanceIdentifier.AugmentationIdentifier(childAugmentations);
final AugmentationNode augmentationNode =
Builders.augmentationBuilder()
.withNodeIdentifier(augmentationIdentifier)
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
public class SampleModelsTest {
final NormalizedNodeMessages.Container node =
new NormalizedNodeToNodeCodec(SchemaContextHelper.full())
- .encode(YangInstanceIdentifier.of(PeopleModel.BASE_QNAME),
- expected);
+ .encode(expected);
final NormalizedNodeMessages.Node normalizedNode =
node.getNormalizedNode();
- final NormalizedNode<?,?> actual = new NormalizedNodeToNodeCodec(SchemaContextHelper.full()).decode(YangInstanceIdentifier.of(PeopleModel.BASE_QNAME),
- normalizedNode);
+ final NormalizedNode<?,?> actual = new NormalizedNodeToNodeCodec(SchemaContextHelper.full()).decode(normalizedNode);
Assert.assertEquals(expected, actual);
final NormalizedNodeMessages.Container node =
new NormalizedNodeToNodeCodec(SchemaContextHelper.full())
- .encode(YangInstanceIdentifier.of(CarsModel.BASE_QNAME),
- expected);
+ .encode(expected);
final NormalizedNodeMessages.Node normalizedNode =
node.getNormalizedNode();
final NormalizedNode<?,?> actual = new NormalizedNodeToNodeCodec(SchemaContextHelper.full()).decode(
- YangInstanceIdentifier.of(CarsModel.BASE_QNAME),
normalizedNode);
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-core-api</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-broker-impl</artifactId>
<packaging>bundle</packaging>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-inmemory-datastore</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
package org.opendaylight.controller.config.yang.md.sal.dom.impl;
import java.util.EnumMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitDeadlockException;
import org.opendaylight.controller.md.sal.common.util.jmx.AbstractMXBean;
import org.opendaylight.controller.md.sal.common.util.jmx.ThreadExecutorStatsMXBeanImpl;
+import org.opendaylight.controller.md.sal.dom.broker.impl.DOMConcurrentDataCommitCoordinator;
import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataBrokerImpl;
+import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataCommitCoordinatorImpl;
+import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataCommitExecutor;
import org.opendaylight.controller.md.sal.dom.broker.impl.jmx.CommitStatsMXBeanImpl;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
+import org.opendaylight.yangtools.util.DurationStatisticsTracker;
import org.opendaylight.yangtools.util.concurrent.DeadlockDetectingListeningExecutorService;
import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
+import com.google.common.collect.Lists;
/**
*
datastores.put(LogicalDatastoreType.OPERATIONAL, operStore);
datastores.put(LogicalDatastoreType.CONFIGURATION, configStore);
- /*
- * We use a single-threaded executor for commits with a bounded queue capacity. If the
- * queue capacity is reached, subsequent commit tasks will be rejected and the commits will
- * fail. This is done to relieve back pressure. This should be an extreme scenario - either
- * there's deadlock(s) somewhere and the controller is unstable or some rogue component is
- * continuously hammering commits too fast or the controller is just over-capacity for the
- * system it's running on.
- */
- ExecutorService commitExecutor = SpecialExecutors.newBoundedSingleThreadExecutor(
- getMaxDataBrokerCommitQueueSize(), "WriteTxCommit");
-
/*
* We use an executor for commit ListenableFuture callbacks that favors reusing available
* threads over creating new threads at the expense of execution time. The assumption is
getMaxDataBrokerFutureCallbackPoolSize(), getMaxDataBrokerFutureCallbackQueueSize(),
"CommitFutures");
- DOMDataBrokerImpl newDataBroker = new DOMDataBrokerImpl(datastores,
- new DeadlockDetectingListeningExecutorService(commitExecutor,
- TransactionCommitDeadlockException.DEADLOCK_EXCEPTION_SUPPLIER,
- listenableFutureExecutor));
+ final List<AbstractMXBean> mBeans = Lists.newArrayList();
+
+ DOMDataCommitExecutor commitCoordinator;
+ DurationStatisticsTracker commitStatsTracker = null;
+
+ if(getAllowConcurrentCommits()) {
+ DOMConcurrentDataCommitCoordinator coordinator =
+ new DOMConcurrentDataCommitCoordinator(listenableFutureExecutor);
+ commitStatsTracker = coordinator.getCommitStatsTracker();
+ commitCoordinator = coordinator;
+ } else {
+ /*
+ * We use a single-threaded executor for commits with a bounded queue capacity. If the
+ * queue capacity is reached, subsequent commit tasks will be rejected and the commits will
+ * fail. This is done to relieve back pressure. This should be an extreme scenario - either
+ * there's deadlock(s) somewhere and the controller is unstable or some rogue component is
+ * continuously hammering commits too fast or the controller is just over-capacity for the
+ * system it's running on.
+ */
+ ExecutorService commitExecutor = SpecialExecutors.newBoundedSingleThreadExecutor(
+ getMaxDataBrokerCommitQueueSize(), "WriteTxCommit");
+
+ DOMDataCommitCoordinatorImpl coordinator = new DOMDataCommitCoordinatorImpl(
+ new DeadlockDetectingListeningExecutorService(commitExecutor,
+ TransactionCommitDeadlockException.DEADLOCK_EXCEPTION_SUPPLIER,
+ listenableFutureExecutor));
+
+ commitStatsTracker = coordinator.getCommitStatsTracker();
+ commitCoordinator = coordinator;
+
+ final AbstractMXBean commitExecutorStatsMXBean =
+ ThreadExecutorStatsMXBeanImpl.create(commitExecutor, "CommitExecutorStats",
+ JMX_BEAN_TYPE, null);
+ if(commitExecutorStatsMXBean != null) {
+ mBeans.add(commitExecutorStatsMXBean);
+ }
+ }
- final CommitStatsMXBeanImpl commitStatsMXBean = new CommitStatsMXBeanImpl(
- newDataBroker.getCommitStatsTracker(), JMX_BEAN_TYPE);
- commitStatsMXBean.registerMBean();
+ DOMDataBrokerImpl newDataBroker = new DOMDataBrokerImpl(datastores, commitCoordinator);
+
+ if(commitStatsTracker != null) {
+ final CommitStatsMXBeanImpl commitStatsMXBean = new CommitStatsMXBeanImpl(
+ commitStatsTracker, JMX_BEAN_TYPE);
+ commitStatsMXBean.registerMBean();
+ mBeans.add(commitStatsMXBean);
+ }
- final AbstractMXBean commitExecutorStatsMXBean =
- ThreadExecutorStatsMXBeanImpl.create(commitExecutor, "CommitExecutorStats",
- JMX_BEAN_TYPE, null);
final AbstractMXBean commitFutureStatsMXBean =
ThreadExecutorStatsMXBeanImpl.create(listenableFutureExecutor,
"CommitFutureExecutorStats", JMX_BEAN_TYPE, null);
+ if(commitFutureStatsMXBean != null) {
+ mBeans.add(commitFutureStatsMXBean);
+ }
newDataBroker.setCloseable(new AutoCloseable() {
@Override
public void close() {
- commitStatsMXBean.unregisterMBean();
- if (commitExecutorStatsMXBean != null) {
- commitExecutorStatsMXBean.unregisterMBean();
- }
- if (commitFutureStatsMXBean != null) {
- commitFutureStatsMXBean.unregisterMBean();
+ for(AbstractMXBean mBean: mBeans) {
+ mBean.unregisterMBean();
}
}
});
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import java.util.List;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.util.DurationStatisticsTracker;
+import org.opendaylight.yangtools.util.concurrent.MappingCheckedFuture;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterables;
+import com.google.common.util.concurrent.AbstractFuture;
+import com.google.common.util.concurrent.AbstractListeningExecutorService;
+import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * Implementation of DOMDataCommitExecutor that coordinates transaction commits concurrently. The 3
+ * commit phases (canCommit, preCommit, and commit) are performed serially and non-blocking
+ * (i.e. async) per transaction, but multiple transaction commits can run concurrently.
+ *
+ * @author Thomas Pantelis
+ */
+public class DOMConcurrentDataCommitCoordinator implements DOMDataCommitExecutor {
+
+ private static final String CAN_COMMIT = "CAN_COMMIT";
+ private static final String PRE_COMMIT = "PRE_COMMIT";
+ private static final String COMMIT = "COMMIT";
+
+ private static final Logger LOG = LoggerFactory.getLogger(DOMConcurrentDataCommitCoordinator.class);
+
+ private final DurationStatisticsTracker commitStatsTracker = DurationStatisticsTracker.createConcurrent();
+
+ /**
+ * This executor is used to execute Future listener callback Runnables async.
+ */
+ private final ExecutorService clientFutureCallbackExecutor;
+
+ /**
+ * This executor is re-used internally in calls to Futures#addCallback to avoid the overhead
+ * of Futures#addCallback creating a MoreExecutors#sameThreadExecutor for each call.
+ */
+ private final ExecutorService internalFutureCallbackExecutor = new SimpleSameThreadExecutor();
+
+ public DOMConcurrentDataCommitCoordinator(ExecutorService listenableFutureExecutor) {
+ this.clientFutureCallbackExecutor = Preconditions.checkNotNull(listenableFutureExecutor);
+ }
+
+ public DurationStatisticsTracker getCommitStatsTracker() {
+ return commitStatsTracker;
+ }
+
+ @Override
+ public CheckedFuture<Void, TransactionCommitFailedException> submit(DOMDataWriteTransaction transaction,
+ Iterable<DOMStoreThreePhaseCommitCohort> cohorts) {
+
+ Preconditions.checkArgument(transaction != null, "Transaction must not be null.");
+ Preconditions.checkArgument(cohorts != null, "Cohorts must not be null.");
+ LOG.debug("Tx: {} is submitted for execution.", transaction.getIdentifier());
+
+ final int cohortSize = Iterables.size(cohorts);
+ final AsyncNotifyingSettableFuture clientSubmitFuture =
+ new AsyncNotifyingSettableFuture(clientFutureCallbackExecutor);
+
+ doCanCommit(clientSubmitFuture, transaction, cohorts, cohortSize);
+
+ return MappingCheckedFuture.create(clientSubmitFuture,
+ TransactionCommitFailedExceptionMapper.COMMIT_ERROR_MAPPER);
+ }
+
+ private void doCanCommit(final AsyncNotifyingSettableFuture clientSubmitFuture,
+ final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts, final int cohortSize) {
+
+ final long startTime = System.nanoTime();
+
+ // Not using Futures.allAsList here to avoid its internal overhead.
+ final AtomicInteger remaining = new AtomicInteger(cohortSize);
+ FutureCallback<Boolean> futureCallback = new FutureCallback<Boolean>() {
+ @Override
+ public void onSuccess(Boolean result) {
+ if (result == null || !result) {
+ handleException(clientSubmitFuture, transaction, cohorts, cohortSize,
+ CAN_COMMIT, new TransactionCommitFailedException(
+ "Can Commit failed, no detailed cause available."));
+ } else {
+ if(remaining.decrementAndGet() == 0) {
+ // All cohorts completed successfully - we can move on to the preCommit phase
+ doPreCommit(startTime, clientSubmitFuture, transaction, cohorts, cohortSize);
+ }
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ handleException(clientSubmitFuture, transaction, cohorts, cohortSize, CAN_COMMIT, t);
+ }
+ };
+
+ for(DOMStoreThreePhaseCommitCohort cohort: cohorts) {
+ ListenableFuture<Boolean> canCommitFuture = cohort.canCommit();
+ Futures.addCallback(canCommitFuture, futureCallback, internalFutureCallbackExecutor);
+ }
+ }
+
+ private void doPreCommit(final long startTime, final AsyncNotifyingSettableFuture clientSubmitFuture,
+ final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts, final int cohortSize) {
+
+ // Not using Futures.allAsList here to avoid its internal overhead.
+ final AtomicInteger remaining = new AtomicInteger(cohortSize);
+ FutureCallback<Void> futureCallback = new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(Void notUsed) {
+ if(remaining.decrementAndGet() == 0) {
+ // All cohorts completed successfully - we can move on to the commit phase
+ doCommit(startTime, clientSubmitFuture, transaction, cohorts, cohortSize);
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ handleException(clientSubmitFuture, transaction, cohorts, cohortSize, PRE_COMMIT, t);
+ }
+ };
+
+ for(DOMStoreThreePhaseCommitCohort cohort: cohorts) {
+ ListenableFuture<Void> preCommitFuture = cohort.preCommit();
+ Futures.addCallback(preCommitFuture, futureCallback, internalFutureCallbackExecutor);
+ }
+ }
+
+ private void doCommit(final long startTime, final AsyncNotifyingSettableFuture clientSubmitFuture,
+ final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts, final int cohortSize) {
+
+ // Not using Futures.allAsList here to avoid its internal overhead.
+ final AtomicInteger remaining = new AtomicInteger(cohortSize);
+ FutureCallback<Void> futureCallback = new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(Void notUsed) {
+ if(remaining.decrementAndGet() == 0) {
+ // All cohorts completed successfully - we're done.
+ commitStatsTracker.addDuration(System.nanoTime() - startTime);
+
+ clientSubmitFuture.set();
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ handleException(clientSubmitFuture, transaction, cohorts, cohortSize, COMMIT, t);
+ }
+ };
+
+ for(DOMStoreThreePhaseCommitCohort cohort: cohorts) {
+ ListenableFuture<Void> commitFuture = cohort.commit();
+ Futures.addCallback(commitFuture, futureCallback, internalFutureCallbackExecutor);
+ }
+ }
+
+ private void handleException(final AsyncNotifyingSettableFuture clientSubmitFuture,
+ final DOMDataWriteTransaction transaction,
+ final Iterable<DOMStoreThreePhaseCommitCohort> cohorts, int cohortSize,
+ final String phase, final Throwable t) {
+
+ if(clientSubmitFuture.isDone()) {
+ // We must have had failures from multiple cohorts.
+ return;
+ }
+
+ LOG.warn("Tx: {} Error during phase {}, starting Abort", transaction.getIdentifier(), phase, t);
+ Exception e;
+ if(t instanceof Exception) {
+ e = (Exception)t;
+ } else {
+ e = new RuntimeException("Unexpected error occurred", t);
+ }
+
+ final TransactionCommitFailedException clientException =
+ TransactionCommitFailedExceptionMapper.CAN_COMMIT_ERROR_MAPPER.apply(e);
+
+ // Transaction failed - tell all cohorts to abort.
+
+ @SuppressWarnings("unchecked")
+ ListenableFuture<Void>[] abortFutures = new ListenableFuture[cohortSize];
+ int i = 0;
+ for(DOMStoreThreePhaseCommitCohort cohort: cohorts) {
+ abortFutures[i++] = cohort.abort();
+ }
+
+ ListenableFuture<List<Void>> combinedFuture = Futures.allAsList(abortFutures);
+ Futures.addCallback(combinedFuture, new FutureCallback<List<Void>>() {
+ @Override
+ public void onSuccess(List<Void> notUsed) {
+ // Propagate the original exception to the client.
+ clientSubmitFuture.setException(clientException);
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ LOG.error("Tx: {} Error during Abort.", transaction.getIdentifier(), t);
+
+ // Propagate the original exception as that is what caused the Tx to fail and is
+ // what's interesting to the client.
+ clientSubmitFuture.setException(clientException);
+ }
+ }, internalFutureCallbackExecutor);
+ }
+
+ /**
+ * A settable future that uses an {@link Executor} to execute listener callback Runnables,
+ * registered via {@link #addListener}, asynchronously when this future completes. This is
+ * done to guarantee listener executions are off-loaded onto another thread to avoid blocking
+ * the thread that completed this future, as a common use case is to pass an executor that runs
+ * tasks in the same thread as the caller (i.e. MoreExecutors#sameThreadExecutor)
+ * to {@link #addListener}.
+ *
+ * FIXME: This class should probably be moved to yangtools common utils for re-usability and
+ * unified with AsyncNotifyingListenableFutureTask.
+ */
+ private static class AsyncNotifyingSettableFuture extends AbstractFuture<Void> {
+
+ /**
+ * ThreadLocal used to detect if the task completion thread is running the future listener Runnables.
+ */
+ private static final ThreadLocal<Boolean> ON_TASK_COMPLETION_THREAD_TL = new ThreadLocal<Boolean>();
+
+ private final ExecutorService listenerExecutor;
+
+ AsyncNotifyingSettableFuture(ExecutorService listenerExecutor) {
+ this.listenerExecutor = listenerExecutor;
+ }
+
+ @Override
+ public void addListener(final Runnable listener, final Executor executor) {
+ // Wrap the listener Runnable in a DelegatingRunnable. If the specified executor is one
+ // that runs tasks in the same thread as the caller submitting the task
+ // (e.g. {@link com.google.common.util.concurrent.MoreExecutors#sameThreadExecutor}) and
+ // the listener is executed from the #set methods, then the DelegatingRunnable will detect
+ // this via the ThreadLocal and submit the listener Runnable to the listenerExecutor.
+ //
+ // On the other hand, if this task is already complete, the call to ExecutionList#add in
+ // superclass will execute the listener Runnable immediately and, since the ThreadLocal
+ // won't be set, the DelegatingRunnable will run the listener Runnable inline.
+ super.addListener(new DelegatingRunnable(listener, listenerExecutor), executor);
+ }
+
+ boolean set() {
+ ON_TASK_COMPLETION_THREAD_TL.set(Boolean.TRUE);
+ try {
+ return super.set(null);
+ } finally {
+ ON_TASK_COMPLETION_THREAD_TL.set(null);
+ }
+ }
+
+ @Override
+ protected boolean setException(Throwable throwable) {
+ ON_TASK_COMPLETION_THREAD_TL.set(Boolean.TRUE);
+ try {
+ return super.setException(throwable);
+ } finally {
+ ON_TASK_COMPLETION_THREAD_TL.set(null);
+ }
+ }
+
+ private static final class DelegatingRunnable implements Runnable {
+ private final Runnable delegate;
+ private final Executor executor;
+
+ DelegatingRunnable(final Runnable delegate, final Executor executor) {
+ this.delegate = Preconditions.checkNotNull(delegate);
+ this.executor = Preconditions.checkNotNull(executor);
+ }
+
+ @Override
+ public void run() {
+ if (ON_TASK_COMPLETION_THREAD_TL.get() != null) {
+ // We're running on the task completion thread so off-load to the executor.
+ LOG.trace("Submitting ListenenableFuture Runnable from thread {} to executor {}",
+ Thread.currentThread().getName(), executor);
+ executor.execute(delegate);
+ } else {
+ // We're not running on the task completion thread so run the delegate inline.
+ LOG.trace("Executing ListenenableFuture Runnable on this thread: {}",
+ Thread.currentThread().getName());
+ delegate.run();
+ }
+ }
+ }
+ }
+
+ /**
+ * A simple same-thread executor without the internal locking overhead that
+ * MoreExecutors#sameThreadExecutor has. The #execute method is the only one of concern - we
+ * don't shut down the executor, so the other methods are irrelevant.
+ */
+ private static class SimpleSameThreadExecutor extends AbstractListeningExecutorService {
+
+ @Override
+ public void execute(Runnable command) {
+ command.run();
+ }
+
+ @Override
+ public boolean awaitTermination(long arg0, TimeUnit arg1) throws InterruptedException {
+ return true;
+ }
+
+ @Override
+ public boolean isShutdown() {
+ return false;
+ }
+
+ @Override
+ public boolean isTerminated() {
+ return false;
+ }
+
+ @Override
+ public void shutdown() {
+ }
+
+ @Override
+ public List<Runnable> shutdownNow() {
+ return null;
+ }
+ }
+}
package org.opendaylight.controller.md.sal.dom.broker.impl;
import static com.google.common.base.Preconditions.checkState;
+import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.EnumMap;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.util.DurationStatsTracker;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
private static final Logger LOG = LoggerFactory.getLogger(DOMDataBrokerImpl.class);
- private final DOMDataCommitCoordinatorImpl coordinator;
+ private final DOMDataCommitExecutor coordinator;
private final AtomicLong txNum = new AtomicLong();
private final AtomicLong chainNum = new AtomicLong();
private volatile AutoCloseable closeable;
public DOMDataBrokerImpl(final Map<LogicalDatastoreType, DOMStore> datastores,
final ListeningExecutorService executor) {
+ this(datastores, new DOMDataCommitCoordinatorImpl(executor));
+ }
+
+ public DOMDataBrokerImpl(final Map<LogicalDatastoreType, DOMStore> datastores,
+ final DOMDataCommitExecutor coordinator) {
super(datastores);
- this.coordinator = new DOMDataCommitCoordinatorImpl(executor);
+ this.coordinator = Preconditions.checkNotNull(coordinator);
}
public void setCloseable(final AutoCloseable closeable) {
this.closeable = closeable;
}
- public DurationStatsTracker getCommitStatsTracker() {
- return coordinator.getCommitStatsTracker();
- }
-
@Override
public void close() {
super.close();
import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import org.opendaylight.yangtools.util.DurationStatsTracker;
+import org.opendaylight.yangtools.util.DurationStatisticsTracker;
import org.opendaylight.yangtools.util.concurrent.MappingCheckedFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DOMDataCommitCoordinatorImpl implements DOMDataCommitExecutor {
private static final Logger LOG = LoggerFactory.getLogger(DOMDataCommitCoordinatorImpl.class);
- private final DurationStatsTracker commitStatsTracker = new DurationStatsTracker();
+ private final DurationStatisticsTracker commitStatsTracker = DurationStatisticsTracker.createConcurrent();
private final ListeningExecutorService executor;
/**
this.executor = Preconditions.checkNotNull(executor, "executor must not be null.");
}
- public DurationStatsTracker getCommitStatsTracker() {
+ public DurationStatisticsTracker getCommitStatsTracker() {
return commitStatsTracker;
}
AtomicReferenceFieldUpdater.newUpdater(CommitCoordinationTask.class, CommitPhase.class, "currentPhase");
private final DOMDataWriteTransaction tx;
private final Iterable<DOMStoreThreePhaseCommitCohort> cohorts;
- private final DurationStatsTracker commitStatTracker;
+ private final DurationStatisticsTracker commitStatTracker;
private final int cohortSize;
private volatile CommitPhase currentPhase = CommitPhase.SUBMITTED;
public CommitCoordinationTask(final DOMDataWriteTransaction transaction,
final Iterable<DOMStoreThreePhaseCommitCohort> cohorts,
- final DurationStatsTracker commitStatTracker) {
+ final DurationStatisticsTracker commitStatsTracker) {
this.tx = Preconditions.checkNotNull(transaction, "transaction must not be null");
this.cohorts = Preconditions.checkNotNull(cohorts, "cohorts must not be null");
- this.commitStatTracker = commitStatTracker;
+ this.commitStatTracker = commitStatsTracker;
this.cohortSize = Iterables.size(cohorts);
}
*
*
*/
-interface DOMDataCommitExecutor {
+public interface DOMDataCommitExecutor {
/**
* Submits supplied transaction to be executed in context of provided
package org.opendaylight.controller.md.sal.dom.broker.impl.jmx;
import javax.annotation.Nonnull;
-
import org.opendaylight.controller.md.sal.common.util.jmx.AbstractMXBean;
-import org.opendaylight.yangtools.util.DurationStatsTracker;
+import org.opendaylight.yangtools.util.DurationStatisticsTracker;
/**
* Implementation of the CommitStatsMXBean interface.
*/
public class CommitStatsMXBeanImpl extends AbstractMXBean implements CommitStatsMXBean {
- private final DurationStatsTracker commitStatsTracker;
+ private final DurationStatisticsTracker commitStatsTracker;
/**
* Constructor.
* @param commitStatsTracker the DurationStatsTracker used to obtain the stats.
* @param mBeanType mBeanType Used as the <code>type</code> property in the bean's ObjectName.
*/
- public CommitStatsMXBeanImpl(@Nonnull DurationStatsTracker commitStatsTracker,
+ public CommitStatsMXBeanImpl(@Nonnull DurationStatisticsTracker commitStatsTracker,
@Nonnull String mBeanType) {
super("CommitStats", mBeanType, null);
this.commitStatsTracker = commitStatsTracker;
type uint16;
description "The maximum queue size for the data broker's commit executor.";
}
+
+ leaf allow-concurrent-commits {
+ default false;
+ type boolean;
+ description "Specifies whether or not to allow 3-phase commits to run concurrently.
+ Use with caution. If set to true, the data store implementations must be prepared
+ to handle concurrent commits. The default is false";
+ }
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.broker.impl;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.inOrder;
+import java.util.Arrays;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.InOrder;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.SettableFuture;
+import com.google.common.util.concurrent.Uninterruptibles;
+
+/**
+ * Unit tests for DOMConcurrentDataCommitCoordinator.
+ *
+ * @author Thomas Pantelis
+ */
+public class DOMConcurrentDataCommitCoordinatorTest {
+
+ private final DOMDataWriteTransaction transaction = mock(DOMDataWriteTransaction.class);
+ private final DOMStoreThreePhaseCommitCohort mockCohort1 = mock(DOMStoreThreePhaseCommitCohort.class);
+ private final DOMStoreThreePhaseCommitCohort mockCohort2 = mock(DOMStoreThreePhaseCommitCohort.class);
+ private final ThreadPoolExecutor futureExecutor =
+ new ThreadPoolExecutor(0, 1, 5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>());
+ private final DOMConcurrentDataCommitCoordinator coordinator =
+ new DOMConcurrentDataCommitCoordinator(futureExecutor);
+
+ @Before
+ public void setup() {
+ doReturn("tx").when(transaction).getIdentifier();
+ }
+
+ @After
+ public void tearDown() {
+ futureExecutor.shutdownNow();
+ }
+
+ @Test
+ public void testSuccessfulSubmitAsync() throws Throwable {
+ testSuccessfulSubmit(true);
+ }
+
+ @Test
+ public void testSuccessfulSubmitSync() throws Throwable {
+ testSuccessfulSubmit(false);
+ }
+
+ private void testSuccessfulSubmit(final boolean doAsync) throws Throwable {
+ final CountDownLatch asyncCanCommitContinue = new CountDownLatch(1);
+ Answer<ListenableFuture<Boolean>> asyncCanCommit = new Answer<ListenableFuture<Boolean>>() {
+ @Override
+ public ListenableFuture<Boolean> answer(InvocationOnMock invocation) {
+ final SettableFuture<Boolean> future = SettableFuture.create();
+ if(doAsync) {
+ new Thread() {
+ @Override
+ public void run() {
+ Uninterruptibles.awaitUninterruptibly(asyncCanCommitContinue,
+ 10, TimeUnit.SECONDS);
+ future.set(true);
+ }
+ }.start();
+ } else {
+ future.set(true);
+ }
+
+ return future;
+ }
+ };
+
+ doAnswer(asyncCanCommit).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).commit();
+
+ doReturn(Futures.immediateFuture(true)).when(mockCohort2).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).commit();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2));
+
+ final CountDownLatch doneLatch = new CountDownLatch(1);
+ final AtomicReference<Throwable> caughtEx = new AtomicReference<>();
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(Void result) {
+ doneLatch.countDown();
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ caughtEx.set(t);
+ doneLatch.countDown();
+ }
+ });
+
+ asyncCanCommitContinue.countDown();
+
+ assertEquals("Submit complete", true, doneLatch.await(5, TimeUnit.SECONDS));
+
+ if(caughtEx.get() != null) {
+ throw caughtEx.get();
+ }
+
+ assertEquals("Task count", doAsync ? 1 : 0, futureExecutor.getTaskCount());
+
+ InOrder inOrder = inOrder(mockCohort1, mockCohort2);
+ inOrder.verify(mockCohort1).canCommit();
+ inOrder.verify(mockCohort2).canCommit();
+ inOrder.verify(mockCohort1).preCommit();
+ inOrder.verify(mockCohort2).preCommit();
+ inOrder.verify(mockCohort1).commit();
+ inOrder.verify(mockCohort2).commit();
+ }
+
+ @Test
+ public void testSubmitWithNegativeCanCommitResponse() throws Exception {
+ doReturn(Futures.immediateFuture(true)).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).abort();
+
+ doReturn(Futures.immediateFuture(false)).when(mockCohort2).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).abort();
+
+ DOMStoreThreePhaseCommitCohort mockCohort3 = mock(DOMStoreThreePhaseCommitCohort.class);
+ doReturn(Futures.immediateFuture(false)).when(mockCohort3).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort3).abort();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2, mockCohort3));
+
+ assertFailure(future, null, mockCohort1, mockCohort2, mockCohort3);
+ }
+
+ private void assertFailure(CheckedFuture<Void, TransactionCommitFailedException> future,
+ Exception expCause, DOMStoreThreePhaseCommitCohort... mockCohorts)
+ throws Exception {
+ try {
+ future.checkedGet(5, TimeUnit.SECONDS);
+ fail("Expected TransactionCommitFailedException");
+ } catch (TransactionCommitFailedException e) {
+ if(expCause != null) {
+ assertSame("Expected cause", expCause, e.getCause());
+ }
+
+ InOrder inOrder = inOrder((Object[])mockCohorts);
+ for(DOMStoreThreePhaseCommitCohort c: mockCohorts) {
+ inOrder.verify(c).abort();
+ }
+ } catch (TimeoutException e) {
+ throw e;
+ }
+ }
+
+ @Test
+ public void testSubmitWithCanCommitException() throws Exception {
+ doReturn(Futures.immediateFuture(true)).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).abort();
+
+ IllegalStateException cause = new IllegalStateException("mock");
+ doReturn(Futures.immediateFailedFuture(cause)).when(mockCohort2).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).abort();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2));
+
+ assertFailure(future, cause, mockCohort1, mockCohort2);
+ }
+
+ @Test
+ public void testSubmitWithPreCommitException() throws Exception {
+ doReturn(Futures.immediateFuture(true)).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).abort();
+
+ doReturn(Futures.immediateFuture(true)).when(mockCohort2).canCommit();
+ IllegalStateException cause = new IllegalStateException("mock");
+ doReturn(Futures.immediateFailedFuture(cause)).when(mockCohort2).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).abort();
+
+ DOMStoreThreePhaseCommitCohort mockCohort3 = mock(DOMStoreThreePhaseCommitCohort.class);
+ doReturn(Futures.immediateFuture(true)).when(mockCohort3).canCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock2"))).
+ when(mockCohort3).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort3).abort();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2, mockCohort3));
+
+ assertFailure(future, cause, mockCohort1, mockCohort2, mockCohort3);
+ }
+
+ @Test
+ public void testSubmitWithCommitException() throws Exception {
+ doReturn(Futures.immediateFuture(true)).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).preCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).commit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort1).abort();
+
+ doReturn(Futures.immediateFuture(true)).when(mockCohort2).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).preCommit();
+ IllegalStateException cause = new IllegalStateException("mock");
+ doReturn(Futures.immediateFailedFuture(cause)).when(mockCohort2).commit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).abort();
+
+ DOMStoreThreePhaseCommitCohort mockCohort3 = mock(DOMStoreThreePhaseCommitCohort.class);
+ doReturn(Futures.immediateFuture(true)).when(mockCohort3).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort3).preCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock2"))).
+ when(mockCohort3).commit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort3).abort();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2, mockCohort3));
+
+ assertFailure(future, cause, mockCohort1, mockCohort2, mockCohort3);
+ }
+
+ @Test
+ public void testSubmitWithAbortException() throws Exception {
+ doReturn(Futures.immediateFuture(true)).when(mockCohort1).canCommit();
+ doReturn(Futures.immediateFailedFuture(new IllegalStateException("mock abort error"))).
+ when(mockCohort1).abort();
+
+ IllegalStateException cause = new IllegalStateException("mock canCommit error");
+ doReturn(Futures.immediateFailedFuture(cause)).when(mockCohort2).canCommit();
+ doReturn(Futures.immediateFuture(null)).when(mockCohort2).abort();
+
+ CheckedFuture<Void, TransactionCommitFailedException> future = coordinator.submit(
+ transaction, Arrays.asList(mockCohort1, mockCohort2));
+
+ assertFailure(future, cause, mockCohort1, mockCohort2);
+ }
+}
private BackwardsCompatibleMountPointManager compatibleMountPointManager;
static final QName qName = QName.create("namespace", "12-12-1212", "mount");
- static final YangInstanceIdentifier id = YangInstanceIdentifier.builder(qName).build();
+ static final YangInstanceIdentifier id = YangInstanceIdentifier.of(qName);
@Before
public void setUp() throws Exception {
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-core-spi</artifactId>
<packaging>bundle</packaging>
<parent>\r
<artifactId>sal-parent</artifactId>\r
<groupId>org.opendaylight.controller</groupId>\r
- <version>1.1-SNAPSHOT</version>\r
+ <version>1.2.0-SNAPSHOT</version>\r
</parent>\r
<artifactId>sal-dom-xsql-config</artifactId>\r
<groupId>org.opendaylight.controller</groupId>\r
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-dom-xsql</artifactId>
<packaging>bundle</packaging>
-package org.opendaylight.controller.md.sal.dom.xsql.jdbc;
+package org.odl.xsql;
import java.sql.Connection;
import java.sql.Driver;
import java.util.Properties;
import java.util.logging.Logger;
+import org.opendaylight.controller.md.sal.dom.xsql.jdbc.JDBCConnection;
+
public class JDBCDriver implements Driver {
public static JDBCDriver drv = new JDBCDriver();
if (url.equals("svr")) {
return new JDBCConnection(true);
} else {
- return new JDBCConnection(url);
+ return new JDBCConnection(url).getProxy();
}
} catch (Exception err) {
err.printStackTrace();
}
+ System.err.println("Error JDBC Connection");
return null;
}
@Override
public DriverPropertyInfo[] getPropertyInfo(String arg0, Properties arg1)
throws SQLException {
- DriverPropertyInfo i = new DriverPropertyInfo("NQL", "NQL");
+ DriverPropertyInfo i = new DriverPropertyInfo("OpenDayLight", "OpenDayLight");
return new DriverPropertyInfo[] {i};
}
}
+ public void loadBluePrint(){
+ try{
+ InputStream in = this.getClass().getClassLoader().getResourceAsStream("BluePrintCache.dat");
+ if(in!=null){
+ this.bluePrint = XSQLBluePrint.load(in);
+ }
+ in.close();
+ }catch(Exception err){
+ err.printStackTrace();
+ }
+ }
+
public static XSQLAdapter getInstance() {
return a;
}
}
public void execute(JDBCResultSet rs) {
+ if(this.domDataBroker==null){
+ rs.setFinished(true);
+ return;
+ }
List<XSQLBluePrintNode> tables = rs.getTables();
List<Object> roots = collectModuleRoots(tables.get(0),LogicalDatastoreType.OPERATIONAL);
roots.addAll(collectModuleRoots(tables.get(0),LogicalDatastoreType.CONFIGURATION));
sout.close();
} catch (Exception err) {
}
+ } else if (input.equals("save")) {
+ XSQLBluePrint.save(this.bluePrint);
} else if (input.equals("tocsv")) {
toCsv = !toCsv;
sout.println("to csv file is " + toCsv);
package org.opendaylight.controller.md.sal.dom.xsql;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.util.Map;
import java.util.Set;
-public class XSQLBluePrint implements DatabaseMetaData {
+public class XSQLBluePrint implements DatabaseMetaData, Serializable {
- public static final String CACHE_FILE_NAME = "BluePrintCache.dat";
+ private static final long serialVersionUID = 1L;
+
+ public static final String CACHE_FILE_NAME = "./BluePrintCache.dat";
private Map<String, XSQLBluePrintNode> tableNameToBluePrint = new HashMap<String, XSQLBluePrintNode>();
- private Map<String, Map<String,XSQLBluePrintNode>> odlNameToBluePrint = new HashMap<String, Map<String,XSQLBluePrintNode>>();
+ private Map<String, Map<String, XSQLBluePrintNode>> odlNameToBluePrint = new HashMap<String, Map<String, XSQLBluePrintNode>>();
private boolean cacheLoadedSuccessfuly = false;
private DatabaseMetaData myProxy = null;
public static final String replaceAll(String source, String toReplace,
- String withThis) {
+ String withThis) {
int index = source.indexOf(toReplace);
int index2 = 0;
StringBuffer result = new StringBuffer();
public XSQLBluePrint() {
}
+ public static void save(XSQLBluePrint bp) {
+ ObjectOutputStream out = null;
+ try {
+ out = new ObjectOutputStream(new DataOutputStream(
+ new FileOutputStream(CACHE_FILE_NAME)));
+ out.writeObject(bp);
+ } catch (Exception err) {
+ err.printStackTrace();
+ } finally {
+ try {
+ out.close();
+ } catch (Exception err) {
+ }
+ }
+ }
+
+ public static XSQLBluePrint load(InputStream ins) {
+ ObjectInputStream in = null;
+ try {
+ in = new ObjectInputStream(new DataInputStream(ins));
+ return (XSQLBluePrint) in.readObject();
+ } catch (Exception err) {
+ err.printStackTrace();
+ } finally {
+ try {
+ in.close();
+ } catch (Exception err) {
+ }
+ }
+ return null;
+ }
+
private class NQLBluePrintProxy implements InvocationHandler {
public Object invoke(Object proxy, Method method, Object[] args)
- throws Throwable {
+ throws Throwable {
System.out.println("Method " + method);
return method.invoke(XSQLBluePrint.this, args);
}
public DatabaseMetaData getProxy() {
if (myProxy == null) {
try {
- myProxy = (DatabaseMetaData) Proxy
- .newProxyInstance(getClass().getClassLoader(),
- new Class[] {DatabaseMetaData.class},
+ myProxy = (DatabaseMetaData) Proxy.newProxyInstance(getClass()
+ .getClassLoader(),
+ new Class[] { DatabaseMetaData.class },
new NQLBluePrintProxy());
} catch (Exception err) {
err.printStackTrace();
return myProxy;
}
- /*
- public void loadBluePrintCache(String hostName) {
- try {
- ObjectInputStream in = new ObjectInputStream(
- new FileInputStream(hostName + "-" + CACHE_FILE_NAME));
- cache = (Map) in.readObject();
- in.close();
- cacheLoadedSuccessfuly = true;
- } catch (Exception err) {
- //err.printStackTrace();
- }
- }*/
-
- public XSQLBluePrintNode[] getBluePrintNodeByODLTableName(String odlTableName) {
- Map<String,XSQLBluePrintNode> map = this.odlNameToBluePrint.get(odlTableName);
- if(map==null) return null;
+ public XSQLBluePrintNode[] getBluePrintNodeByODLTableName(
+ String odlTableName) {
+ Map<String, XSQLBluePrintNode> map = this.odlNameToBluePrint
+ .get(odlTableName);
+ if (map == null)
+ return null;
return map.values().toArray(new XSQLBluePrintNode[map.size()]);
}
}
for (XSQLBluePrintNode n : tableNameToBluePrint.values()) {
- if (n.getBluePrintNodeName().toLowerCase().endsWith(tableName.toLowerCase())) {
+ if (n.getBluePrintNodeName().toLowerCase()
+ .endsWith(tableName.toLowerCase())) {
return n;
}
}
for (XSQLBluePrintNode n : tableNameToBluePrint.values()) {
- if (n.getBluePrintNodeName().toLowerCase().equals(tableName.toLowerCase())) {
+ if (n.getBluePrintNodeName().toLowerCase()
+ .equals(tableName.toLowerCase())) {
return n;
}
}
for (XSQLBluePrintNode n : tableNameToBluePrint.values()) {
- if (n.getBluePrintNodeName().toLowerCase().indexOf(tableName.toLowerCase())!= -1) {
+ if (n.getBluePrintNodeName().toLowerCase()
+ .indexOf(tableName.toLowerCase()) != -1) {
return n;
}
}
return null;
}
-
public boolean isCacheLoaded() {
return cacheLoadedSuccessfuly;
}
private static Map<Class, Set<Class>> superClassMap = new HashMap<Class, Set<Class>>();
public static Set<Class> getInheritance(Class myObjectClass,
- Class returnType) {
+ Class returnType) {
if (returnType != null && myObjectClass.equals(returnType)) {
return new HashSet<Class>();
public void addToBluePrintCache(XSQLBluePrintNode blNode) {
this.tableNameToBluePrint.put(blNode.getBluePrintNodeName(), blNode);
- Map<String,XSQLBluePrintNode> map = this.odlNameToBluePrint.get(blNode.getODLTableName());
- if(map==null){
- map = new HashMap<String,XSQLBluePrintNode>();
- this.odlNameToBluePrint.put(blNode.getODLTableName(),map);
+ Map<String, XSQLBluePrintNode> map = this.odlNameToBluePrint.get(blNode
+ .getODLTableName());
+ if (map == null) {
+ map = new HashMap<String, XSQLBluePrintNode>();
+ this.odlNameToBluePrint.put(blNode.getODLTableName(), map);
}
map.put(blNode.getBluePrintNodeName(), blNode);
}
@Override
public ResultSet getAttributes(String catalog, String schemaPattern,
- String typeNamePattern, String attributeNamePattern)
- throws SQLException {
+ String typeNamePattern, String attributeNamePattern)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getBestRowIdentifier(String catalog, String schema,
- String table, int scope, boolean nullable) throws SQLException {
+ String table, int scope, boolean nullable) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getColumnPrivileges(String catalog, String schema,
- String table, String columnNamePattern) throws SQLException {
+ String table, String columnNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getColumns(String catalog, String schemaPattern,
- String tableNamePattern, String columnNamePattern)
- throws SQLException {
+ String tableNamePattern, String columnNamePattern)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getCrossReference(String parentCatalog,
- String parentSchema, String parentTable, String foreignCatalog,
- String foreignSchema, String foreignTable) throws SQLException {
+ String parentSchema, String parentTable, String foreignCatalog,
+ String foreignSchema, String foreignTable) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public String getDatabaseProductName() throws SQLException {
- return "VNE Query Language";
+ return "OpenDayLight";
}
@Override
}
@Override
- public ResultSet getExportedKeys(String catalog, String schema,
- String table)
- throws SQLException {
+ public ResultSet getExportedKeys(String catalog, String schema, String table)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getFunctionColumns(String catalog, String schemaPattern,
- String functionNamePattern, String columnNamePattern)
- throws SQLException {
+ String functionNamePattern, String columnNamePattern)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getFunctions(String catalog, String schemaPattern,
- String functionNamePattern) throws SQLException {
+ String functionNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
}
@Override
- public ResultSet getImportedKeys(String catalog, String schema,
- String table)
- throws SQLException {
+ public ResultSet getImportedKeys(String catalog, String schema, String table)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getIndexInfo(String catalog, String schema, String table,
- boolean unique, boolean approximate) throws SQLException {
+ boolean unique, boolean approximate) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getPrimaryKeys(String catalog, String schema, String table)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getProcedureColumns(String catalog, String schemaPattern,
- String procedureNamePattern, String columnNamePattern)
- throws SQLException {
+ String procedureNamePattern, String columnNamePattern)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getProcedures(String catalog, String schemaPattern,
- String procedureNamePattern) throws SQLException {
+ String procedureNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getSchemas(String catalog, String schemaPattern)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getSuperTables(String catalog, String schemaPattern,
- String tableNamePattern) throws SQLException {
+ String tableNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getSuperTypes(String catalog, String schemaPattern,
- String typeNamePattern) throws SQLException {
+ String typeNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getTablePrivileges(String catalog, String schemaPattern,
- String tableNamePattern) throws SQLException {
+ String tableNamePattern) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getTables(String catalog, String schemaPattern,
- String tableNamePattern, String[] types) throws SQLException {
+ String tableNamePattern, String[] types) throws SQLException {
return new TablesResultSet(this);
}
@Override
public ResultSet getUDTs(String catalog, String schemaPattern,
- String typeNamePattern, int[] types) throws SQLException {
+ String typeNamePattern, int[] types) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public ResultSet getVersionColumns(String catalog, String schema,
- String table) throws SQLException {
+ String table) throws SQLException {
// TODO Auto-generated method stub
return null;
}
}
@Override
- public boolean supportsCatalogsInPrivilegeDefinitions()
- throws SQLException {
+ public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsConvert(int fromType, int toType)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsDataDefinitionAndDataManipulationTransactions()
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsDataManipulationTransactionsOnly()
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
- public boolean supportsDifferentTableCorrelationNames()
- throws SQLException {
+ public boolean supportsDifferentTableCorrelationNames() throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsResultSetConcurrency(int type, int concurrency)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsResultSetHoldability(int holdability)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
}
@Override
- public boolean supportsStoredFunctionsUsingCallSyntax()
- throws SQLException {
+ public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean supportsTransactionIsolationLevel(int level)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public ResultSet getPseudoColumns(String catalog, String schemaPattern,
- String tableNamePattern, String columnNamePattern)
- throws SQLException {
+ String tableNamePattern, String columnNamePattern)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
import java.io.Serializable;
import java.sql.SQLException;
import java.util.Collection;
+import java.util.HashMap;
import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
public class XSQLBluePrintNode implements Serializable {
private static final long serialVersionUID = 1L;
private Class<?> myInterface = null;
private String myInterfaceString = null;
- private Set<XSQLBluePrintRelation> relations =
- new HashSet<XSQLBluePrintRelation>();
- private Set<XSQLBluePrintNode> inheritingNodes =
- new HashSet<XSQLBluePrintNode>();
+ private Set<XSQLBluePrintRelation> relations = new HashSet<XSQLBluePrintRelation>();
+ private Set<XSQLBluePrintNode> inheritingNodes = new HashSet<XSQLBluePrintNode>();
private Set<XSQLBluePrintNode> children = new HashSet<XSQLBluePrintNode>();
private XSQLBluePrintNode parent = null;
private transient Set<String> parentHierarchySet = null;
private String myInterfaceName = null;
private Set<XSQLColumn> columns = new HashSet<XSQLColumn>();
+ private Map<String, XSQLColumn> origNameToColumn = new HashMap<String, XSQLColumn>();
private transient Object odlNode = null;
private boolean module = false;
private String bluePrintTableName = null;
private String odlTableName = null;
+ private String origName = null;
+
+ public XSQLBluePrintNode(String name, String _origName, int _level) {
+ this.level = _level;
+ this.odlTableName = name;
+ this.bluePrintTableName = name;
+ this.origName = _origName;
+ }
public XSQLBluePrintNode(Class<?> _myInterface, int _level) {
this.myInterface = _myInterface;
this.level = _level;
}
- public XSQLBluePrintNode(Object _odlNode, int _level,XSQLBluePrintNode _parent) {
+ public XSQLBluePrintNode(Object _odlNode, int _level,
+ XSQLBluePrintNode _parent) {
this.odlNode = _odlNode;
this.level = _level;
this.module = XSQLODLUtils.isModule(_odlNode);
this.parent = _parent;
this.bluePrintTableName = XSQLODLUtils.getBluePrintName(_odlNode);
+ this.odlTableName = XSQLODLUtils.getODLNodeName(this.odlNode);
+ }
+ public String getOrigName() {
+ return this.origName;
}
- public String getBluePrintNodeName(){
+ public String getBluePrintNodeName() {
return this.bluePrintTableName;
}
}
for (XSQLBluePrintRelation dtr : this.relations) {
XSQLBluePrintNode parent = dtr.getParent();
- if (!parent.getInterface().equals(this.getInterface()) && !parent
- .getInterface().isAssignableFrom(this.getInterface()) &&
- this.getInterface().isAssignableFrom(parent.getInterface())
- && parent.isModelChild(p)) {
+ if (!parent.getInterface().equals(this.getInterface())
+ && !parent.getInterface().isAssignableFrom(
+ this.getInterface())
+ && this.getInterface().isAssignableFrom(
+ parent.getInterface()) && parent.isModelChild(p)) {
return true;
}
}
}
public void addColumn(Object node, String tableName) {
- XSQLColumn c = new XSQLColumn(node,getBluePrintNodeName(), this);
+ XSQLColumn c = new XSQLColumn(node, getBluePrintNodeName(), this);
+ this.columns.add(c);
+ }
+
+ public XSQLColumn addColumn(String name, String tableName, String origName,
+ String origTableName) {
+ XSQLColumn c = new XSQLColumn(name, tableName, origName, origTableName);
this.columns.add(c);
+ this.origNameToColumn.put(origName, c);
+ return c;
}
public void addColumn(String methodName) {
throw new SQLException("Unknown field name '" + name + "'");
}
-
public void addParent(XSQLBluePrintNode parent, String property) {
try {
if (property.equals("ContainingTPs")) {
return;
}
- //Method m = parent.getInterface().getMethod("get"+property, null);
- //if(!m.getDeclaringClass().equals(parent.getInterface()))
- //return;
- XSQLBluePrintRelation rel =
- new XSQLBluePrintRelation(parent, property, myInterface);
+ // Method m = parent.getInterface().getMethod("get"+property, null);
+ // if(!m.getDeclaringClass().equals(parent.getInterface()))
+ // return;
+ XSQLBluePrintRelation rel = new XSQLBluePrintRelation(parent,
+ property, myInterface);
relations.add(rel);
} catch (Exception err) {
err.printStackTrace();
}
public Set<XSQLBluePrintRelation> getClonedParents() {
- Set<XSQLBluePrintRelation> result =
- new HashSet<XSQLBluePrintRelation>();
+ Set<XSQLBluePrintRelation> result = new HashSet<XSQLBluePrintRelation>();
result.addAll(this.relations);
return result;
}
if (odlNode != null) {
return getBluePrintNodeName();
}
+ if (odlTableName != null) {
+ return odlTableName;
+ }
return "Unknown";
}
XSQLBluePrintNode other = (XSQLBluePrintNode) obj;
if (odlNode != null) {
return getBluePrintNodeName().equals(other.getBluePrintNodeName());
- } else if (this.odlTableName != null) {
+ } else if (this.odlTableName == null && other.odlTableName != null)
+ return false;
+ if (this.odlTableName != null && other.odlTableName == null)
+ return false;
+ else
return this.odlTableName.equals(other.odlTableName);
- } else {
- return other.myInterface.equals(myInterface);
- }
}
@Override
private int charWidth = -1;
private Class type = null;
private transient Object bluePrintNode = null;
+ private String origName = null;
+ private String origTableName = null;
public XSQLColumn(Object odlNode, String _tableName, Object _bluePrintNode) {
this.name = XSQLODLUtils.getNodeNameFromDSN(odlNode);
this.type = XSQLODLUtils.getTypeForODLColumn(odlNode);
}
+ public XSQLColumn(String _name, String _tableName,String _origName, String _origTableName){
+ this.name = _name;
+ this.tableName = _tableName;
+ this.origName = _origName;
+ this.origTableName = _origTableName;
+ }
+
public String getName() {
return name;
}
import java.io.Serializable;
import java.util.Map;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrint;
+
public class JDBCCommand implements Serializable {
public int type = 0;
public static final int TYPE_EXECUTE_QUERY = 1;
public static final int TYPE_QUERY_RECORD = 3;
public static final int TYPE_QUERY_FINISH = 4;
public static final int TYPE_QUERY_ERROR = 5;
+ public static final int TYPE_METADATA = 6;
+ public static final int TYPE_METADATA_REPLY = 7;
private JDBCResultSet rs = null;
private Map record = null;
private int rsID = -1;
private Exception err = null;
+ private XSQLBluePrint bluePrint = null;
+
+ public JDBCCommand() {
+
+ }
+
+ public void setType(int t) {
+ this.type = t;
+ }
public JDBCCommand(Exception _err, int _RSID) {
this.type = TYPE_QUERY_ERROR;
this.rsID = _RSID;
}
+ public JDBCCommand(XSQLBluePrint bl) {
+ this.type = TYPE_METADATA_REPLY;
+ this.bluePrint = bl;
+ }
+
public JDBCCommand(JDBCResultSet _rs, int _type) {
this.type = TYPE_EXECUTE_QUERY;
this.rs = _rs;
public Exception getERROR() {
return this.err;
}
+
+ public XSQLBluePrint getBluePrint() {
+ return this.bluePrint;
+ }
}
import java.util.concurrent.Executor;
import org.opendaylight.controller.md.sal.dom.xsql.XSQLAdapter;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrint;
-public class JDBCConnection extends Thread implements Connection {
+public class JDBCConnection implements Connection, Runnable {
private Socket socket = null;
private DataInputStream in = null;
private DataOutputStream out = null;
private LinkedList<byte[]> queue = new LinkedList<byte[]>();
private XSQLAdapter adapter = null;
+ private XSQLBluePrint metaData = null;
+ private String addr = null;
+ private boolean wasClosed = false;
public JDBCConnection(Socket s, XSQLAdapter _a) {
this.socket = s;
this.adapter = _a;
try {
in = new DataInputStream(
- new BufferedInputStream(s.getInputStream()));
- out = new DataOutputStream(
- new BufferedOutputStream(s.getOutputStream()));
+ new BufferedInputStream(s.getInputStream()));
+ out = new DataOutputStream(new BufferedOutputStream(
+ s.getOutputStream()));
new JDBCObjectReader();
- this.start();
+ new Thread(this).start();
} catch (Exception err) {
err.printStackTrace();
}
}
- public JDBCConnection(String addr) throws Exception {
+ public Connection getProxy() {
+ return this;
+ /*
+ return (Connection) Proxy.newProxyInstance(this.getClass()
+ .getClassLoader(), new Class[] { Connection.class },
+ new JDBCProxy(this));
+ */
+ }
+
+ public JDBCConnection(String _addr) throws Exception {
+ this.addr = _addr;
+ init();
+ }
+
+ private void init() throws Exception {
+ if (addr.startsWith("http://"))
+ addr = addr.substring(7);
+ System.err.print("Address is:" + addr);
socket = new Socket(addr, 40004);
try {
- in = new DataInputStream(
- new BufferedInputStream(socket.getInputStream()));
- out = new DataOutputStream(
- new BufferedOutputStream(socket.getOutputStream()));
+ in = new DataInputStream(new BufferedInputStream(
+ socket.getInputStream()));
+ out = new DataOutputStream(new BufferedOutputStream(
+ socket.getOutputStream()));
new JDBCObjectReader();
- this.start();
+ new Thread(this).start();
} catch (Exception err) {
err.printStackTrace();
}
ServerSocket s = new ServerSocket(50003);
socket = s.accept();
try {
- in = new DataInputStream(
- new BufferedInputStream(socket.getInputStream()));
- out = new DataOutputStream(
- new BufferedOutputStream(socket.getOutputStream()));
+ in = new DataInputStream(new BufferedInputStream(
+ socket.getInputStream()));
+ out = new DataOutputStream(new BufferedOutputStream(
+ socket.getOutputStream()));
new JDBCObjectReader();
- this.start();
+ new Thread(this).start();
} catch (Exception err) {
err.printStackTrace();
}
}
}
-
private boolean isStopped() {
if (adapter != null && adapter.stopped) {
return true;
} catch (Exception err) {
System.out.println("Connection Lost or Closed.");
+ try {
+ out.close();
+ } catch (Exception er) {
+ }
+ out = null;
+ try {
+ in.close();
+ } catch (Exception er) {
+ }
+ in = null;
try {
socket.close();
} catch (Exception err2) {
}
- //err.printStackTrace();
+ socket = null;
}
}
}
public void processCommand(JDBCCommand cmd) {
switch (cmd.getType()) {
- case JDBCCommand.TYPE_EXECUTE_QUERY:
- try {
- JDBCServer.execute(cmd.getRS(), adapter);
- send(new JDBCCommand(cmd.getRS(),
- JDBCCommand.TYPE_QUERY_REPLY));
- QueryUpdater u = new QueryUpdater(cmd.getRS());
- new Thread(u).start();
- } catch (Exception err) {
- send(new JDBCCommand(err, cmd.getRSID()));
- }
- break;
- case JDBCCommand.TYPE_QUERY_REPLY:
- JDBCResultSet rs1 = JDBCStatement.getQuery(cmd.getRS().getID());
- rs1.updateData(cmd.getRS());
- break;
- case JDBCCommand.TYPE_QUERY_RECORD:
- JDBCResultSet rs2 = JDBCStatement.getQuery(cmd.getRSID());
- rs2.addRecord(cmd.getRecord());
- break;
- case JDBCCommand.TYPE_QUERY_FINISH:
- JDBCResultSet rs3 = JDBCStatement.removeQuery(cmd.getRSID());
- rs3.setFinished(true);
- break;
- case JDBCCommand.TYPE_QUERY_ERROR:
- System.err.println("ERROR Executing Query\n");
- cmd.getERROR().printStackTrace();
- JDBCResultSet rs4 = JDBCStatement.removeQuery(cmd.getRSID());
- rs4.setError(cmd.getERROR());
- rs4.setFinished(true);
- synchronized (rs4) {
- rs4.notifyAll();
- }
+ case JDBCCommand.TYPE_METADATA_REPLY:
+ this.metaData = cmd.getBluePrint();
+ synchronized (this) {
+ this.notifyAll();
+ }
+ break;
+ case JDBCCommand.TYPE_METADATA:
+ send(new JDBCCommand(this.adapter.getBluePrint()));
+ break;
+ case JDBCCommand.TYPE_EXECUTE_QUERY:
+ try {
+ JDBCServer.execute(cmd.getRS(), adapter);
+ send(new JDBCCommand(cmd.getRS(), JDBCCommand.TYPE_QUERY_REPLY));
+ QueryUpdater u = new QueryUpdater(cmd.getRS());
+ new Thread(u).start();
+ } catch (Exception err) {
+ send(new JDBCCommand(err, cmd.getRSID()));
+ }
+ break;
+ case JDBCCommand.TYPE_QUERY_REPLY:
+ JDBCResultSet rs1 = JDBCStatement.getQuery(cmd.getRS().getID());
+ rs1.updateData(cmd.getRS());
+ break;
+ case JDBCCommand.TYPE_QUERY_RECORD:
+ JDBCResultSet rs2 = JDBCStatement.getQuery(cmd.getRSID());
+ rs2.addRecord(cmd.getRecord());
+ break;
+ case JDBCCommand.TYPE_QUERY_FINISH:
+ JDBCResultSet rs3 = JDBCStatement.removeQuery(cmd.getRSID());
+ rs3.setFinished(true);
+ break;
+ case JDBCCommand.TYPE_QUERY_ERROR:
+ System.err.println("ERROR Executing Query\n");
+ cmd.getERROR().printStackTrace();
+ JDBCResultSet rs4 = JDBCStatement.removeQuery(cmd.getRSID());
+ rs4.setError(cmd.getERROR());
+ rs4.setFinished(true);
+ synchronized (rs4) {
+ rs4.notifyAll();
+ }
}
}
}
public void send(Object o) {
+
+ if (this.socket == null) {
+ try {
+ init();
+ } catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
try {
ByteArrayOutputStream bout = new ByteArrayOutputStream();
ObjectOutputStream oout = new ObjectOutputStream(bout);
@Override
public void close() throws SQLException {
+ wasClosed = true;
try {
socket.close();
} catch (Exception err) {
@Override
public Array createArrayOf(String typeName, Object[] elements)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public Statement createStatement() throws SQLException {
- return new JDBCStatement(this);
+ return new JDBCStatement(this).getProxy();
}
@Override
public Statement createStatement(int resultSetType,
- int resultSetConcurrency, int resultSetHoldability)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
+ return new JDBCStatement(this).getProxy();
}
@Override
- public Statement createStatement(int resultSetType,
- int resultSetConcurrency)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ public Statement createStatement(int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ return new JDBCStatement(this).getProxy();
}
@Override
public Struct createStruct(String typeName, Object[] attributes)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public DatabaseMetaData getMetaData() throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ if (this.metaData == null) {
+ JDBCCommand cmd = new JDBCCommand();
+ cmd.setType(JDBCCommand.TYPE_METADATA);
+ synchronized (this) {
+ send(cmd);
+ try {
+ this.wait();
+ } catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+ }
+ return metaData;
}
@Override
@Override
public boolean isClosed() throws SQLException {
- // TODO Auto-generated method stub
return false;
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType,
- int resultSetConcurrency, int resultSetHoldability)
- throws SQLException {
+ int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType,
- int resultSetConcurrency) throws SQLException {
+ int resultSetConcurrency) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType,
- int resultSetConcurrency, int resultSetHoldability)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
+ System.err.println("SQL 1=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType,
- int resultSetConcurrency) throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ int resultSetConcurrency) throws SQLException {
+ System.err.println("SQL 2=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ throws SQLException {
+ System.err.println("SQL 3=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ throws SQLException {
+ System.err.println("SQL 4=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ throws SQLException {
+ System.err.println("SQL 5=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ System.err.println("SQL 6=" + sql);
+ return new JDBCStatement(this, sql).getProxy();
}
@Override
@Override
public void setClientInfo(Properties properties)
- throws SQLClientInfoException {
+ throws SQLClientInfoException {
// TODO Auto-generated method stub
}
@Override
public void setClientInfo(String name, String value)
- throws SQLClientInfoException {
+ throws SQLClientInfoException {
// TODO Auto-generated method stub
}
@Override
public void setNetworkTimeout(Executor executor, int milliseconds)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
// TODO Auto-generated method stub
return 0;
}
-}
+}
--- /dev/null
+package org.opendaylight.controller.md.sal.dom.xsql.jdbc;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+
+public class JDBCProxy implements InvocationHandler {
+
+ private Object myObject = null;
+ private Class<?> myObjectClass = null;
+
+ public JDBCProxy(Object obj) {
+ this.myObject = obj;
+ this.myObjectClass = this.myObject.getClass();
+ }
+
+ @Override
+ public Object invoke(Object proxy, Method method, Object[] args)
+ throws Throwable {
+ System.err.println("Class " + this.myObjectClass.getSimpleName()
+ + " Method " + method.getName());
+ return method.invoke(this.myObject, args);
+ }
+
+}
import java.io.Reader;
import java.io.Serializable;
import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import org.opendaylight.controller.md.sal.dom.xsql.XSQLCriteria;
import org.opendaylight.controller.md.sal.dom.xsql.XSQLODLUtils;
-public class JDBCResultSet
- implements Serializable, ResultSet, ResultSetMetaData {
+public class JDBCResultSet implements Serializable, ResultSet,
+ ResultSetMetaData {
private static final long serialVersionUID = -7450200738431047057L;
private String sql = null;
- private List<XSQLBluePrintNode> tablesInQuery =
- new ArrayList<XSQLBluePrintNode>();
- private Map<String, XSQLBluePrintNode> tablesInQueryMap =
- new ConcurrentHashMap<String, XSQLBluePrintNode>();
+ private List<XSQLBluePrintNode> tablesInQuery = new ArrayList<XSQLBluePrintNode>();
+ private Map<String, XSQLBluePrintNode> tablesInQueryMap = new ConcurrentHashMap<String, XSQLBluePrintNode>();
private List<XSQLColumn> fieldsInQuery = new ArrayList<XSQLColumn>();
private transient LinkedList<Map> records = new LinkedList<Map>();
private transient Map currentRecord = null;
private int id = 0;
private static Integer nextID = new Integer(0);
public int numberOfTasks = 0;
- private Map<String, Map<XSQLColumn, List<XSQLCriteria>>> criteria =
- new ConcurrentHashMap<String, Map<XSQLColumn, List<XSQLCriteria>>>();
+ private Map<String, Map<XSQLColumn, List<XSQLCriteria>>> criteria = new ConcurrentHashMap<String, Map<XSQLColumn, List<XSQLCriteria>>>();
private Exception err = null;
private List<Record> EMPTY_RESULT = new LinkedList<Record>();
+ private transient Map<String,JDBCResultSet> subQueries = new HashMap<String,JDBCResultSet>();
+
+ public ResultSet getProxy() {
+ return (ResultSet) Proxy.newProxyInstance(this.getClass().getClassLoader(), new Class[] {ResultSet.class }, new JDBCProxy(this));
+ }
+
+ public void setSQL(String _sql) {
+ this.sql = _sql;
+ }
+
+ public JDBCResultSet addSubQuery(String _sql,String logicalName) {
+ if(subQueries == null)
+ subQueries = new HashMap<String,JDBCResultSet>();
+ JDBCResultSet rs = new JDBCResultSet(_sql);
+ this.subQueries.put(logicalName,rs);
+ return rs;
+ }
+
+ public Map<String,JDBCResultSet> getSubQueries() {
+ if(this.subQueries==null)
+ this.subQueries = new HashMap<>();
+ return this.subQueries;
+ }
public JDBCResultSet(String _sql) {
synchronized (JDBCResultSet.class) {
}
public int isObjectFitCriteria(Map objValues, String tableName) {
- Map<XSQLColumn, List<XSQLCriteria>> tblCriteria = criteria.get(tableName);
+ Map<XSQLColumn, List<XSQLCriteria>> tblCriteria = criteria
+ .get(tableName);
if (tblCriteria == null) {
return 1;
}
for (Map.Entry<XSQLColumn, List<XSQLCriteria>> cc : tblCriteria
- .entrySet()) {
+ .entrySet()) {
for (XSQLCriteria c : cc.getValue()) {
Object value = objValues.get(cc.getKey().toString());
int result = c.checkValue(value);
}
public int isObjectFitCriteria(Object element, Class cls) {
- Map<XSQLColumn, List<XSQLCriteria>> tblCriteria =
- criteria.get(cls.getName());
+ Map<XSQLColumn, List<XSQLCriteria>> tblCriteria = criteria.get(cls
+ .getName());
if (tblCriteria == null) {
return 1;
}
for (Map.Entry<XSQLColumn, List<XSQLCriteria>> cc : tblCriteria
- .entrySet()) {
+ .entrySet()) {
for (XSQLCriteria c : cc.getValue()) {
- int result =
- c.isObjectFitCriteria(element, cc.getKey().getName());
+ int result = c.isObjectFitCriteria(element, cc.getKey()
+ .getName());
if (result == 0) {
return 0;
}
}
}
-
public void addRecord(ArrayList hierarchy) {
Map rec = new HashMap();
for (int i = hierarchy.size() - 1; i >= 0; i--) {
Object element = hierarchy.get(i);
for (XSQLColumn c : fieldsInQuery) {
- if (c.getTableName()
- .equals(element.getClass().getSimpleName())) {
+ if (c.getTableName().equals(element.getClass().getSimpleName())) {
try {
- Method m = element.getClass().getMethod(c.getName(), null);
+ Method m = element.getClass().getMethod(c.getName(),
+ null);
Object value = m.invoke(element, null);
rec.put(c.getName(), value);
} catch (Exception err) {
Map subChildren = XSQLODLUtils.getChildren(node);
Map result = new HashMap();
for (Object stc : subChildren.values()) {
- if (stc.getClass().getName()
- .endsWith("ImmutableAugmentationNode")) {
+ if (stc.getClass().getName().endsWith("ImmutableAugmentationNode")) {
Map values = XSQLODLUtils.getChildren(stc);
for (Object key : values.keySet()) {
Object val = values.get(key);
- if (val.getClass().getName()
- .endsWith("ImmutableLeafNode")) {
+ if (val.getClass().getName().endsWith("ImmutableLeafNode")) {
Object value = XSQLODLUtils.getValue(val);
String k = XSQLODLUtils.getNodeName(val);
if (value != null) {
result.put(bpn.getBluePrintNodeName() + "." + k,
- value.toString());
+ value.toString());
}
}
}
String k = XSQLODLUtils.getNodeName(stc);
Object value = XSQLODLUtils.getValue(stc);
if (value != null) {
- result.put(bpn.getBluePrintNodeName() + "." + k, value.toString());
+ result.put(bpn.getBluePrintNodeName() + "." + k,
+ value.toString());
}
}
}
return result;
}
- private void addToData(Record rec, XSQLBluePrintNode bpn, XSQLBluePrint bluePrint, Map fullRecord) {
- XSQLBluePrintNode eNodes[] = bluePrint.getBluePrintNodeByODLTableName(XSQLODLUtils.getNodeIdentiofier(rec.element));
+ private void addToData(Record rec, XSQLBluePrintNode bpn,
+ XSQLBluePrint bluePrint, Map fullRecord) {
+ XSQLBluePrintNode eNodes[] = bluePrint
+ .getBluePrintNodeByODLTableName(XSQLODLUtils
+ .getNodeIdentiofier(rec.element));
if (bpn != null) {
for (XSQLColumn c : fieldsInQuery) {
- for(XSQLBluePrintNode eNode:eNodes){
- if (((XSQLBluePrintNode) c.getBluePrintNode()).getBluePrintNodeName().equals(eNode.getBluePrintNodeName())) {
- //Object value = Criteria.getValue(rec.element, c.getName());
+ for (XSQLBluePrintNode eNode : eNodes) {
+ if (((XSQLBluePrintNode) c.getBluePrintNode())
+ .getBluePrintNodeName().equals(
+ eNode.getBluePrintNodeName())) {
+ // Object value = Criteria.getValue(rec.element,
+ // c.getName());
String columnName = c.toString();
Object value = fullRecord.get(columnName);
if (value != null) {
return false;
}
- public List<Object> getChildren(Object node, String tableName,XSQLBluePrint bluePrint) {
+ public List<Object> getChildren(Object node, String tableName,
+ XSQLBluePrint bluePrint) {
List<Object> children = XSQLODLUtils.getMChildren(node);
List<Object> result = new LinkedList<Object>();
for (Object child : children) {
String odlNodeName = XSQLODLUtils.getNodeIdentiofier(child);
- if(odlNodeName==null) continue;
+ if (odlNodeName == null)
+ continue;
- XSQLBluePrintNode eNodes[] = bluePrint.getBluePrintNodeByODLTableName(odlNodeName);
- if(eNodes==null) continue;
+ XSQLBluePrintNode eNodes[] = bluePrint
+ .getBluePrintNodeByODLTableName(odlNodeName);
+ if (eNodes == null)
+ continue;
boolean match = false;
- for(XSQLBluePrintNode enode:eNodes){
- if(tableName.startsWith(enode.toString())){
+ for (XSQLBluePrintNode enode : eNodes) {
+ if (tableName.startsWith(enode.toString())) {
match = true;
break;
}
}
- if(!match) continue;
+ if (!match)
+ continue;
if (child.getClass().getName().endsWith("ImmutableContainerNode")) {
result.add(child);
- }else
- if (child.getClass().getName().endsWith("ImmutableAugmentationNode")) {
+ } else if (child.getClass().getName()
+ .endsWith("ImmutableAugmentationNode")) {
List<Object> _children = XSQLODLUtils.getMChildren(child);
for (Object c : _children) {
- if (c.getClass().getName().endsWith("ImmutableContainerNode")) {
+ if (c.getClass().getName()
+ .endsWith("ImmutableContainerNode")) {
result.add(c);
}
}
return result;
}
- public List<Record> addRecords(Object element, XSQLBluePrintNode node,boolean root, String tableName,XSQLBluePrint bluePrint) {
+ public List<Record> addRecords(Object element, XSQLBluePrintNode node,
+ boolean root, String tableName, XSQLBluePrint bluePrint) {
List<Record> result = new LinkedList<Record>();
String nodeID = XSQLODLUtils.getNodeIdentiofier(element);
if (node.getODLTableName().equals(nodeID)) {
- XSQLBluePrintNode bluePrintNode = bluePrint.getBluePrintNodeByODLTableName(nodeID)[0];
+ XSQLBluePrintNode bluePrintNode = bluePrint
+ .getBluePrintNodeByODLTableName(nodeID)[0];
Record rec = new Record();
rec.element = element;
- XSQLBluePrintNode bpn = this.tablesInQueryMap.get(bluePrintNode.getBluePrintNodeName());
- if (this.criteria.containsKey(bluePrintNode.getBluePrintNodeName()) || bpn != null) {
+ XSQLBluePrintNode bpn = this.tablesInQueryMap.get(bluePrintNode
+ .getBluePrintNodeName());
+ if (this.criteria.containsKey(bluePrintNode.getBluePrintNodeName())
+ || bpn != null) {
Map<?, ?> allKeyValues = collectColumnValues(element, bpn);
- if (!(isObjectFitCriteria(allKeyValues, bpn.getBluePrintNodeName()) == 1)) {
+ if (!(isObjectFitCriteria(allKeyValues,
+ bpn.getBluePrintNodeName()) == 1)) {
return EMPTY_RESULT;
}
- addToData(rec, bpn, bluePrint,allKeyValues);
+ addToData(rec, bpn, bluePrint, allKeyValues);
}
if (root) {
addRecord(rec.data);
}
XSQLBluePrintNode parent = node.getParent();
- List<Record> subRecords = addRecords(element, parent, false, tableName,bluePrint);
+ List<Record> subRecords = addRecords(element, parent, false, tableName,
+ bluePrint);
for (Record subRec : subRecords) {
- List<Object> subO = getChildren(subRec.element, tableName,bluePrint);
+ List<Object> subO = getChildren(subRec.element, tableName,
+ bluePrint);
if (subO != null) {
for (Object subData : subO) {
Record rec = new Record();
rec.data.putAll(subRec.data);
String recID = XSQLODLUtils.getNodeIdentiofier(rec.element);
- XSQLBluePrintNode eNodes[] = bluePrint.getBluePrintNodeByODLTableName(recID);
+ XSQLBluePrintNode eNodes[] = bluePrint
+ .getBluePrintNodeByODLTableName(recID);
XSQLBluePrintNode bpn = null;
- for(XSQLBluePrintNode eNode:eNodes){
- bpn = this.tablesInQueryMap.get(eNode.getBluePrintNodeName());
- if(bpn!=null)
+ for (XSQLBluePrintNode eNode : eNodes) {
+ bpn = this.tablesInQueryMap.get(eNode
+ .getBluePrintNodeName());
+ if (bpn != null)
break;
}
boolean isObjectInCriteria = true;
if (bpn != null) {
Map allKeyValues = collectColumnValues(rec.element, bpn);
- if ((isObjectFitCriteria(allKeyValues, bpn.getBluePrintNodeName()) == 1)) {
- addToData(rec, bpn,bluePrint,allKeyValues);
+ if ((isObjectFitCriteria(allKeyValues,
+ bpn.getBluePrintNodeName()) == 1)) {
+ addToData(rec, bpn, bluePrint, allKeyValues);
} else {
isObjectInCriteria = false;
}
if (isObjectInCriteria) {
if (root) {
- if(!rec.data.isEmpty())
+ if (!rec.data.isEmpty())
addRecord(rec.data);
} else {
result.add(rec);
@Override
public BigDecimal getBigDecimal(int columnIndex, int scale)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public BigDecimal getBigDecimal(String columnLabel, int scale)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ throws SQLException {
+ return getObject(columnIndex);
}
@Override
public Object getObject(int columnIndex) throws SQLException {
- return currentRecord
- .get(this.fieldsInQuery.get(columnIndex - 1).toString());
+ return currentRecord.get(this.fieldsInQuery.get(columnIndex - 1)
+ .toString());
}
@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map)
- throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ throws SQLException {
+ return getObject(columnLabel);
}
@Override
@Override
public String getString(int columnIndex) throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ return "Kuku";
}
@Override
public String getString(String columnLabel) throws SQLException {
- // TODO Auto-generated method stub
- return null;
+ return "Kuku";
}
@Override
@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public int getType() throws SQLException {
- // TODO Auto-generated method stub
- return 0;
+ return ResultSet.TYPE_FORWARD_ONLY;
}
@Override
}
@Override
- public InputStream getUnicodeStream(String columnLabel)
- throws SQLException {
+ public InputStream getUnicodeStream(String columnLabel) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
- public void updateAsciiStream(String columnLabel, InputStream x,
- long length)
- throws SQLException {
+ public void updateAsciiStream(String columnLabel, InputStream x, long length)
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
- public void updateBinaryStream(String columnLabel, InputStream x,
- int length)
- throws SQLException {
+ public void updateBinaryStream(String columnLabel, InputStream x, int length)
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x,
- long length) throws SQLException {
+ long length) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
}
@Override
- public void updateBlob(int columnIndex, InputStream inputStream,
- long length)
- throws SQLException {
+ public void updateBlob(int columnIndex, InputStream inputStream, long length)
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream,
- long length) throws SQLException {
+ long length) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateBoolean(String columnLabel, boolean x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(int columnIndex, Reader x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader,
- int length) throws SQLException {
+ int length) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader,
- long length) throws SQLException {
+ long length) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateClob(int columnIndex, Reader reader, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateClob(String columnLabel, Reader reader, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateClob(String columnLabel, Reader reader)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader,
- long length) throws SQLException {
+ long length) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNClob(int columnIndex, Reader reader, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
- public void updateNClob(int columnIndex, Reader reader)
- throws SQLException {
+ public void updateNClob(int columnIndex, Reader reader) throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNClob(String columnLabel, NClob nClob)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNClob(String columnLabel, Reader reader, long length)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNClob(String columnLabel, Reader reader)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNString(int columnIndex, String nString)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateNString(String columnLabel, String nString)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
}
@Override
public int getColumnType(int column) throws SQLException {
- // TODO Auto-generated method stub
- return 0;
+ return 12;
}
@Override
@Override
public <T> T getObject(String columnLabel, Class<T> type)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return null;
}
-
-
- ////Metadata
-
-
+ // //Metadata
}
package org.opendaylight.controller.md.sal.dom.xsql.jdbc;
-import org.opendaylight.controller.md.sal.dom.xsql.XSQLAdapter;
-import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrint;
-import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrintNode;
-import org.opendaylight.controller.md.sal.dom.xsql.XSQLColumn;
-import org.opendaylight.controller.md.sal.dom.xsql.XSQLCriteria;
-
import java.net.ServerSocket;
import java.net.Socket;
import java.sql.SQLException;
import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.ConcurrentHashMap;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLAdapter;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrint;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrintNode;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLColumn;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLCriteria;
+
public class JDBCServer extends Thread {
private ServerSocket socket = null;
private XSQLAdapter adapter = null;
}
public static void execute(JDBCResultSet rs, XSQLAdapter adapter)
- throws SQLException {
- parseTables(rs, adapter.getBluePrint());
- parseFields(rs, adapter.getBluePrint());
- parseCriteria(rs, adapter.getBluePrint());
+ throws SQLException {
+ if(rs.getSQL().toLowerCase().trim().equals("select 1")){
+ rs.setFinished(true);
+ return;
+ }
+ checkAndBreakSubQueries(rs, adapter);
+ if (rs.getSubQueries().size() == 0) {
+ parseTables(rs, adapter.getBluePrint());
+ parseFields(rs, adapter.getBluePrint());
+ parseCriteria(rs, adapter.getBluePrint());
+ try {
+ adapter.execute(rs);
+ } catch (Exception err) {
+ throw new SQLException("Error", err);
+ }
+ } else {
+ parseExternalQuery(rs);
+ }
+ }
+
+ public static void parseExternalQuery(JDBCResultSet rs) throws SQLException {
+ String sql = rs.getSQL();
+ for (Map.Entry<String, JDBCResultSet> entry : rs.getSubQueries()
+ .entrySet()) {
+ int index = sql.toLowerCase().indexOf(entry.getValue().getSQL());
+ String extSql = sql.substring(0, index);
+ index = extSql.lastIndexOf("(");
+ extSql = extSql.substring(0, index);
+ System.out.println("External SQL=" + extSql);
+ parseLogicalFields(extSql, rs);
+ }
+ }
+
+ public static void parseLogicalFields(String sql, JDBCResultSet rs)
+ throws SQLException {
+ if(sql.trim().toLowerCase().equals("select * from")){
+ for (Map.Entry<String, JDBCResultSet> entry : rs.getSubQueries().entrySet()) {
+ for(XSQLBluePrintNode node:entry.getValue().getTables()){
+ rs.addTableToQuery(node);
+ }
+ rs.getFields().addAll(entry.getValue().getFields());
+ while (entry.getValue().next()) {
+ Map rec = entry.getValue().getCurrent();
+ Map newRec = new HashMap();
+ newRec.putAll(rec);
+ rs.addRecord(newRec);
+ }
+ }
+ rs.setFinished(true);
+ return;
+ }
+
+ Map<String, XSQLBluePrintNode> logicalNameToNode = new HashMap<String, XSQLBluePrintNode>();
+ Map<String, String> origNameToName = new HashMap<String, String>();
+ List<XSQLColumn> columnOrder = new ArrayList<>();
+ int nextLogField = addNextLogicalField(sql, 0,
+ logicalNameToNode, origNameToName,columnOrder);
+ int next = sql.toLowerCase().indexOf(" as ", nextLogField);
+ while (next != -1) {
+ nextLogField = addNextLogicalField(sql, nextLogField + 1,
+ logicalNameToNode, origNameToName,columnOrder);
+ next = sql.toLowerCase().indexOf(" as ", nextLogField + 1);
+ }
+
+ for (XSQLBluePrintNode node : logicalNameToNode.values()) {
+ rs.addTableToQuery(node);
+ }
+ rs.getFields().addAll(columnOrder);
+ for (Map.Entry<String, JDBCResultSet> entry : rs.getSubQueries().entrySet()) {
+ while (entry.getValue().next()) {
+ Map rec = entry.getValue().getCurrent();
+ Map newRec = new HashMap();
+ for (Iterator iter = rec.entrySet().iterator(); iter.hasNext();) {
+ Map.Entry e = (Map.Entry) iter.next();
+ String key = (String) e.getKey();
+ Object value = e.getValue();
+ String logicalKey = origNameToName.get(key);
+ if (value != null && logicalKey != null) {
+ newRec.put(logicalKey, value);
+ }
+ }
+ rs.addRecord(newRec);
+ }
+ }
+ rs.setFinished(true);
+ }
+
+ public static void main(String args[]) {
+ String sql = "SELECT DISTINCT"
+ + "\"LOGICAL_TABLE_1\".\"nodes/node.id\" AS \"COL0\"\n"
+ + ",\"LOGICAL_TABLE_1\".\"nodes/node.id\" AS \"COL1\"\n"
+ + ",\"LOGICAL_TABLE_1\".\"nodes/node.id\" AS \"COL2\"\n"
+ + "FROM\n"
+ + "(select * from nodes/node;) \"LOGICAL_TABLE_1\"\n";
+ JDBCResultSet rs = new JDBCResultSet(sql);
try {
- adapter.execute(rs);
+ parseLogicalFields(sql, rs);
} catch (Exception err) {
- throw new SQLException("Error", err);
+ err.printStackTrace();
+ }
+ }
+
+ public static int addNextLogicalField(String sql, int startIndex,
+ Map<String, XSQLBluePrintNode> logicalNameToNode,
+ Map<String, String> origNameToName, List<XSQLColumn> columnOrder) {
+ int index1 = sql.indexOf("\"", startIndex);
+ int index2 = sql.indexOf("\".\"", index1);
+ int index3 = sql.indexOf("\"", index2 + 3);
+ int index4 = sql.toLowerCase().indexOf(" as ", startIndex);
+ int index5 = sql.indexOf("\"", index4);
+ int index6 = sql.indexOf("\"", index5 + 1);
+
+ String tblName = sql.substring(index1 + 1, index2);
+ String origFieldNameFull = sql.substring(index2 + 3, index3);
+ String origTableName = "";
+ String origFieldName = "";
+ if (origFieldNameFull.indexOf(".") != -1) {
+ origTableName = origFieldNameFull.substring(0,origFieldNameFull.indexOf("."));
+ origFieldName = origFieldNameFull.substring(origFieldNameFull.indexOf(".") + 1);
+ }
+ String logicalFieldName = sql.substring(index5 + 1, index6);
+ XSQLBluePrintNode node = logicalNameToNode.get(tblName);
+ if (node == null) {
+ node = new XSQLBluePrintNode(tblName, origTableName, 0);
+ logicalNameToNode.put(tblName, node);
+ }
+ columnOrder.add(node.addColumn(logicalFieldName, tblName, origFieldName, origTableName));
+ origNameToName.put(origFieldNameFull, tblName + "." + logicalFieldName);
+ return index6;
+ }
+
+ public static void checkAndBreakSubQueries(JDBCResultSet rs,XSQLAdapter adapter) throws SQLException {
+ String sql = rs.getSQL().toLowerCase();
+ int index = sql.indexOf("select");
+ if (index == -1)
+ throw new SQLException("Select statement is missing...");
+ int index2 = sql.indexOf("select", index + 6);
+ if (index2 != -1) {
+ int startSubQuery = index2;
+ for (int i = startSubQuery; i >= 0; i--) {
+ if (sql.charAt(i) == '(') {
+ startSubQuery = i;
+ break;
+ }
+ }
+ int braketCount = 0;
+ int endSubQuery = startSubQuery;
+ do {
+ if (sql.charAt(endSubQuery) == '(')
+ braketCount++;
+ else if (sql.charAt(endSubQuery) == ')')
+ braketCount--;
+ endSubQuery++;
+ } while (braketCount > 0 || endSubQuery == sql.length());
+ String subQuerySQL = sql.substring(startSubQuery + 1,endSubQuery - 1);
+ if(rs.getSQL().toLowerCase().substring(0,startSubQuery).trim().equals("select * from")){
+ rs.setSQL(subQuerySQL);
+ return;
+ }
+ index = sql.indexOf("\"", endSubQuery);
+ index2 = sql.indexOf("\"", index + 1);
+ if(index==-1){
+ index = endSubQuery;
+ index2 = sql.length();
+ }
+ String logicalName = rs.getSQL().substring(index + 1, index2).trim();
+ JDBCResultSet subRS = rs.addSubQuery(subQuerySQL, logicalName);
+ JDBCServer.execute(subRS, adapter);
}
}
public static void parseTables(JDBCResultSet rs, XSQLBluePrint bp)
- throws SQLException {
+ throws SQLException {
String lowSQL = rs.getSQL().toLowerCase();
int from = lowSQL.indexOf("from");
int where = lowSQL.indexOf("where");
String tableName = tokens.nextToken().trim();
XSQLBluePrintNode table = bp.getBluePrintNodeByTableName(tableName);
if (table == null) {
- throw new SQLException(
- "Unknown table name \"" + tableName + "\"");
+ throw new SQLException("Unknown table name \"" + tableName
+ + "\"");
}
rs.addTableToQuery(table);
}
}
public static void addCriteria(XSQLColumn col, XSQLCriteria c,
- JDBCResultSet rs) {
- Map<XSQLColumn, List<XSQLCriteria>> tblCriteria =
- rs.getCriteria().get(col.getTableName());
+ JDBCResultSet rs) {
+ Map<XSQLColumn, List<XSQLCriteria>> tblCriteria = rs.getCriteria().get(
+ col.getTableName());
if (tblCriteria == null) {
- tblCriteria =
- new ConcurrentHashMap<XSQLColumn, List<XSQLCriteria>>();
+ tblCriteria = new ConcurrentHashMap<XSQLColumn, List<XSQLCriteria>>();
rs.getCriteria().put(col.getTableName(), tblCriteria);
}
List<XSQLCriteria> lstCriteria = tblCriteria.get(col);
}
public static void parseFields(JDBCResultSet rs, XSQLBluePrint bp)
- throws SQLException {
+ throws SQLException {
String lowSQL = rs.getSQL().toLowerCase();
if (!lowSQL.startsWith("select")) {
throw new SQLException("Missing 'select' statement.");
return;
}
if (token.indexOf(".") != -1) {
- XSQLBluePrintNode tbl = bp.getBluePrintNodeByTableName(
- token.substring(0, token.indexOf(".")).trim());
+ XSQLBluePrintNode tbl = bp.getBluePrintNodeByTableName(token
+ .substring(0, token.indexOf(".")).trim());
String p = token.substring(token.indexOf(".") + 1);
if (p.equals("*")) {
for (XSQLColumn c : tbl.getColumns()) {
}
}
if (col == null) {
- throw new SQLException(
- "Unknown field name '" + token + "'.");
+ throw new SQLException("Unknown field name '" + token
+ + "'.");
}
rs.getFields().add(col);
String lowSQL = rs.getSQL().toLowerCase();
int where = lowSQL.indexOf("where");
int order = lowSQL.indexOf("order");
- int subQuery = lowSQL.indexOf("select", 2);
int whereTo = lowSQL.indexOf(";");
if (where == -1) {
return;
}
- if (where != -1 && subQuery != -1 && subQuery < where) {
- return;
- }
-
- if (order != -1 && subQuery != -1 && order < subQuery) {
- whereTo = order;
- } else if (order != -1 && subQuery != -1 && order > subQuery) {
- whereTo = subQuery;
- } else if (order != -1) {
+ if (order != -1) {
whereTo = order;
- } else if (subQuery != -1) {
- whereTo = subQuery;
}
- String whereStatement =
- rs.getSQL().substring(where + 5, whereTo).trim();
+
+ if(whereTo==-1)
+ whereTo=lowSQL.length();
+
+ String whereStatement = rs.getSQL().substring(where + 5, whereTo)
+ .trim();
XSQLCriteria cr = new XSQLCriteria(whereStatement, -1);
for (XSQLBluePrintNode tbl : rs.getTables()) {
for (XSQLColumn col : tbl.getColumns()) {
package org.opendaylight.controller.md.sal.dom.xsql.jdbc;
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
import java.sql.Connection;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.ParameterMetaData;
+import java.sql.PreparedStatement;
+import java.sql.Ref;
import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
-import java.sql.Statement;
+import java.sql.SQLXML;
+import java.sql.Time;
+import java.sql.Timestamp;
import java.util.ArrayList;
+import java.util.Calendar;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-public class JDBCStatement implements Statement {
+public class JDBCStatement implements PreparedStatement {
private JDBCResultSet rs = null;
private transient JDBCConnection connection = null;
- private static Map<Integer, JDBCResultSet> queries =
- new ConcurrentHashMap<Integer, JDBCResultSet>();
+ private static Map<Integer, JDBCResultSet> queries = new ConcurrentHashMap<Integer, JDBCResultSet>();
+ private String sql = null;
+
+ public JDBCStatement(JDBCConnection con,String _sql) {
+ this.connection = con;
+ this.sql = _sql;
+ }
public JDBCStatement(JDBCConnection con) {
this.connection = con;
}
+ public void setSQL(String _sql){
+ this.sql = _sql;
+ }
+
public JDBCStatement() {
}
+ public PreparedStatement getProxy() {
+ return this;
+ /*
+ return (PreparedStatement) Proxy.newProxyInstance(this.getClass()
+ .getClassLoader(), new Class[] { PreparedStatement.class },
+ new JDBCProxy(this));
+ */
+ }
+
public static JDBCResultSet getQuery(int id) {
return queries.get(id);
}
rs = new JDBCResultSet(_sql);
queries.put(rs.getID(), rs);
synchronized (rs) {
- this.connection
- .send(new JDBCCommand(rs, JDBCCommand.TYPE_EXECUTE_QUERY));
+ this.connection.send(new JDBCCommand(rs,
+ JDBCCommand.TYPE_EXECUTE_QUERY));
try {
rs.wait();
} catch (Exception err) {
throw ((SQLException) rs.getError());
}
}
- return rs;
+ return rs.getProxy();
}
@Override
@Override
public boolean execute(String sql, int autoGeneratedKeys)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
- public boolean execute(String sql, int[] columnIndexes)
- throws SQLException {
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean execute(String sql, String[] columnNames)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return false;
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public int executeUpdate(String sql, int[] columnIndexes)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public int executeUpdate(String sql, String[] columnNames)
- throws SQLException {
+ throws SQLException {
// TODO Auto-generated method stub
return 0;
}
@Override
public int getMaxRows() throws SQLException {
- // TODO Auto-generated method stub
- return 0;
+ return 200;
}
@Override
return false;
}
+ @Override
+ public ResultSet executeQuery() throws SQLException {
+ return this.executeQuery(this.sql);
+ }
+
+ @Override
+ public int executeUpdate() throws SQLException {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ @Override
+ public void setNull(int parameterIndex, int sqlType) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBoolean(int parameterIndex, boolean x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setByte(int parameterIndex, byte x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setShort(int parameterIndex, short x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setInt(int parameterIndex, int x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setLong(int parameterIndex, long x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setFloat(int parameterIndex, float x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setDouble(int parameterIndex, double x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBigDecimal(int parameterIndex, BigDecimal x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setString(int parameterIndex, String x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBytes(int parameterIndex, byte[] x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setDate(int parameterIndex, Date x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setTime(int parameterIndex, Time x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setTimestamp(int parameterIndex, Timestamp x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setUnicodeStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void clearParameters() throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x, int targetSqlType)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x) throws SQLException {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public boolean execute() throws SQLException {
+ // TODO Auto-generated method stub
+ return false;
+ }
+
+ @Override
+ public void addBatch() throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader, int length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setRef(int parameterIndex, Ref x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, Blob x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Clob x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setArray(int parameterIndex, Array x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public ResultSetMetaData getMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setDate(int parameterIndex, Date x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setTime(int parameterIndex, Time x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNull(int parameterIndex, int sqlType, String typeName)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setURL(int parameterIndex, URL x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public ParameterMetaData getParameterMetaData() throws SQLException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setRowId(int parameterIndex, RowId x) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNString(int parameterIndex, String value)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNCharacterStream(int parameterIndex, Reader value,
+ long length) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, NClob value) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, InputStream inputStream, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setSQLXML(int parameterIndex, SQLXML xmlObject)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x, int targetSqlType,
+ int scaleOrLength) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x, long length)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader,
+ long length) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNCharacterStream(int parameterIndex, Reader value)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, InputStream inputStream)
+ throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, Reader reader) throws SQLException {
+ // TODO Auto-generated method stub
+
+ }
}
--- /dev/null
+package org.opendaylight.xsql.test;
+
+import java.io.InputStream;
+import java.sql.SQLException;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLAdapter;
+import org.opendaylight.controller.md.sal.dom.xsql.XSQLBluePrint;
+import org.opendaylight.controller.md.sal.dom.xsql.jdbc.JDBCResultSet;
+import org.opendaylight.controller.md.sal.dom.xsql.jdbc.JDBCServer;
+
+/**
+ * Tests for XSQL query parsing: table, field and criteria extraction from SQL
+ * statements, driven by a pre-serialized blue print ("BluePrintCache.dat")
+ * loaded from the test classpath.
+ */
+public class XSQLTest {
+
+    XSQLBluePrint bluePrint = null;
+
+    /**
+     * Loads the blue print cache from the test classpath. If the resource is
+     * missing, the tests run against a null blue print instead of crashing.
+     */
+    @Before
+    public void before() {
+        InputStream in = this.getClass().getClassLoader().getResourceAsStream("BluePrintCache.dat");
+        if (in == null) {
+            // The original closed 'in' unconditionally after this branch,
+            // which would NPE whenever the resource is absent.
+            log("Can't find Blue Print!");
+            return;
+        }
+        try {
+            bluePrint = XSQLBluePrint.load(in);
+            log("Loaded Blue Print!");
+        } catch (Exception err) {
+            err.printStackTrace();
+        } finally {
+            try {
+                in.close();
+            } catch (Exception err) {
+                err.printStackTrace();
+            }
+        }
+    }
+
+    /** A bare select without a WHERE clause must yield no criteria. */
+    @Test
+    public void testQueryParsingSimpleNoCriteria() {
+        String sql = "select * from nodes/node;";
+        JDBCResultSet rs = new JDBCResultSet(sql);
+        parseTables(sql, bluePrint, rs);
+        parseFields(sql, bluePrint, rs);
+        JDBCServer.parseCriteria(rs, bluePrint);
+        if (rs.getCriteria().isEmpty()) {
+            log("Test Criteria parsing of \"" + sql + "\" Passed!");
+        } else {
+            log("Test Criteria parsing of \"" + sql + "\" Failed!");
+            Assert.fail("Unexpected criteria parsed from: " + sql);
+        }
+    }
+
+    /** Multi-table, multi-field select without WHERE must yield no criteria. */
+    @Test
+    public void testQueryParsingComplexNoCriteria() {
+        String sql = "select nodes/node.id,nodes/node/node-connector.id,nodes/node/node-connector.hardware-address from nodes/node,nodes/node/node-connector;";
+        JDBCResultSet rs = new JDBCResultSet(sql);
+        parseTables(sql, bluePrint, rs);
+        parseFields(sql, bluePrint, rs);
+        JDBCServer.parseCriteria(rs, bluePrint);
+        if (rs.getCriteria().isEmpty()) {
+            log("Test Criteria parsing of \"" + sql + "\" Passed!");
+        } else {
+            log("Test Criteria parsing of \"" + sql + "\" Failed!");
+            Assert.fail("Unexpected criteria parsed from: " + sql);
+        }
+    }
+
+    /** Multi-table select with a LIKE criterion must yield non-empty criteria. */
+    @Test
+    public void testQueryParsingComplexWithCriteria() {
+        String sql = "select nodes/node.id,nodes/node/node-connector.id,nodes/node/node-connector.hardware-address from nodes/node,nodes/node/node-connector where hardware-address like 'AB';";
+        JDBCResultSet rs = new JDBCResultSet(sql);
+        parseTables(sql, bluePrint, rs);
+        parseFields(sql, bluePrint, rs);
+        JDBCServer.parseCriteria(rs, bluePrint);
+        if (!rs.getCriteria().isEmpty()) {
+            log("Test Criteria parsing of \"" + sql + "\" Passed!");
+        } else {
+            log("Test Criteria parsing of \"" + sql + "\" Failed!");
+            Assert.fail("Expected criteria to be parsed from: " + sql);
+        }
+    }
+
+    /** Simple select with a LIKE criterion must yield non-empty criteria. */
+    @Test
+    public void testQueryParsingSimpleWithCriteria() {
+        String sql = "select * from nodes/node where nodes/node.id like 'something...';";
+        JDBCResultSet rs = new JDBCResultSet(sql);
+        parseTables(sql, bluePrint, rs);
+        parseFields(sql, bluePrint, rs);
+        JDBCServer.parseCriteria(rs, bluePrint);
+        if (!rs.getCriteria().isEmpty()) {
+            log("Test Criteria parsing of \"" + sql + "\" Passed!");
+        } else {
+            log("Test Criteria parsing of \"" + sql + "\" Failed!");
+            Assert.fail("Expected criteria to be parsed from: " + sql);
+        }
+    }
+
+    // Parses table names into rs; fails the calling test on SQLException.
+    private static void parseTables(String sql, XSQLBluePrint bp, JDBCResultSet rs) {
+        try {
+            JDBCServer.parseTables(rs, bp);
+            log("Test Table parsing of \"" + sql + "\" Passed!");
+        } catch (SQLException err) {
+            log("Test Table parsing of \"" + sql + "\" Failed!");
+            err.printStackTrace();
+            Assert.fail("Table parsing failed for: " + sql);
+        }
+    }
+
+    /**
+     * Logical-table (sub-query) handling: the outer query must expose three
+     * fields over one logical table, and the inner query must parse with
+     * criteria just like the plain complex-with-criteria case.
+     */
+    @Test
+    public void testQueryParsingComplexWithCriteriaAndGrouping() {
+
+        String sub_sql = "select nodes/node.id,nodes/node/node-connector.id,nodes/node/node-connector.hardware-address from nodes/node,nodes/node/node-connector where hardware-address like 'AB';";
+
+        String sql = "SELECT DISTINCT"
+                + "\"LOGICAL_TABLE_1\".\"nodes/node.id\" AS \"COL0\"\n"
+                + ",\"LOGICAL_TABLE_1\".\"nodes/node.address\" AS \"COL1\"\n"
+                + ",\"LOGICAL_TABLE_1\".\"nodes/node/node-connector.hardware-address\" AS \"COL2\"\n"
+                + "FROM\n"
+                + "(" + sub_sql + ") \"LOGICAL_TABLE_1\"\n";
+
+        JDBCResultSet rs = new JDBCResultSet(sql);
+        XSQLAdapter.getInstance().loadBluePrint();
+        try {
+            JDBCServer.checkAndBreakSubQueries(rs, XSQLAdapter.getInstance());
+            if (rs.getSubQueries().isEmpty()) {
+                log("Logical table parsing for " + sql + " Failed!");
+                // Originally only logged; a missing sub-query is a failure.
+                Assert.fail("Expected a sub-query to be extracted from: " + sql);
+            } else {
+                JDBCServer.parseExternalQuery(rs);
+                log("Fields=" + rs.getFields().size());
+                // JUnit's assertEquals takes (expected, actual); the original
+                // had the arguments reversed, which garbles failure messages.
+                Assert.assertEquals(3, rs.getFields().size());
+                Assert.assertEquals(1, rs.getTables().size());
+                Assert.assertEquals("LOGICAL_TABLE_1", rs.getTables().get(0).getODLTableName());
+
+                JDBCResultSet subRS = rs.getSubQueries().values().iterator().next();
+                parseTables(sql, bluePrint, subRS);
+                parseFields(sql, bluePrint, subRS);
+                JDBCServer.parseCriteria(subRS, bluePrint);
+                if (!subRS.getCriteria().isEmpty()) {
+                    log("Test Criteria parsing of \"" + sql + "\" Passed!");
+                } else {
+                    log("Test Criteria parsing of \"" + sql + "\" Failed!");
+                    Assert.fail("Expected criteria in sub-query of: " + sql);
+                }
+            }
+        } catch (SQLException err) {
+            err.printStackTrace();
+            // Originally swallowed; a parse error must fail the test.
+            Assert.fail("Sub-query handling threw: " + err.getMessage());
+        }
+    }
+
+    // Parses field names into rs; fails the calling test on SQLException.
+    private static void parseFields(String sql, XSQLBluePrint bp, JDBCResultSet rs) {
+        try {
+            JDBCServer.parseFields(rs, bp);
+            log("Test Fields parsing of \"" + sql + "\" Passed!");
+        } catch (SQLException err) {
+            log("Test Fields parsing of \"" + sql + "\" Failed!");
+            err.printStackTrace();
+            Assert.fail("Field parsing failed for: " + sql);
+        }
+    }
+
+    // Uniform test-output prefix for console logs.
+    private static void log(String str) {
+        System.out.print("*** XSQL Tests -");
+        System.out.println(str);
+    }
+}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-inmemory-datastore</artifactId>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+
+/**
+ * Three-phase commit cohort which forwards every phase to a delegate cohort
+ * and additionally reports the final outcome of the commit future back to the
+ * owning DOMStoreTransactionChainImpl so the chain can update its state.
+ */
+final class ChainedTransactionCommitImpl implements DOMStoreThreePhaseCommitCohort {
+ // Transaction whose outcome is reported back to the chain.
+ private final SnapshotBackedWriteTransaction transaction;
+ // Cohort that performs the actual three-phase commit work.
+ private final DOMStoreThreePhaseCommitCohort delegate;
+ // Owning chain, notified on commit success or failure.
+ private final DOMStoreTransactionChainImpl txChain;
+
+ protected ChainedTransactionCommitImpl(final SnapshotBackedWriteTransaction transaction,
+ final DOMStoreThreePhaseCommitCohort delegate, final DOMStoreTransactionChainImpl txChain) {
+ this.transaction = Preconditions.checkNotNull(transaction);
+ this.delegate = Preconditions.checkNotNull(delegate);
+ this.txChain = Preconditions.checkNotNull(txChain);
+ }
+
+ @Override
+ public ListenableFuture<Boolean> canCommit() {
+ return delegate.canCommit();
+ }
+
+ @Override
+ public ListenableFuture<Void> preCommit() {
+ return delegate.preCommit();
+ }
+
+ @Override
+ public ListenableFuture<Void> abort() {
+ return delegate.abort();
+ }
+
+ @Override
+ public ListenableFuture<Void> commit() {
+ ListenableFuture<Void> commitFuture = delegate.commit();
+ // Hook the delegate's commit future so the chain learns whether this
+ // transaction succeeded or failed once the asynchronous commit settles.
+ Futures.addCallback(commitFuture, new FutureCallback<Void>() {
+ @Override
+ public void onFailure(final Throwable t) {
+ txChain.onTransactionFailed(transaction, t);
+ }
+
+ @Override
+ public void onSuccess(final Void result) {
+ txChain.onTransactionCommited(transaction);
+ }
+ });
+ return commitFuture;
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Preconditions;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+import org.opendaylight.controller.md.sal.dom.store.impl.SnapshotBackedWriteTransaction.TransactionReadyPrototype;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeModification;
+import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeSnapshot;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+final class DOMStoreTransactionChainImpl extends TransactionReadyPrototype implements DOMStoreTransactionChain {
+ private static abstract class State {
+ /**
+ * Allocate a new snapshot.
+ *
+ * @return A new snapshot
+ */
+ protected abstract DataTreeSnapshot getSnapshot();
+ }
+
+ private static final class Idle extends State {
+ private final InMemoryDOMDataStore store;
+
+ Idle(final InMemoryDOMDataStore store) {
+ this.store = Preconditions.checkNotNull(store);
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ return store.takeSnapshot();
+ }
+ }
+
+ /**
+ * We have a transaction out there.
+ */
+ private static final class Allocated extends State {
+ private static final AtomicReferenceFieldUpdater<Allocated, DataTreeSnapshot> SNAPSHOT_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(Allocated.class, DataTreeSnapshot.class, "snapshot");
+ private final DOMStoreWriteTransaction transaction;
+ private volatile DataTreeSnapshot snapshot;
+
+ Allocated(final DOMStoreWriteTransaction transaction) {
+ this.transaction = Preconditions.checkNotNull(transaction);
+ }
+
+ public DOMStoreWriteTransaction getTransaction() {
+ return transaction;
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ final DataTreeSnapshot ret = snapshot;
+ Preconditions.checkState(ret != null, "Previous transaction %s is not ready yet", transaction.getIdentifier());
+ return ret;
+ }
+
+ void setSnapshot(final DataTreeSnapshot snapshot) {
+ final boolean success = SNAPSHOT_UPDATER.compareAndSet(this, null, snapshot);
+ Preconditions.checkState(success, "Transaction %s has already been marked as ready", transaction.getIdentifier());
+ }
+ }
+
+ /**
+ * Chain is logically shut down, no further allocation allowed.
+ */
+ private static final class Shutdown extends State {
+ private final String message;
+
+ Shutdown(final String message) {
+ this.message = Preconditions.checkNotNull(message);
+ }
+
+ @Override
+ protected DataTreeSnapshot getSnapshot() {
+ throw new IllegalStateException(message);
+ }
+ }
+
+ private static final AtomicReferenceFieldUpdater<DOMStoreTransactionChainImpl, State> STATE_UPDATER =
+ AtomicReferenceFieldUpdater.newUpdater(DOMStoreTransactionChainImpl.class, State.class, "state");
+ private static final Logger LOG = LoggerFactory.getLogger(DOMStoreTransactionChainImpl.class);
+ private static final Shutdown CLOSED = new Shutdown("Transaction chain is closed");
+ private static final Shutdown FAILED = new Shutdown("Transaction chain has failed");
+ private final InMemoryDOMDataStore store;
+ private final Idle idleState;
+ private volatile State state;
+
+ DOMStoreTransactionChainImpl(final InMemoryDOMDataStore store) {
+ this.store = Preconditions.checkNotNull(store);
+ idleState = new Idle(store);
+ state = idleState;
+ }
+
+ private Entry<State, DataTreeSnapshot> getSnapshot() {
+ final State localState = state;
+ return new SimpleEntry<>(localState, localState.getSnapshot());
+ }
+
+ private boolean recordTransaction(final State expected, final DOMStoreWriteTransaction transaction) {
+ final State state = new Allocated(transaction);
+ return STATE_UPDATER.compareAndSet(this, expected, state);
+ }
+
+ @Override
+ public DOMStoreReadTransaction newReadOnlyTransaction() {
+ final Entry<State, DataTreeSnapshot> entry = getSnapshot();
+ return new SnapshotBackedReadTransaction(store.nextIdentifier(), store.getDebugTransactions(), entry.getValue());
+ }
+
+ @Override
+ public DOMStoreReadWriteTransaction newReadWriteTransaction() {
+ Entry<State, DataTreeSnapshot> entry;
+ DOMStoreReadWriteTransaction ret;
+
+ do {
+ entry = getSnapshot();
+ ret = new SnapshotBackedReadWriteTransaction(store.nextIdentifier(),
+ store.getDebugTransactions(), entry.getValue(), this);
+ } while (!recordTransaction(entry.getKey(), ret));
+
+ return ret;
+ }
+
+ @Override
+ public DOMStoreWriteTransaction newWriteOnlyTransaction() {
+ Entry<State, DataTreeSnapshot> entry;
+ DOMStoreWriteTransaction ret;
+
+ do {
+ entry = getSnapshot();
+ ret = new SnapshotBackedWriteTransaction(store.nextIdentifier(),
+ store.getDebugTransactions(), entry.getValue(), this);
+ } while (!recordTransaction(entry.getKey(), ret));
+
+ return ret;
+ }
+
+ @Override
+ protected void transactionAborted(final SnapshotBackedWriteTransaction tx) {
+ final State localState = state;
+ if (localState instanceof Allocated) {
+ final Allocated allocated = (Allocated)localState;
+ if (allocated.getTransaction().equals(tx)) {
+ final boolean success = STATE_UPDATER.compareAndSet(this, localState, idleState);
+ if (!success) {
+ LOG.info("State already transitioned from {} to {}", localState, state);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected DOMStoreThreePhaseCommitCohort transactionReady(final SnapshotBackedWriteTransaction tx, final DataTreeModification tree) {
+ final State localState = state;
+
+ if (localState instanceof Allocated) {
+ final Allocated allocated = (Allocated)localState;
+ final DOMStoreWriteTransaction transaction = allocated.getTransaction();
+ Preconditions.checkState(tx.equals(transaction), "Mis-ordered ready transaction %s last allocated was %s", tx, transaction);
+ allocated.setSnapshot(tree);
+ } else {
+ LOG.debug("Ignoring transaction {} readiness due to state {}", tx, localState);
+ }
+
+ return new ChainedTransactionCommitImpl(tx, store.transactionReady(tx, tree), this);
+ }
+
+ @Override
+ public void close() {
+ final State localState = state;
+
+ do {
+ Preconditions.checkState(!CLOSED.equals(localState), "Transaction chain {} has been closed", this);
+
+ if (FAILED.equals(localState)) {
+ LOG.debug("Ignoring user close in failed state");
+ return;
+ }
+ } while (!STATE_UPDATER.compareAndSet(this, localState, CLOSED));
+ }
+
+ void onTransactionFailed(final SnapshotBackedWriteTransaction transaction, final Throwable t) {
+ LOG.debug("Transaction chain {} failed on transaction {}", this, transaction, t);
+ state = FAILED;
+ }
+
+ void onTransactionCommited(final SnapshotBackedWriteTransaction transaction) {
+ // If the committed transaction was the one we allocated last,
+ // we clear it and the ready snapshot, so the next transaction
+ // allocated refers to the data tree directly.
+ final State localState = state;
+
+ if (!(localState instanceof Allocated)) {
+ LOG.debug("Ignoring successful transaction {} in state {}", transaction, localState);
+ return;
+ }
+
+ final Allocated allocated = (Allocated)localState;
+ final DOMStoreWriteTransaction tx = allocated.getTransaction();
+ if (!tx.equals(transaction)) {
+ LOG.debug("Ignoring non-latest successful transaction {} in state {}", transaction, allocated);
+ return;
+ }
+
+ if (!STATE_UPDATER.compareAndSet(this, localState, idleState)) {
+ LOG.debug("Transaction chain {} has already transitioned from {} to {}, not making it idle", this, localState, state);
+ }
+ }
+}
\ No newline at end of file
import static com.google.common.base.Preconditions.checkState;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import javax.annotation.concurrent.GuardedBy;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
@Override
public DOMStoreTransactionChain createTransactionChain() {
- return new DOMStoreTransactionChainImpl();
+ return new DOMStoreTransactionChainImpl(this);
}
@Override
}
}
- boolean getDebugTransactions() {
+ public final boolean getDebugTransactions() {
return debugTransactions;
}
+ final DataTreeSnapshot takeSnapshot() {
+ return dataTree.takeSnapshot();
+ }
+
@Override
public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
final YangInstanceIdentifier path, final L listener, final DataChangeScope scope) {
return new ThreePhaseCommitImpl(tx, tree);
}
- private Object nextIdentifier() {
+ Object nextIdentifier() {
return name + "-" + txCounter.getAndIncrement();
}
- private class DOMStoreTransactionChainImpl extends TransactionReadyPrototype implements DOMStoreTransactionChain {
- @GuardedBy("this")
- private SnapshotBackedWriteTransaction allocatedTransaction;
- @GuardedBy("this")
- private DataTreeSnapshot readySnapshot;
- @GuardedBy("this")
- private boolean chainFailed = false;
-
- @GuardedBy("this")
- private void checkFailed() {
- Preconditions.checkState(!chainFailed, "Transaction chain is failed.");
- }
-
- @GuardedBy("this")
- private DataTreeSnapshot getSnapshot() {
- checkFailed();
-
- if (allocatedTransaction != null) {
- Preconditions.checkState(readySnapshot != null, "Previous transaction %s is not ready yet", allocatedTransaction.getIdentifier());
- return readySnapshot;
- } else {
- return dataTree.takeSnapshot();
- }
- }
-
- @GuardedBy("this")
- private <T extends SnapshotBackedWriteTransaction> T recordTransaction(final T transaction) {
- allocatedTransaction = transaction;
- readySnapshot = null;
- return transaction;
- }
-
- @Override
- public synchronized DOMStoreReadTransaction newReadOnlyTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return new SnapshotBackedReadTransaction(nextIdentifier(), getDebugTransactions(), snapshot);
- }
-
- @Override
- public synchronized DOMStoreReadWriteTransaction newReadWriteTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return recordTransaction(new SnapshotBackedReadWriteTransaction(nextIdentifier(),
- getDebugTransactions(), snapshot, this));
- }
-
- @Override
- public synchronized DOMStoreWriteTransaction newWriteOnlyTransaction() {
- final DataTreeSnapshot snapshot = getSnapshot();
- return recordTransaction(new SnapshotBackedWriteTransaction(nextIdentifier(),
- getDebugTransactions(), snapshot, this));
- }
-
- @Override
- protected synchronized void transactionAborted(final SnapshotBackedWriteTransaction tx) {
- if (tx.equals(allocatedTransaction)) {
- Preconditions.checkState(readySnapshot == null, "Unexpected abort of transaction %s with ready snapshot %s", tx, readySnapshot);
- allocatedTransaction = null;
- }
- }
-
- @Override
- protected synchronized DOMStoreThreePhaseCommitCohort transactionReady(final SnapshotBackedWriteTransaction tx, final DataTreeModification tree) {
- Preconditions.checkState(tx.equals(allocatedTransaction), "Mis-ordered ready transaction %s last allocated was %s", tx, allocatedTransaction);
- if (readySnapshot != null) {
- // The snapshot should have been cleared
- LOG.warn("Uncleared snapshot {} encountered, overwritten with transaction {} snapshot {}", readySnapshot, tx, tree);
- }
-
- final DOMStoreThreePhaseCommitCohort cohort = InMemoryDOMDataStore.this.transactionReady(tx, tree);
- readySnapshot = tree;
- return new ChainedTransactionCommitImpl(tx, cohort, this);
- }
-
- @Override
- public void close() {
- // FIXME: this call doesn't look right here - listeningExecutor is shared and owned
- // by the outer class.
- //listeningExecutor.shutdownNow();
- }
-
- protected synchronized void onTransactionFailed(final SnapshotBackedWriteTransaction transaction,
- final Throwable t) {
- chainFailed = true;
- }
-
- public synchronized void onTransactionCommited(final SnapshotBackedWriteTransaction transaction) {
- // If the committed transaction was the one we allocated last,
- // we clear it and the ready snapshot, so the next transaction
- // allocated refers to the data tree directly.
- if (transaction.equals(allocatedTransaction)) {
- if (readySnapshot == null) {
- LOG.warn("Transaction {} committed while no ready snapshot present", transaction);
- }
-
- allocatedTransaction = null;
- readySnapshot = null;
- }
- }
- }
-
- private static class ChainedTransactionCommitImpl implements DOMStoreThreePhaseCommitCohort {
- private final SnapshotBackedWriteTransaction transaction;
- private final DOMStoreThreePhaseCommitCohort delegate;
- private final DOMStoreTransactionChainImpl txChain;
-
- protected ChainedTransactionCommitImpl(final SnapshotBackedWriteTransaction transaction,
- final DOMStoreThreePhaseCommitCohort delegate, final DOMStoreTransactionChainImpl txChain) {
- this.transaction = transaction;
- this.delegate = delegate;
- this.txChain = txChain;
- }
-
- @Override
- public ListenableFuture<Boolean> canCommit() {
- return delegate.canCommit();
- }
-
- @Override
- public ListenableFuture<Void> preCommit() {
- return delegate.preCommit();
- }
-
- @Override
- public ListenableFuture<Void> abort() {
- return delegate.abort();
- }
-
- @Override
- public ListenableFuture<Void> commit() {
- ListenableFuture<Void> commitFuture = delegate.commit();
- Futures.addCallback(commitFuture, new FutureCallback<Void>() {
- @Override
- public void onFailure(final Throwable t) {
- txChain.onTransactionFailed(transaction, t);
- }
-
- @Override
- public void onSuccess(final Void result) {
- txChain.onTransactionCommited(transaction);
- }
- });
- return commitFuture;
- }
- }
-
- private class ThreePhaseCommitImpl implements DOMStoreThreePhaseCommitCohort {
+ private final class ThreePhaseCommitImpl implements DOMStoreThreePhaseCommitCohort {
private final SnapshotBackedWriteTransaction transaction;
private final DataTreeModification modification;
<parent>\r
<artifactId>sal-parent</artifactId>\r
<groupId>org.opendaylight.controller</groupId>\r
- <version>1.1-SNAPSHOT</version>\r
+ <version>1.2.0-SNAPSHOT</version>\r
</parent>\r
\r
<groupId>org.opendaylight.controller</groupId>\r
<dependency>\r
<groupId>org.opendaylight.controller</groupId>\r
<artifactId>sal-dom-xsql</artifactId>\r
- <version>1.1-SNAPSHOT</version>\r
+ <version>1.2.0-SNAPSHOT</version>\r
</dependency>\r
</dependencies>\r
\r
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-netconf-connector</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-remote</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-remoterpc-connector</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-rest-connector-config</artifactId>
<description>Configuration files for sal-rest-connector</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-rest-connector</artifactId>
<packaging>bundle</packaging>
import org.slf4j.LoggerFactory;
class JsonToCompositeNodeReader {
- private static final Logger LOG = LoggerFactory.getLogger(JsonReader.class);
+ private static final Logger LOG = LoggerFactory.getLogger(JsonToCompositeNodeReader.class);
private static final Splitter COLON_SPLITTER = Splitter.on(':');
private JsonToCompositeNodeReader() {
}
}
+ /**
+ * Transform input value to URI instance.
+ *
+ * Input string has to be in the format moduleName:localName. The moduleName part is then transformed to a URI
+ * instance. If the moduleName part contains invalid URI characters such as &quot;&lt;&quot; or &quot;&gt;&quot;, null is returned
+ * because the string cannot be interpreted as a URI.
+ *
+ * @param jsonElementName
+ *            value in the format moduleName:localName
+ * @return URI created from the moduleName part, or null if it cannot be interpreted as a valid URI
+ */
private static URI getNamespaceFor(final String jsonElementName) {
final Iterator<String> it = COLON_SPLITTER.split(jsonElementName).iterator();
- // The string needs to me in form "moduleName:localName"
+ // The string needs to be in form "moduleName:localName"
if (it.hasNext()) {
final String maybeURI = it.next();
if (Iterators.size(it) == 1) {
- return URI.create(maybeURI);
+ try {
+ return URI.create(maybeURI);
+ } catch (IllegalArgumentException e) {
+ LOG.debug("Value {} couldn't be interpreted as URI.", maybeURI);
+ }
}
}
}
}
- // it could be identityref Built-In Type
+ // it could be an identityref built-in type, therefore it is necessary to interpret the value as module_name:local_name
URI namespace = getNamespaceFor(value);
if (namespace != null) {
return new IdentityValuesDTO(namespace.toString(), getLocalNameFor(value), null, value);
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import javax.xml.stream.FactoryConfigurationError;
import org.opendaylight.controller.sal.restconf.impl.InstanceIdentifierContext;
import org.opendaylight.controller.sal.restconf.impl.NormalizedNodeContext;
import org.opendaylight.controller.sal.restconf.impl.RestconfDocumentedException;
+import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
+import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
WebApplicationException {
InstanceIdentifierContext pathContext = t.getInstanceIdentifierContext();
if (t.getData() == null) {
- throw new RestconfDocumentedException(Response.Status.NOT_FOUND);
+ throw new RestconfDocumentedException(
+ "Request could not be completed because the relevant data model content does not exist.",
+ ErrorType.APPLICATION, ErrorTag.DATA_MISSING);
}
XMLStreamWriter xmlWriter;
private CheckedFuture<Void, TransactionCommitFailedException> deleteDataViaTransaction(
final DOMDataWriteTransaction writeTransaction, final LogicalDatastoreType datastore,
YangInstanceIdentifier path) {
- LOG.info("Delete " + datastore.name() + " via Restconf: {}", path);
+ LOG.trace("Delete " + datastore.name() + " via Restconf: {}", path);
writeTransaction.delete(datastore, path);
return writeTransaction.submit();
}
assertTrue(exceptionMessage.contains("Root element of Json has to be Object"));
}
+ /**
+ * Tests the case when a JSON input data value is in the format string1:string2 and the first string contains
+ * invalid URI characters such as &quot;&lt;&quot; or &quot;&gt;&quot;.
+ *
+ * While the data is loaded, such a value is also interpreted as moduleName:localName (a potential leafref
+ * value). The moduleName part is transformed to a URI, which throws an exception; the exception is caught and
+ * the URI is left null. Consequently the value stored in the simple node is the plain string from the JSON
+ * input rather than an IdentityValuesDTO instance (which is used for leafref candidates).
+ */
+ @Test
+ public void invalidUriCharacterInValue() {
+ final Node<?> rootNode = TestUtils.readInputToCnSn("/json-to-cnsn/invalid-uri-character-in-value.json", true,
+ JsonToCompositeNodeProvider.INSTANCE);
+
+ assertTrue(rootNode instanceof CompositeNode);
+ Node<?> lf1 = null;
+ Node<?> lf2 = null;
+ for(Node<?> child : ((CompositeNode)rootNode).getChildren()) {
+ if (child.getNodeType().getLocalName().equals("lf1")) {
+ lf1 = child;
+ } else if (child.getNodeType().getLocalName().equals("lf2")) {
+ lf2 = child;
+ }
+ }
+
+ assertNotNull(lf1);
+ assertNotNull(lf2);
+ assertTrue(lf1 instanceof SimpleNode<?>);
+ assertTrue(lf2 instanceof SimpleNode<?>);
+
+ assertEquals("module<Name:value lf1", ((SimpleNode<?>) lf1).getValue());
+ assertEquals("module>Name:value lf2", ((SimpleNode<?>) lf2).getValue());
+ }
+
}
--- /dev/null
+{
+ "moduleName:cont":{
+ "lf1":"module<Name:value lf1",
+ "lf2":"module>Name:value lf2"
+ }
+}
\ No newline at end of file
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-rest-docgen</artifactId>
for (DataSchemaNode childNode : module.getChildNodes()) {
// For every container and list in the module
- processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
- processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
+ if (childNode instanceof ContainerSchemaNode || childNode instanceof ListSchemaNode) {
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
+ }
}
}
property.put(TYPE_KEY, childNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
property.put(ITEMS_KEY, items);
properties.put(childNode.getQName().getLocalName(), property);
+ } else if (childNode instanceof LeafSchemaNode){
+ JSONObject property = processLeafNode((LeafSchemaNode)childNode);
+ properties.put(childNode.getQName().getLocalName(), property);
}
}
return properties;
--- /dev/null
+package org.opendaylight.controller.sal.rest.doc.impl;
+
+import com.google.common.base.Preconditions;
+import org.json.JSONObject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl;
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Map;
+
+
+public class ModelGeneratorTest {
+
+ private DocGenTestHelper helper;
+ private SchemaContext schemaContext;
+
+ @Before
+ public void setUp() throws Exception {
+ helper = new DocGenTestHelper();
+ helper.setUp();
+ schemaContext = new YangParserImpl().resolveSchemaContext(new HashSet<Module>(helper.getModules().values()));
+ }
+
+ @Test
+ public void testConvertToJsonSchema() throws Exception {
+
+ Preconditions.checkArgument(helper.getModules() != null, "No modules found");
+
+ ModelGenerator generator = new ModelGenerator();
+
+ for (Map.Entry<File, Module> m : helper.getModules().entrySet()) {
+ if (m.getKey().getAbsolutePath().endsWith("opflex.yang")) {
+
+ JSONObject jsonObject = generator.convertToJsonSchema(m.getValue(), schemaContext);
+ Assert.assertNotNull(jsonObject);
+ }
+ }
+
+ }
+}
\ No newline at end of file
--- /dev/null
+module opflex {
+ yang-version 1;
+
+ namespace "urn:opendaylight:groupbasedpolicy:opflex";
+ prefix "opflex";
+
+
+
+
+
+ description
+ "This module defines the group-based policy OpFlex renderer model.";
+
+ revision "2014-05-28" {
+ description
+ "Initial revision.";
+ }
+
+ typedef serialization {
+ description
+ "The serialization to use for OpFlex messages.";
+
+ type enumeration {
+ enum json {
+ description
+ "JSON 1.0 serialization.";
+ }
+ enum xml {
+ description
+ "XML serialization.";
+ }
+ enum binary {
+ description
+ "OpFlex binary serialization.";
+ }
+ }
+ }
+
+ // ******************
+ // Configuration Data
+ // ******************
+ leaf domain {
+ description
+ "The OpFlex administrative domain.";
+
+ config true;
+
+ type string;
+ }
+}
\ No newline at end of file
"Toaster module in progress.";
}
+ leaf domain {
+ description
+ "Toaster domain.";
+
+ config true;
+
+ type string;
+ }
identity toast-type {
description
"Base for all bread types supported by the toaster.
- New bread types not listed here nay be added in the
+ New bread types not listed here may be added in the
future.";
}
"Indicates the toaster service is available";
description
"Top-level container for all toaster database objects.";
-
+
leaf testToasterBits {
type bits {
bit testbit1 {
}
default "testbit2";
}
-
+
leaf testUnion {
type union {
type int32;
type string;
}
-
- }
-
+
+ }
+
leaf-list allow-user {
type string;
description "A list of user name patterns to allow";
-
+
}
-
+
choice how {
default interval;
case interval {
type string;
}
}
- }
-
+ }
+
leaf toasterManufacturer {
type DisplayString;
config false;
mandatory true;
description
- "The name of the toaster's manufacturer. For instance,
+ "The name of the toaster's manufacturer. For instance,
Microsoft Toaster.";
}
config false;
mandatory true;
description
- "This variable indicates the current state of
+ "This variable indicates the current state of
the toaster.";
}
}
rpc make-toast {
description
"Make some toast.
- The toastDone notification will be sent when
+ The toastDone notification will be sent when
the toast is finished.
An 'in-use' error will be returned if toast
is already being made.
- A 'resource-denied' error will be returned
+ A 'resource-denied' error will be returned
if the toaster service is disabled.";
input {
leaf toasterDoneness {
}
default '5';
description
- "This variable controls how well-done is the
+ "This variable controls how well-done is the
ensuing toast. It should be on a scale of 1 to 10.
- Toast made at 10 generally is considered unfit
- for human consumption; toast made at 1 is warmed
+ Toast made at 10 generally is considered unfit
+ for human consumption; toast made at 1 is warmed
lightly.";
}
}
default 'wheat-bread';
description
- "This variable informs the toaster of the type of
- material that is being toasted. The toaster
- uses this information, combined with
- toasterDoneness, to compute for how
- long the material must be toasted to achieve
+ "This variable informs the toaster of the type of
+ material that is being toasted. The toaster
+ uses this information, combined with
+ toasterDoneness, to compute for how
+ long the material must be toasted to achieve
the required doneness.";
}
}
- }
+ }
rpc cancel-toast {
description
"Stop making toast, if any is being made.
- A 'resource-denied' error will be returned
+ A 'resource-denied' error will be returned
if the toaster service is disabled.";
- }
-
+ }
+
notification toastDone {
description
"Indicates that the toast in progress has completed.";
description
"Indicates the final toast status";
}
- }
- }
+ }
+ }
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sal-restconf-broker</artifactId>
<packaging>bundle</packaging>
<parent>
<artifactId>sal-parent</artifactId>
<groupId>org.opendaylight.controller</groupId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>clustering-it</artifactId>
<groupId>org.opendaylight.controller.samples</groupId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>clustering-it-config</artifactId>
<packaging>jar</packaging>
<parent>
<artifactId>clustering-it</artifactId>
<groupId>org.opendaylight.controller.samples</groupId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>clustering-it-model</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>clustering-it</artifactId>
<packaging>pom</packaging>
<parent>
<artifactId>clustering-it</artifactId>
<groupId>org.opendaylight.controller.samples</groupId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>clustering-it-provider</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
<groupId>org.opendaylight.controller.samples.l2switch.md</groupId>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
<groupId>org.opendaylight.controller.samples.l2switch.md</groupId>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>l2switch.aggregator</artifactId>
<groupId>org.opendaylight.controller.samples.l2switch</groupId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.samples</groupId>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>toaster-config</artifactId>
<description>Configuration files for toaster</description>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sample-toaster-consumer</artifactId>
<packaging>bundle</packaging>
<properties>
- <sal-binding-api.version>1.1-SNAPSHOT</sal-binding-api.version>
+ <sal-binding-api.version>1.2.0-SNAPSHOT</sal-binding-api.version>
</properties>
<dependencies>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sample-toaster-it</artifactId>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sample-toaster-provider</artifactId>
<packaging>bundle</packaging>
<properties>
- <sal-binding-api.version>1.1-SNAPSHOT</sal-binding-api.version>
+ <sal-binding-api.version>1.2.0-SNAPSHOT</sal-binding-api.version>
</properties>
<dependencies>
<parent>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>sal-samples</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<artifactId>sample-toaster</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>statistics-manager</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<groupId>org.opendaylight.controller.md</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-parent</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
</parent>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>topology-manager</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>config-netconf-connector</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>config-persister-impl</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>ietf-netconf-monitoring-extension</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>ietf-netconf-monitoring</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-api</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>netconf-auth</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-cli</artifactId>
<packaging>jar</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-client</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-config</artifactId>
<description>Configuration files for netconf</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-connector-config</artifactId>
<description>Configuration files for netconf-connector</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-impl</artifactId>
<packaging>bundle</packaging>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import io.netty.channel.Channel;
+import java.util.List;
+import java.util.Set;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.impl.NetconfServerSessionListener;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfMonitoringServiceImpl;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
+import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringServiceImplTest {
+
+ private NetconfMonitoringServiceImpl service;
+
+ @Mock
+ private NetconfOperationProvider operationProvider;
+ @Mock
+ private NetconfManagementSession managementSession;
+ @Mock
+ private NetconfOperationServiceSnapshot snapshot;
+ @Mock
+ private NetconfOperationService operationService;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ service = new NetconfMonitoringServiceImpl(operationProvider);
+ }
+
+ @Test
+ public void testSessions() throws Exception {
+ doReturn("sessToStr").when(managementSession).toString();
+ service.onSessionUp(managementSession);
+ List list = Lists.newArrayList(managementSession);
+ }
+
+ @Test(expected = RuntimeException.class)
+ public void testGetSchemas() throws Exception {
+ doThrow(RuntimeException.class).when(operationProvider).openSnapshot(anyString());
+ service.getSchemas();
+ }
+
+ @Test(expected = IllegalStateException.class)
+ public void testGetSchemas2() throws Exception {
+ doThrow(Exception.class).when(operationProvider).openSnapshot(anyString());
+ service.getSchemas();
+ }
+
+ @Test
+ public void testGetSchemas3() throws Exception {
+ doReturn("").when(managementSession).toString();
+ Capability cap = mock(Capability.class);
+ Set caps = Sets.newHashSet(cap);
+ Set services = Sets.newHashSet(operationService);
+ doReturn(snapshot).when(operationProvider).openSnapshot(anyString());
+ doReturn(services).when(snapshot).getServices();
+ doReturn(caps).when(operationService).getCapabilities();
+ Optional opt = mock(Optional.class);
+ doReturn(opt).when(cap).getCapabilitySchema();
+ doReturn(true).when(opt).isPresent();
+ doReturn(opt).when(cap).getModuleNamespace();
+ doReturn("namespace").when(opt).get();
+ Optional optRev = Optional.of("rev");
+ doReturn(optRev).when(cap).getRevision();
+ doReturn(Optional.of("modName")).when(cap).getModuleName();
+ doReturn(Optional.of(Lists.newArrayList("loc"))).when(cap).getLocation();
+ doNothing().when(snapshot).close();
+
+ assertNotNull(service.getSchemas());
+ verify(snapshot, times(1)).close();
+
+ NetconfServerSessionListener sessionListener = mock(NetconfServerSessionListener.class);
+ Channel channel = mock(Channel.class);
+ NetconfHelloMessageAdditionalHeader header = new NetconfHelloMessageAdditionalHeader("name", "addr", "2", "tcp", "id");
+ NetconfServerSession sm = new NetconfServerSession(sessionListener, channel, 10, header);
+ doNothing().when(sessionListener).onSessionUp(any(NetconfServerSession.class));
+ sm.sessionUp();
+ service.onSessionUp(sm);
+ assertEquals(1, service.getSessions().getSession().size());
+
+ assertEquals(Long.valueOf(10), service.getSessions().getSession().get(0).getSessionId());
+
+ service.onSessionDown(sm);
+ assertEquals(0, service.getSessions().getSession().size());
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
+public class DefaultCloseSessionTest {
+ @Test
+ public void testDefaultCloseSession() throws Exception {
+ AutoCloseable res = mock(AutoCloseable.class);
+ doNothing().when(res).close();
+ DefaultCloseSession session = new DefaultCloseSession("", res);
+ Document doc = XmlUtil.newDocument();
+ XmlElement elem = XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"));
+ session.handleWithNoSubsequentOperations(doc, elem);
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testDefaultCloseSession2() throws Exception {
+ AutoCloseable res = mock(AutoCloseable.class);
+ doThrow(NetconfDocumentedException.class).when(res).close();
+ DefaultCloseSession session = new DefaultCloseSession("", res);
+ Document doc = XmlUtil.newDocument();
+ XmlElement elem = XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"));
+ session.handleWithNoSubsequentOperations(doc, elem);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import com.google.common.collect.Sets;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
+import org.opendaylight.controller.netconf.util.test.XmlFileLoader;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import static org.mockito.Mockito.*;
+
+public class DefaultCommitTest {
+
+ private NetconfOperationChainedExecution operation;
+ private Document requestMessage;
+ private NetconfOperationRouter router;
+ private DefaultCommitNotificationProducer notifier;
+ private CapabilityProvider cap;
+ private DefaultCommit commit;
+
+ @Before
+ public void setUp() throws Exception {
+ operation = mock(NetconfOperationChainedExecution.class);
+ doReturn(XmlUtil.newDocument()).when(operation).execute(any(Document.class));
+ router = mock(NetconfOperationRouter.class);
+ doReturn(false).when(operation).isExecutionTermination();
+ notifier = mock(DefaultCommitNotificationProducer.class);
+ doNothing().when(notifier).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ cap = mock(CapabilityProvider.class);
+ doReturn(Sets.newHashSet()).when(cap).getCapabilities();
+ Document rpcData = XmlFileLoader.xmlFileToDocument("netconfMessages/editConfig_expectedResult.xml");
+ doReturn(rpcData).when(router).onNetconfMessage(any(Document.class), any(NetconfServerSession.class));
+ commit = new DefaultCommit(notifier, cap, "", router);
+ }
+
+ @Test
+ public void testHandleWithNotification() throws Exception {
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ commit.handle(requestMessage, operation);
+ verify(operation, times(1)).execute(requestMessage);
+ verify(notifier, times(1)).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ }
+
+ @Test
+ public void testHandleWithoutNotification() throws Exception {
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ Element elem = requestMessage.getDocumentElement();
+ elem.setAttribute("notify", "false");
+ commit.handle(requestMessage, operation);
+ verify(operation, times(1)).execute(requestMessage);
+ verify(notifier, never()).sendCommitNotification(anyString(), any(Element.class), anySetOf(String.class));
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testHandle() throws Exception {
+ Document rpcData = XmlFileLoader.xmlFileToDocument("netconfMessages/get.xml");
+ doReturn(rpcData).when(router).onNetconfMessage(any(Document.class), any(NetconfServerSession.class));
+ requestMessage = XmlFileLoader.xmlFileToDocument("netconfMessages/commit.xml");
+ commit.handle(requestMessage, operation);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import com.google.common.base.Optional;
+import junit.framework.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
+public class DefaultGetSchemaTest {
+
+ private CapabilityProvider cap;
+ private Document doc;
+ private String getSchema;
+
+ @Before
+ public void setUp() throws Exception {
+ cap = mock(CapabilityProvider.class);
+ doc = XmlUtil.newDocument();
+ getSchema = "<get-schema xmlns=\"urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring\">\n" +
+ " <identifier>threadpool-api</identifier>\n" +
+ " <version>2010-09-24</version>\n" +
+ " <format\n" +
+ " xmlns:ncm=\"urn:ietf:params:xml:ns:yang:ietf-netconf-monitoring\">ncm:yang\n" +
+ " </format>\n" +
+ " </get-schema>";
+ }
+
+ @Test(expected = NetconfDocumentedException.class)
+ public void testDefaultGetSchema() throws Exception {
+ DefaultGetSchema schema = new DefaultGetSchema(cap, "");
+ doThrow(IllegalStateException.class).when(cap).getSchemaForCapability(anyString(), any(Optional.class));
+ schema.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement(getSchema)));
+ }
+
+ @Test
+ public void handleWithNoSubsequentOperations() throws Exception {
+ DefaultGetSchema schema = new DefaultGetSchema(cap, "");
+ doReturn("").when(cap).getSchemaForCapability(anyString(), any(Optional.class));
+ assertNotNull(schema.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement(getSchema))));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.mapping.operations;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandler;
+import io.netty.channel.ChannelPipeline;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.impl.NetconfServerSession;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.*;
+
+public class DefaultStopExiTest {
+ @Test
+ public void testHandleWithNoSubsequentOperations() throws Exception {
+ DefaultStopExi exi = new DefaultStopExi("");
+ Document doc = XmlUtil.newDocument();
+ Channel channel = mock(Channel.class);
+ ChannelPipeline pipeline = mock(ChannelPipeline.class);
+ doReturn(pipeline).when(channel).pipeline();
+ ChannelHandler channelHandler = mock(ChannelHandler.class);
+ doReturn(channelHandler).when(pipeline).replace(anyString(), anyString(), any(ChannelHandler.class));
+
+ NetconfServerSession serverSession = new NetconfServerSession(null, channel, 2L, null);
+ exi.setNetconfSession(serverSession);
+
+ assertNotNull(exi.handleWithNoSubsequentOperations(doc, XmlElement.fromDomElement(XmlUtil.readXmlToElement("<elem/>"))));
+ verify(pipeline, times(1)).replace(anyString(), anyString(), any(ChannelHandler.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.osgi;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Dictionary;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.osgi.framework.*;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
+public class NetconfImplActivatorTest {
+
+ @Mock
+ private BundleContext bundle;
+ @Mock
+ private Filter filter;
+ @Mock
+ private ServiceReference reference;
+ @Mock
+ private ServiceRegistration registration;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(filter).when(bundle).createFilter(anyString());
+ doNothing().when(bundle).addServiceListener(any(ServiceListener.class), anyString());
+
+ ServiceReference[] refs = new ServiceReference[0];
+ doReturn(refs).when(bundle).getServiceReferences(anyString(), anyString());
+ doReturn(Arrays.asList(refs)).when(bundle).getServiceReferences(any(Class.class), anyString());
+ doReturn("").when(bundle).getProperty(anyString());
+ doReturn(registration).when(bundle).registerService(any(Class.class), any(NetconfOperationServiceFactoryListenerImpl.class), any(Dictionary.class));
+ doNothing().when(registration).unregister();
+ doNothing().when(bundle).removeServiceListener(any(ServiceListener.class));
+ }
+
+ @Test
+ public void testStart() throws Exception {
+ NetconfImplActivator activator = new NetconfImplActivator();
+ activator.start(bundle);
+ verify(bundle, times(2)).registerService(any(Class.class), any(NetconfOperationServiceFactoryListenerImpl.class), any(Dictionary.class));
+ activator.stop(bundle);
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.osgi;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceReference;
+
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
+public class NetconfOperationServiceFactoryTrackerTest {
+
+ @Mock
+ private Filter filter;
+ @Mock
+ private BundleContext context;
+ @Mock
+ private NetconfOperationServiceFactoryListener listener;
+ @Mock
+ private NetconfOperationServiceFactory factory;
+ @Mock
+ private ServiceReference reference;
+
+ private NetconfOperationServiceFactoryTracker tracker;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doNothing().when(listener).onRemoveNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ doReturn(filter).when(context).createFilter(anyString());
+ doReturn("").when(reference).toString();
+ doReturn(factory).when(context).getService(any(ServiceReference.class));
+ doReturn("").when(factory).toString();
+ doNothing().when(listener).onAddNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ tracker = new NetconfOperationServiceFactoryTracker(context, listener);
+ }
+
+ @Test
+ public void testNetconfOperationServiceFactoryTracker() throws Exception {
+ tracker.removedService(null, factory);
+ verify(listener, times(1)).onRemoveNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ }
+
+ @Test
+ public void testAddingService() throws Exception {
+ assertNotNull(tracker.addingService(reference));
+ verify(listener, times(1)).onAddNetconfOperationServiceFactory(any(NetconfOperationServiceFactory.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.impl.util;
+
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.util.concurrent.GenericFutureListener;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.mockito.Mockito.*;
+
+public class DeserializerExceptionHandlerTest {
+
+ private DeserializerExceptionHandler handler;
+ private ChannelFuture channelFuture;
+ private ChannelHandlerContext context;
+ private Channel channel;
+
+ @Before
+ public void setUp() throws Exception {
+ handler = new DeserializerExceptionHandler();
+ context = mock(ChannelHandlerContext.class);
+ channel = mock(Channel.class);
+ doReturn(channel).when(context).channel();
+ channelFuture = mock(ChannelFuture.class);
+ doReturn(channelFuture).when(channelFuture).addListener(any(GenericFutureListener.class));
+ doReturn(channelFuture).when(channel).writeAndFlush(anyObject());
+ }
+
+ @Test
+ public void testExceptionCaught() throws Exception {
+ handler.exceptionCaught(context, new Exception());
+ verify(context, times(1)).channel();
+ }
+}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-it</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-mapping-api</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-monitoring</artifactId>
<packaging>bundle</packaging>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import java.util.Arrays;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceListener;
+import org.osgi.framework.ServiceReference;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringActivatorTest {
+
+ @Mock
+ BundleContext context;
+ @Mock
+ Filter filter;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(filter).when(context).createFilter(anyString());
+ doNothing().when(context).addServiceListener(any(ServiceListener.class), anyString());
+ ServiceReference[] refs = new ServiceReference[2];
+ doReturn(Arrays.asList(refs)).when(context).getServiceReferences(any(Class.class), anyString());
+ doReturn(refs).when(context).getServiceReferences(anyString(), anyString());
+ }
+
+ @Test
+ public void testNetconfMonitoringActivator() throws Exception {
+ NetconfMonitoringActivator activator = new NetconfMonitoringActivator();
+ activator.start(context);
+ verify(context, times(1)).addServiceListener(any(ServiceListener.class), anyString());
+
+ activator.stop(context);
+ verify(context, times(1)).removeServiceListener(any(ServiceListener.class));
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import com.google.common.base.Optional;
+import org.junit.Test;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
+public class NetconfMonitoringOperationServiceTest {
+ @Test
+ public void testGetters() throws Exception {
+ NetconfMonitoringService monitor = mock(NetconfMonitoringService.class);
+ NetconfMonitoringOperationService service = new NetconfMonitoringOperationService(monitor);
+
+ assertEquals(1, service.getNetconfOperations().size());
+
+ assertEquals(Optional.absent(), service.getCapabilities().iterator().next().getCapabilitySchema());
+ assertEquals(Optional.absent(), service.getCapabilities().iterator().next().getLocation());
+ assertEquals(Optional.of(MonitoringConstants.MODULE_REVISION), service.getCapabilities().iterator().next().getRevision());
+ assertEquals(Optional.of(MonitoringConstants.MODULE_NAME), service.getCapabilities().iterator().next().getModuleName());
+ assertEquals(Optional.of(MonitoringConstants.NAMESPACE), service.getCapabilities().iterator().next().getModuleNamespace());
+ assertEquals(MonitoringConstants.URI, service.getCapabilities().iterator().next().getCapabilityUri());
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.monitoring.osgi;
+
+import java.util.Hashtable;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Filter;
+import org.osgi.framework.ServiceReference;
+import org.osgi.framework.ServiceRegistration;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyCollection;
+import static org.mockito.Mockito.*;
+
+public class NetconfMonitoringServiceTrackerTest {
+
+ @Mock
+ private ServiceReference reference;
+ @Mock
+ private BundleContext context;
+ @Mock
+ private ServiceRegistration serviceRegistration;
+ @Mock
+ private Filter filter;
+ @Mock
+ private NetconfMonitoringService monitoringService;
+
+ @Before
+ public void setUp() throws Exception {
+ MockitoAnnotations.initMocks(this);
+ doReturn(serviceRegistration).when(context).registerService(any(Class.class), any(NetconfOperationServiceFactory.class), any(Hashtable.class));
+ doNothing().when(serviceRegistration).unregister();
+ doReturn(filter).when(context).createFilter(anyString());
+ doReturn("").when(reference).toString();
+ doReturn(monitoringService).when(context).getService(any(ServiceReference.class));
+ }
+
+ @Test
+ public void testAddingService() throws Exception {
+ NetconfMonitoringServiceTracker tracker = new NetconfMonitoringServiceTracker(context);
+ tracker.addingService(reference);
+ verify(context, times(1)).registerService(any(Class.class), any(NetconfOperationServiceFactory.class), any(Hashtable.class));
+ tracker.removedService(reference, null);
+ verify(serviceRegistration, times(1)).unregister();
+ }
+}
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-netty-util</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>netconf-ssh</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>netconf-tcp</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-testtool</artifactId>
<artifactId>logback-classic</artifactId>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcpkix-jdk15on</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-netty-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>netconf-auth</artifactId>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.logback_settings</artifactId>
<groupId>xmlunit</groupId>
<artifactId>xmlunit</artifactId>
</dependency>
-
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>config-util</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>ietf-netconf-monitoring</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-client</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-yang-types-20130715</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-inet-types</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-impl</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-monitoring</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-ssh</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netty-config-api</artifactId>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import java.util.Collections;
+import java.util.List;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+
+public class DataList {
+
+ private List<XmlElement> configList = Collections.emptyList();
+
+ public List<XmlElement> getConfigList() {
+ return configList;
+ }
+
+ public void setConfigList(List<XmlElement> configList) {
+ this.configList = configList;
+ }
+
+ public void resetConfigList() {
+ configList = Collections.emptyList();
+ }
+
+}
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import ch.qos.logback.classic.Level;
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.io.ByteStreams;
+import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import java.io.File;
+import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.List;
-
import java.util.concurrent.TimeUnit;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.annotation.Arg;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
-
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Charsets;
-import com.google.common.io.CharStreams;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
public final class Main {
- // TODO add logback config
-
- // TODO make exi configurable
-
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
static class Params {
@Arg(dest = "generate-config-address")
public String generateConfigsAddress;
- @Arg(dest = "generate-configs-dir")
- public File generateConfigsDir;
+ @Arg(dest = "distro-folder")
+ public File distroFolder;
@Arg(dest = "generate-configs-batch-size")
public int generateConfigBatchSize;
@Arg(dest = "exi")
public boolean exi;
+ @Arg(dest = "debug")
+ public boolean debug;
+
static ArgumentParser getParser() {
final ArgumentParser parser = ArgumentParsers.newArgumentParser("netconf testool");
- parser.addArgument("--devices-count")
+
+ parser.description("Netconf device simulator. Detailed info can be found at https://wiki.opendaylight.org/view/OpenDaylight_Controller:Netconf:Testtool#Building_testtool");
+
+ parser.addArgument("--device-count")
.type(Integer.class)
.setDefault(1)
.type(Integer.class)
parser.addArgument("--schemas-dir")
.type(File.class)
- .required(true)
- .help("Directory containing yang schemas to describe simulated devices")
+ .help("Directory containing yang schemas to describe simulated devices. Some schemas e.g. netconf monitoring and inet types are included by default")
.dest("schemas-dir");
parser.addArgument("--starting-port")
parser.addArgument("--generate-config-connection-timeout")
.type(Integer.class)
- .setDefault((int)TimeUnit.MINUTES.toMillis(5))
+ .setDefault((int)TimeUnit.MINUTES.toMillis(30))
.help("Timeout to be generated in initial config files")
.dest("generate-config-connection-timeout");
parser.addArgument("--generate-configs-batch-size")
.type(Integer.class)
- .setDefault(100)
+ .setDefault(4000)
.help("Number of connector configs per generated file")
.dest("generate-configs-batch-size");
- parser.addArgument("--generate-configs-dir")
+ parser.addArgument("--distribution-folder")
.type(File.class)
- .help("Directory where initial config files for ODL distribution should be generated")
- .dest("generate-configs-dir");
+ .help("Directory where the karaf distribution for controller is located")
+ .dest("distro-folder");
parser.addArgument("--ssh")
.type(Boolean.class)
parser.addArgument("--exi")
.type(Boolean.class)
- .setDefault(false)
+ .setDefault(true)
.help("Whether to use exi to transport xml content")
.dest("exi");
+ parser.addArgument("--debug")
+ .type(Boolean.class)
+ .setDefault(false)
+ .help("Whether to use debug log level instead of INFO")
+ .dest("debug");
+
return parser;
}
checkArgument(deviceCount > 0, "Device count has to be > 0");
checkArgument(startingPort > 1024, "Starting port has to be > 1024");
- checkArgument(schemasDir.exists(), "Schemas dir has to exist");
- checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
- checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ if(schemasDir != null) {
+ checkArgument(schemasDir.exists(), "Schemas dir has to exist");
+ checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
+ checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ }
}
}
public static void main(final String[] args) {
- ch.ethz.ssh2.log.Logger.enabled = true;
-
final Params params = parseArgs(args, Params.getParser());
params.validate();
+ final ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
+ root.setLevel(params.debug ? Level.DEBUG : Level.INFO);
+
final NetconfDeviceSimulator netconfDeviceSimulator = new NetconfDeviceSimulator();
try {
final List<Integer> openDevices = netconfDeviceSimulator.start(params);
- if(params.generateConfigsDir != null) {
- new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ if(params.distroFolder != null) {
+ final ConfigGenerator configGenerator = new ConfigGenerator(params.distroFolder, openDevices);
+ final List<File> generated = configGenerator.generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ configGenerator.updateFeatureFile(generated);
+ configGenerator.changeLoadOrder();
}
} catch (final Exception e) {
LOG.error("Unhandled exception", e);
}
}
-
private static Params parseArgs(final String[] args, final ArgumentParser parser) {
final Params opt = new Params();
try {
public static final String NETCONF_USE_SSH = "false";
public static final String SIM_DEVICE_SUFFIX = "-sim-device";
- private final File directory;
+ private static final String SIM_DEVICE_CFG_PREFIX = "simulated-devices_";
+ private static final String ETC_KARAF_PATH = "etc/";
+ private static final String ETC_OPENDAYLIGHT_KARAF_PATH = ETC_KARAF_PATH + "opendaylight/karaf/";
+
+ public static final String NETCONF_CONNECTOR_ALL_FEATURE = "odl-netconf-connector-all";
+ private static final String ORG_OPS4J_PAX_URL_MVN_CFG = "org.ops4j.pax.url.mvn.cfg";
+
+ private final File configDir;
private final List<Integer> openDevices;
+ private final File ncFeatureFile;
+ private final File etcDir;
+ private final File loadOrderCfgFile;
public ConfigGenerator(final File directory, final List<Integer> openDevices) {
- this.directory = directory;
+ this.configDir = new File(directory, ETC_OPENDAYLIGHT_KARAF_PATH);
+ this.etcDir = new File(directory, ETC_KARAF_PATH);
+ this.loadOrderCfgFile = new File(etcDir, ORG_OPS4J_PAX_URL_MVN_CFG);
+ this.ncFeatureFile = getFeatureFile(directory, "features-netconf-connector");
this.openDevices = openDevices;
}
- public void generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
- if(directory.exists() == false) {
- checkState(directory.mkdirs(), "Unable to create folder %s" + directory);
+ public List<File> generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
+ if(configDir.exists() == false) {
+ Preconditions.checkState(configDir.mkdirs(), "Unable to create directory " + configDir);
+ }
+
+ for (final File file : configDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return !pathname.isDirectory() && pathname.getName().startsWith(SIM_DEVICE_CFG_PREFIX);
+ }
+ })) {
+ Preconditions.checkState(file.delete(), "Unable to clean previous generated file %s", file);
}
try(InputStream stream = Main.class.getResourceAsStream(NETCONF_CONNECTOR_XML)) {
checkNotNull(stream, "Cannot load %s", NETCONF_CONNECTOR_XML);
String configBlueprint = CharStreams.toString(new InputStreamReader(stream, Charsets.UTF_8));
- // TODO make address configurable
checkState(configBlueprint.contains(NETCONF_CONNECTOR_NAME));
checkState(configBlueprint.contains(NETCONF_CONNECTOR_PORT));
checkState(configBlueprint.contains(NETCONF_USE_SSH));
StringBuilder b = new StringBuilder();
b.append(before);
+ final List<File> generatedConfigs = Lists.newArrayList();
+
for (final Integer openDevice : openDevices) {
if(batchStart == null) {
batchStart = openDevice;
connectorCount++;
if(connectorCount == batchSize) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevice)), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevice));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
connectorCount = 0;
b = new StringBuilder();
b.append(before);
// Write remaining
if(connectorCount != 0) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1))), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1)));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
}
- LOG.info("Config files generated in {}", directory);
+ LOG.info("Config files generated in {}", configDir);
+ return generatedConfigs;
} catch (final IOException e) {
throw new RuntimeException("Unable to generate config files", e);
}
}
+
+
+ public void updateFeatureFile(final List<File> generated) {
+ // TODO karaf core contains jaxb for feature files, use that for modification
+ try {
+ final Document document = XmlUtil.readXmlToDocument(Files.toString(ncFeatureFile, Charsets.UTF_8));
+ final NodeList childNodes = document.getDocumentElement().getChildNodes();
+
+ for (int i = 0; i < childNodes.getLength(); i++) {
+ final Node item = childNodes.item(i);
+ if(item instanceof Element == false) {
+ continue;
+ }
+ if(item.getLocalName().equals("feature") ==false) {
+ continue;
+ }
+
+ if(NETCONF_CONNECTOR_ALL_FEATURE.equals(((Element) item).getAttribute("name"))) {
+ final Element ncAllFeatureDefinition = (Element) item;
+ // Clean previous generated files
+ for (final XmlElement configfile : XmlElement.fromDomElement(ncAllFeatureDefinition).getChildElements("configfile")) {
+ ncAllFeatureDefinition.removeChild(configfile.getDomElement());
+ }
+ for (final File file : generated) {
+ final Element configfile = document.createElement("configfile");
+ configfile.setTextContent("file:" + ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+ configfile.setAttribute("finalname", ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+ ncAllFeatureDefinition.appendChild(configfile);
+ }
+ }
+ }
+
+ Files.write(XmlUtil.toString(document), ncFeatureFile, Charsets.UTF_8);
+ LOG.info("Feature file {} updated", ncFeatureFile);
+ } catch (final IOException e) {
+ throw new RuntimeException("Unable to load features file as a resource");
+ } catch (final SAXException e) {
+ throw new RuntimeException("Unable to parse features file");
+ }
+ }
+
+
+ private static File getFeatureFile(final File distroFolder, final String featureName) {
+ checkExistingDir(distroFolder, String.format("Folder %s does not exist", distroFolder));
+
+ final File systemDir = checkExistingDir(new File(distroFolder, "system"), String.format("Folder %s does not contain a karaf distro, folder system is missing", distroFolder));
+ final File netconfConnectorFeaturesParentDir = checkExistingDir(new File(systemDir, "org/opendaylight/controller/" + featureName), String.format("Karaf distro in %s does not contain netconf-connector features", distroFolder));
+
+ // Find newest version for features
+ final File newestVersionDir = Collections.max(
+ Lists.newArrayList(netconfConnectorFeaturesParentDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.isDirectory();
+ }
+ })), new Comparator<File>() {
+ @Override
+ public int compare(final File o1, final File o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+ });
+
+ return newestVersionDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.getName().contains(featureName);
+ }
+ })[0];
+ }
+
+ private static File checkExistingDir(final File folder, final String msg) {
+ Preconditions.checkArgument(folder.exists(), msg);
+ Preconditions.checkArgument(folder.isDirectory(), msg);
+ return folder;
+ }
+
+ public void changeLoadOrder() {
+ try {
+ Files.write(ByteStreams.toByteArray(getClass().getResourceAsStream("/" +ORG_OPS4J_PAX_URL_MVN_CFG)), loadOrderCfgFile);
+ LOG.info("Load order changed to prefer local bundles/features by rewriting file {}", loadOrderCfgFile);
+ } catch (IOException e) {
+ throw new RuntimeException("Unable to rewrite features file " + loadOrderCfgFile, e);
+ }
+ }
}
}
import com.google.common.base.Charsets;
import com.google.common.base.Function;
+import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.CharStreams;
import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.Futures;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.local.LocalAddress;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.net.Inet4Address;
import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceListener;
+import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceProvider;
import org.opendaylight.yangtools.yang.model.repo.util.FilesystemSchemaSourceCache;
import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
private static final Logger LOG = LoggerFactory.getLogger(NetconfDeviceSimulator.class);
- public static final int CONNECTION_TIMEOUT_MILLIS = 20000;
-
private final NioEventLoopGroup nettyThreadgroup;
private final HashedWheelTimer hashedWheelTimer;
private final List<Channel> devicesChannels = Lists.newArrayList();
}
private Map<ModuleBuilder, String> toModuleBuilders(final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> sources) {
- final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
- @Override
- public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
- return input.getKey().getAST();
- }
- });
- final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
- asts.values(), Optional.<SchemaContext>absent());
+ final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
+ @Override
+ public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
+ return input.getKey().getAST();
+ }
+ });
+ final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
+ asts.values(), Optional.<SchemaContext>absent());
- final ParseTreeWalker walker = new ParseTreeWalker();
- final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
+ final ParseTreeWalker walker = new ParseTreeWalker();
+ final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
- for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
- final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
- walker, entry.getValue()).getModuleBuilder();
+ for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
+ final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
+ walker, entry.getValue()).getModuleBuilder();
- try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
- sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
+ try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
+ sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
}
-
- return sourceToBuilder;
}
+ return sourceToBuilder;
+ }
+
public List<Integer> start(final Main.Params params) {
+ LOG.info("Starting {}, {} simulated devices starting on port {}", params.deviceCount, params.ssh ? "SSH" : "TCP", params.startingPort);
+
final Map<ModuleBuilder, String> moduleBuilders = parseSchemasToModuleBuilders(params);
final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders, params.exi, params.generateConfigsTimeout);
devicesChannels.add(server.channel());
openDevices.add(currentPort - 1);
-
}
if(openDevices.size() == params.deviceCount) {
- LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort);
+ LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort - 1);
} else {
LOG.warn("Not all simulated devices started successfully. Started devices ar on ports {}", openDevices);
}
public void schemaSourceUnregistered(final PotentialSchemaSource<?> potentialSchemaSource) {}
});
- final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
- consumer.registerSchemaSourceListener(cache);
+ if(params.schemasDir != null) {
+ final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
+ consumer.registerSchemaSourceListener(cache);
+ }
+
+ addDefaultSchemas(consumer);
final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> asts = Maps.newHashMap();
for (final SourceIdentifier loadedSource : loadedSources) {
return toModuleBuilders(asts);
}
+ private void addDefaultSchemas(final SharedSchemaRepository consumer) {
+ SourceIdentifier sId = new SourceIdentifier("ietf-netconf-monitoring", "2010-10-04");
+ registerSource(consumer, "/META-INF/yang/ietf-netconf-monitoring.yang", sId);
+
+ sId = new SourceIdentifier("ietf-yang-types", "2013-07-15");
+ registerSource(consumer, "/META-INF/yang/ietf-yang-types@2013-07-15.yang", sId);
+
+ sId = new SourceIdentifier("ietf-inet-types", "2010-09-24");
+ registerSource(consumer, "/META-INF/yang/ietf-inet-types.yang", sId);
+ }
+
+ private void registerSource(final SharedSchemaRepository consumer, final String resource, final SourceIdentifier sourceId) {
+ consumer.registerSchemaSource(new SchemaSourceProvider<SchemaSourceRepresentation>() {
+ @Override
+ public CheckedFuture<? extends SchemaSourceRepresentation, SchemaSourceException> getSource(final SourceIdentifier sourceIdentifier) {
+ return Futures.immediateCheckedFuture(new YangTextSchemaSource(sourceId) {
+ @Override
+ protected Objects.ToStringHelper addToStringAttributes(final Objects.ToStringHelper toStringHelper) {
+ return toStringHelper;
+ }
+
+ @Override
+ public InputStream openStream() throws IOException {
+ return getClass().getResourceAsStream(resource);
+ }
+ });
+ }
+ }, PotentialSchemaSource.create(sourceId, YangTextSchemaSource.class, PotentialSchemaSource.Costs.IMMEDIATE.getValue()));
+ }
+
private static InetSocketAddress getAddress(final int port) {
try {
// TODO make address configurable
static class SimulatedOperationService implements NetconfOperationService {
private final Set<Capability> capabilities;
- private static SimulatedGet sGet;
+ private final long currentSessionId;
public SimulatedOperationService(final Set<Capability> capabilities, final long currentSessionId) {
this.capabilities = capabilities;
- sGet = new SimulatedGet(String.valueOf(currentSessionId));
+ this.currentSessionId = currentSessionId;
}
@Override
@Override
public Set<NetconfOperation> getNetconfOperations() {
- return Sets.<NetconfOperation>newHashSet(sGet);
+ final DataList storage = new DataList();
+ final SimulatedGet sGet = new SimulatedGet(String.valueOf(currentSessionId), storage);
+ final SimulatedEditConfig sEditConfig = new SimulatedEditConfig(String.valueOf(currentSessionId), storage);
+ final SimulatedGetConfig sGetConfig = new SimulatedGetConfig(String.valueOf(currentSessionId), storage);
+ final SimulatedCommit sCommit = new SimulatedCommit(String.valueOf(currentSessionId));
+ return Sets.<NetconfOperation>newHashSet(sGet, sGetConfig, sEditConfig, sCommit);
}
@Override
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Optional;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
+import org.opendaylight.controller.netconf.confignetconfconnector.operations.AbstractConfigNetconfOperation;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+class SimulatedCommit extends AbstractConfigNetconfOperation {
+
+ SimulatedCommit(final String netconfSessionIdForReporting) {
+ super(null, netconfSessionIdForReporting);
+ }
+
+ @Override
+ protected Element handleWithNoSubsequentOperations(final Document document, final XmlElement operationElement) throws NetconfDocumentedException {
+ return XmlUtil.createElement(document, XmlNetconfConstants.OK, Optional.<String>absent());
+ }
+
+ @Override
+ protected String getOperationName() {
+ return XmlNetconfConstants.COMMIT;
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Optional;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
+import org.opendaylight.controller.netconf.confignetconfconnector.operations.AbstractConfigNetconfOperation;
+import org.opendaylight.controller.netconf.confignetconfconnector.operations.editconfig.EditConfigXmlParser;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Attr;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+class SimulatedEditConfig extends AbstractConfigNetconfOperation {
+ private static final String DELETE_EDIT_CONFIG = "delete";
+ private static final String OPERATION = "operation";
+ private static final String REMOVE_EDIT_CONFIG = "remove";
+ private final DataList storage;
+
+ SimulatedEditConfig(final String netconfSessionIdForReporting, final DataList storage) {
+ super(null, netconfSessionIdForReporting);
+ this.storage = storage;
+ }
+
+ @Override
+ protected Element handleWithNoSubsequentOperations(final Document document, final XmlElement operationElement) throws NetconfDocumentedException {
+ final XmlElement configElementData = operationElement.getOnlyChildElement(XmlNetconfConstants.CONFIG_KEY);
+
+ containsDelete(configElementData);
+ if(containsDelete(configElementData)){
+ storage.resetConfigList();
+ } else {
+ storage.setConfigList(configElementData.getChildElements());
+ }
+
+ return XmlUtil.createElement(document, XmlNetconfConstants.OK, Optional.<String>absent());
+ }
+
+ @Override
+ protected String getOperationName() {
+ return EditConfigXmlParser.EDIT_CONFIG;
+ }
+
+ private boolean containsDelete(final XmlElement element) {
+ for (final Attr o : element.getAttributes().values()) {
+ if (o.getLocalName().equals(OPERATION)
+ && (o.getValue().equals(DELETE_EDIT_CONFIG) || o.getValue()
+ .equals(REMOVE_EDIT_CONFIG))) {
+ return true;
+ }
+
+ }
+
+ for (final XmlElement xmlElement : element.getChildElements()) {
+ if (containsDelete(xmlElement)) {
+ return true;
+ }
+
+ }
+
+ return false;
+ }
+}
class SimulatedGet extends AbstractConfigNetconfOperation {
- SimulatedGet(final String netconfSessionIdForReporting) {
+ private final DataList storage;
+
+ SimulatedGet(final String netconfSessionIdForReporting, final DataList storage) {
super(null, netconfSessionIdForReporting);
+ this.storage = storage;
}
@Override
protected Element handleWithNoSubsequentOperations(final Document document, final XmlElement operationElement) throws NetconfDocumentedException {
- return XmlUtil.createElement(document, XmlNetconfConstants.DATA_KEY, Optional.<String>absent());
+ final Element element = XmlUtil.createElement(document, XmlNetconfConstants.DATA_KEY, Optional.<String>absent());
+
+ for(final XmlElement e : storage.getConfigList()) {
+ final Element domElement = e.getDomElement();
+ element.appendChild(element.getOwnerDocument().importNode(domElement, true));
+ }
+
+ return element;
}
@Override
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Optional;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
+import org.opendaylight.controller.netconf.confignetconfconnector.operations.AbstractConfigNetconfOperation;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+class SimulatedGetConfig extends AbstractConfigNetconfOperation {
+
+ private final DataList storage;
+
+ SimulatedGetConfig(final String netconfSessionIdForReporting, final DataList storage) {
+ super(null, netconfSessionIdForReporting);
+ this.storage = storage;
+ }
+
+ @Override
+ protected Element handleWithNoSubsequentOperations(final Document document, final XmlElement operationElement) throws NetconfDocumentedException {
+ final Element element = XmlUtil.createElement(document, XmlNetconfConstants.DATA_KEY, Optional.<String>absent());
+
+ for(final XmlElement e : storage.getConfigList()) {
+ final Element domElement = e.getDomElement();
+ element.appendChild(element.getOwnerDocument().importNode(domElement, true));
+ }
+
+ return element;
+ }
+
+ @Override
+ protected String getOperationName() {
+ return XmlNetconfConstants.GET_CONFIG;
+ }
+}
--- /dev/null
+################################################################################
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+#
+# If set to true, the following property will not allow any certificate to be used
+# when accessing Maven repositories through SSL
+#
+#org.ops4j.pax.url.mvn.certificateCheck=
+
+#
+# Path to the local Maven settings file.
+# The repositories defined in this file will be automatically added to the list
+# of default repositories if the 'org.ops4j.pax.url.mvn.repositories' property
+# below is not set.
+# The following locations are checked for the existence of the settings.xml file
+# * 1. looks for the specified url
+# * 2. if not found looks for ${user.home}/.m2/settings.xml
+# * 3. if not found looks for ${maven.home}/conf/settings.xml
+# * 4. if not found looks for ${M2_HOME}/conf/settings.xml
+#
+#org.ops4j.pax.url.mvn.settings=
+
+#
+# Path to the local Maven repository which is used to avoid downloading
+# artifacts when they already exist locally.
+# The value of this property will be extracted from the settings.xml file
+# above, or defaulted to:
+# System.getProperty( "user.home" ) + "/.m2/repository"
+#
+org.ops4j.pax.url.mvn.localRepository=${karaf.home}/${karaf.default.repository}
+
+#
+# Default this to false. It's just weird to use undocumented repos
+#
+org.ops4j.pax.url.mvn.useFallbackRepositories=false
+
+#
+# Uncomment if you don't want to use the proxy settings
+# from the Maven conf/settings.xml file
+#
+# org.ops4j.pax.url.mvn.proxySupport=false
+
+#
+# Disable aether support by default. This ensures that the defaultRepositories
+# below will be used
+#
+#org.ops4j.pax.url.mvn.disableAether=true
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# Those repositories will be checked before iterating through the
+# below list of repositories and even before the local repository
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snapshots
+# @noreleases : the repository does not contain any released artifacts
+#
+# The following property value will add the system folder as a repo.
+#
+#org.ops4j.pax.url.mvn.defaultRepositories=
+
+# Use the default local repo (e.g.~/.m2/repository) as a "remote" repo
+org.ops4j.pax.url.mvn.defaultLocalRepoAsRemote=false
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# The default list includes the following repositories containing releases:
+# http://repo1.maven.org/maven2
+# http://repository.apache.org/content/groups/snapshots-group
+# http://svn.apache.org/repos/asf/servicemix/m2-repo
+# http://repository.springsource.com/maven/bundles/release
+# http://repository.springsource.com/maven/bundles/external
+# To add repositories to the default ones, prepend '+' to the list of repositories
+# to add.
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snapshots
+# @noreleases : the repository does not contain any released artifacts
+# @id=repoid : the id for the repository, just like in the settings.xml; this is optional but recommended
+#
+# The default list doesn't contain any repository containing snapshots as it can impact the artifacts resolution.
+# You may want to add the following repositories containing snapshots:
+# http://repository.apache.org/content/groups/snapshots-group@id=apache@snapshots@noreleases
+# http://oss.sonatype.org/content/repositories/snapshots@id=sonatype.snapshots.deploy@snapshots@norelease
+# http://oss.sonatype.org/content/repositories/ops4j-snapshots@id=ops4j.sonatype.snapshots.deploy@snapshots@noreleases
+#
+org.ops4j.pax.url.mvn.repositories= \
+ file:${karaf.home}/${karaf.default.repository}@id=system.repository, \
+ file:${karaf.data}/kar@id=kar.repository@multi, \
+ http://repo1.maven.org/maven2@id=central, \
+ http://repository.springsource.com/maven/bundles/release@id=spring.ebr.release, \
+ http://repository.springsource.com/maven/bundles/external@id=spring.ebr.external
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<relativePath>../</relativePath>
</parent>
<artifactId>netconf-usermanager</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
</parent>
<artifactId>netconf-util</artifactId>
<packaging>bundle</packaging>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>netconf-subsystem</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.3.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<prerequisites>
<module>netconf-connector-config</module>
<module>netconf-auth</module>
<module>netconf-usermanager</module>
+ <module>netconf-testtool</module>
</modules>
<dependencies>
<module>netconf-it</module>
</modules>
</profile>
-
- <profile>
- <id>testtool</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>netconf-testtool</module>
- </modules>
- </profile>
</profiles>
</project>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>networkconfig.neutron.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<enunciate.version>1.26.2</enunciate.version>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>networkconfig.neutron</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<enunciate.version>1.26.2</enunciate.version>
<groupId>org.opendaylight.controller</groupId>
<artifactId>app-northbound</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>maven-archetype</packaging>
<name>app-northbound</name>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.1-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>bundlescanner</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>bundlescanner.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>commons.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>connectionmanager.northbound</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>containermanager.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>controllermanager.northbound</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>flowprogrammer.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>hosttracker.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>httpservice-bridge</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<description>HttpService bridge web application</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>northbound.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>ch.qos.logback</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>northbound.client</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>jolokia-bridge</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<description>Jolokia bridge web application</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>networkconfig.bridgedomain.northbound</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>networkconfig.neutron.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>forwarding.staticrouting.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>statistics.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>subnets.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>swagger-ui</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>switchmanager.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>topology.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>usermanager.northbound</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.0-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../opendaylight/commons/opendaylight</relativePath>
</parent>
<scm>
<groupId>org.opendaylight.controller</groupId>
<artifactId>northboundtest</artifactId>
- <version>0.4.0-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<build>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>protocol_plugins.openflow</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>protocol_plugins.stub</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>routing.dijkstra_implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.9.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal.connection</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal.connection.implementation</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal.networkconfiguration</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>sal.networkconfiguration.implementation</artifactId>
- <version>0.0.3-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>clustersession</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>clustering.services</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>samples.loadbalancer</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../../commons/opendaylight</relativePath>
</parent>
<artifactId>samples.loadbalancer.northbound</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>samples.simpleforwarding</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../commons/opendaylight</relativePath>
</parent>
<artifactId>security</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>statisticsmanager</artifactId>
- <version>0.5.1-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>statisticsmanager.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<!-- Sonar properties using jacoco to retrieve integration test results -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>statisticsmanager.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<properties>
<sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>switchmanager</artifactId>
- <version>0.7.1-SNAPSHOT</version>
+ <version>0.8.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>switchmanager.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<properties>
c.add(createContainerServiceDependency(containerName).setService(
IInventoryService.class).setCallbacks(
"setInventoryService", "unsetInventoryService")
- .setRequired(false));
+ .setRequired(true));
c.add(createContainerServiceDependency(containerName).setService(
IStatisticsManager.class).setCallbacks(
"setStatisticsManager", "unsetStatisticsManager")
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>switchmanager.integrationtest</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<properties>
<sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>topologymanager</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.integrationtest</artifactId>
- <version>0.5.2-SNAPSHOT</version>
+ <version>0.6.0-SNAPSHOT</version>
<relativePath>../../commons/integrationtest</relativePath>
</parent>
<artifactId>topologymanager.integrationtest</artifactId>
- <version>0.4.1-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<properties>
- <sonar.jacoco.itReportPath>../implementaiton/target/jacoco-it.exec</sonar.jacoco.itReportPath>
+ <sonar.jacoco.itReportPath>../implementation/target/jacoco-it.exec</sonar.jacoco.itReportPath>
<!-- Sonar jacoco plugin to get integration test coverage info -->
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>topologymanager.shell</artifactId>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>usermanager</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>usermanager.implementation</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.0-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<scm>
<groupId>org.opendaylight.controller</groupId>
<artifactId>web.brandfragment</artifactId>
- <version>0.4.0-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<build>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>devices.web</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>flows.web</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>osgi-brandfragment.web</artifactId>
- <version>0.0.2-SNAPSHOT</version>
+ <version>0.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<description>OSGi management web application brand fragment</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>web</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>topology.web</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.opendaylight</artifactId>
- <version>1.4.2-SNAPSHOT</version>
+ <version>1.5.0-SNAPSHOT</version>
<relativePath>../../commons/opendaylight</relativePath>
</parent>
<artifactId>troubleshoot.web</artifactId>
- <version>0.4.2-SNAPSHOT</version>
+ <version>0.5.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<dependency>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.parent</artifactId>
- <version>1.0.2-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<relativePath>opendaylight/commons/parent</relativePath>
</parent>
<artifactId>releasepom</artifactId>
- <version>0.1.2-SNAPSHOT</version>
+ <version>0.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<prerequisites>
<maven>3.0</maven>
</prerequisites>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.2-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<scm>
<connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.2-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../commons/thirdparty</relativePath>
</parent>
<groupId>org.opendaylight.controller.thirdparty</groupId>
<artifactId>ganymed</artifactId>
- <version>1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<dependencies>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../commons/thirdparty</relativePath>
</parent>
<scm>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller.thirdparty</groupId>
<artifactId>com.sun.jersey.jersey-servlet</artifactId>
- <version>1.18-SNAPSHOT</version>
+ <version>1.19.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<build>
<plugins>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../commons/thirdparty</relativePath>
</parent>
<scm>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller.thirdparty</groupId>
<artifactId>net.sf.jung2</artifactId>
- <version>2.0.2-SNAPSHOT</version>
+ <version>2.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<build>
<plugins>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller.thirdparty</groupId>
<artifactId>org.openflow.openflowj</artifactId>
- <version>1.0.3-SNAPSHOT</version>
+ <version>1.1.0-SNAPSHOT</version>
<name>OpenFlow Java</name>
<description>A Java implemention of the OpenFlow v1.0 protocol</description>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../commons/thirdparty</relativePath>
</parent>
<parent>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.thirdparty</artifactId>
- <version>1.1.1-SNAPSHOT</version>
+ <version>1.2.0-SNAPSHOT</version>
<relativePath>../commons/thirdparty</relativePath>
</parent>
<scm>
<modelVersion>4.0.0</modelVersion>
<groupId>org.opendaylight.controller.thirdparty</groupId>
<artifactId>org.apache.catalina.filters.CorsFilter</artifactId>
- <version>7.0.43-SNAPSHOT</version>
+ <version>7.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
<build>
<plugins>