From: Ed Warnicke Date: Wed, 13 Aug 2014 14:40:16 +0000 (+0000) Subject: Merge "Bug-1338: Create a grouping for order to help create generic OrderComparator... X-Git-Tag: release/helium~297 X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=commitdiff_plain;h=961b5b9260565194a863a25bd697f171ec2405af;hp=938d130525543760d1efdc764b7c80fa412e88aa;p=controller.git Merge "Bug-1338: Create a grouping for order to help create generic OrderComparator in OFP" --- diff --git a/features/config-netty/pom.xml b/features/config-netty/pom.xml index 16fd975130..2f4b4b1e21 100644 --- a/features/config-netty/pom.xml +++ b/features/config-netty/pom.xml @@ -7,7 +7,7 @@ 0.2.5-SNAPSHOT ../../opendaylight/config/ - config-netty-features + features-config-netty pom @@ -18,11 +18,35 @@ org.opendaylight.controller - config-persister-features + features-config-persister features xml runtime + + org.opendaylight.controller + netty-event-executor-config + + + org.opendaylight.controller + netty-threadgroup-config + + + org.opendaylight.controller + netty-timer-config + + + org.opendaylight.controller + threadpool-config-api + + + org.opendaylight.controller + threadpool-config-impl + + + org.opendaylight.controller + config-netty-config + diff --git a/features/config-netty/src/main/resources/features.xml b/features/config-netty/src/main/resources/features.xml index f1b2d1f753..7f57d8cb84 100644 --- a/features/config-netty/src/main/resources/features.xml +++ b/features/config-netty/src/main/resources/features.xml @@ -3,7 +3,7 @@ - mvn:org.opendaylight.controller/config-persister-features/${config.version}/xml/features + mvn:org.opendaylight.controller/features-config-persister/${config.version}/xml/features odl-config-netty-config-api mvn:org.opendaylight.controller/netty-event-executor-config/${project.version} @@ -12,6 +12,6 @@ mvn:org.opendaylight.controller/threadpool-config-api/${project.version} mvn:org.opendaylight.controller/threadpool-config-impl/${project.version} odl-config-startup - mvn:org.opendaylight.controller/config-netty-config/${config.version}/xml/config + mvn:org.opendaylight.controller/config-netty-config/${config.version}/xml/config \ No newline at end of file diff --git a/features/config-persister/pom.xml b/features/config-persister/pom.xml index ec1520ed98..6dc8941345 100644 --- a/features/config-persister/pom.xml +++ b/features/config-persister/pom.xml @@ -7,7 +7,7 @@ 0.2.5-SNAPSHOT ../../opendaylight/config/ - config-persister-features + features-config-persister pom @@ -26,18 +26,62 @@ org.opendaylight.controller - netconf-features + features-netconf features xml runtime org.opendaylight.controller - config-features + features-config features xml runtime + + org.opendaylight.controller + config-persister-api + + + org.opendaylight.controller + config-persister-file-xml-adapter + + + org.opendaylight.controller + config-persister-impl + + + org.opendaylight.controller + config-persister-feature-adapter + + + org.opendaylight.controller + netconf-util + + + org.opendaylight.controller + netconf-mapping-api + + + com.google.guava + guava + + + commons-io + commons-io + + + org.apache.commons + commons-lang3 + + + org.eclipse.persistence + org.eclipse.persistence.core + + + org.eclipse.persistence + org.eclipse.persistence.moxy + diff --git a/features/config-persister/src/main/resources/features.xml b/features/config-persister/src/main/resources/features.xml index 2273a4a309..a3c005b3bd 100644 --- a/features/config-persister/src/main/resources/features.xml +++ 
b/features/config-persister/src/main/resources/features.xml @@ -4,22 +4,20 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://karaf.apache.org/xmlns/features/v1.2.0 http://karaf.apache.org/xmlns/features/v1.2.0"> mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features - mvn:org.opendaylight.controller/netconf-features/${netconf.version}/xml/features - mvn:org.opendaylight.controller/config-features/${config.version}/xml/features - - odl-config-netconf-connector + mvn:org.opendaylight.controller/features-netconf/${netconf.version}/xml/features + mvn:org.opendaylight.controller/features-config/${config.version}/xml/features + odl-config-persister - odl-netconf-impl + odl-config-startup odl-netconf-api odl-config-api - yangtools-binding-generator + odl-yangtools-binding-generator mvn:org.opendaylight.controller/config-persister-api/${project.version} mvn:org.opendaylight.controller/config-persister-file-xml-adapter/${project.version} - mvn:org.opendaylight.controller/config-persister-directory-xml-adapter/${project.version} mvn:org.opendaylight.controller/config-persister-impl/${project.version} - + mvn:org.opendaylight.controller/config-persister-feature-adapter/${project.version} mvn:org.opendaylight.controller/netconf-util/${netconf.version} mvn:org.opendaylight.controller/netconf-mapping-api/${netconf.version} @@ -29,4 +27,9 @@ mvn:org.eclipse.persistence/org.eclipse.persistence.core/${eclipse.persistence.version} mvn:org.eclipse.persistence/org.eclipse.persistence.moxy/${eclipse.persistence.version} - \ No newline at end of file + + odl-config-netconf-connector + odl-config-persister + odl-netconf-impl + + diff --git a/features/config/pom.xml b/features/config/pom.xml index 7e5dd6472b..c69e11bed2 100644 --- a/features/config/pom.xml +++ b/features/config/pom.xml @@ -7,7 +7,7 @@ 0.2.5-SNAPSHOT ../../opendaylight/config/ - config-features + features-config pom @@ -24,6 +24,74 @@ xml runtime + + org.opendaylight.controller + sal-common + + + org.opendaylight.controller + sal-common-api + + + org.opendaylight.controller + sal-common-impl + + + org.opendaylight.controller + sal-common-util + + + org.opendaylight.controller + config-api + + + org.opendaylight.controller + netty-config-api + + + io.netty + netty-transport + + + io.netty + netty-common + + + io.netty + netty-buffer + + + org.opendaylight.controller + config-util + + + org.opendaylight.controller + yang-jmx-generator + + + org.opendaylight.controller + shutdown-api + + + org.opendaylight.controller + shutdown-impl + + + org.osgi + org.osgi.core + + + com.google.guava + guava + + + org.javassist + javassist + + + org.opendaylight.controller + config-manager + diff --git a/features/config/src/main/resources/features.xml b/features/config/src/main/resources/features.xml index de5b198173..6c0d32427d 100644 --- a/features/config/src/main/resources/features.xml +++ b/features/config/src/main/resources/features.xml @@ -5,45 +5,52 @@ xsi:schemaLocation="http://karaf.apache.org/xmlns/features/v1.2.0 http://karaf.apache.org/xmlns/features/v1.2.0"> mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features - - yangtools-concepts - yangtools-binding - yangtools-binding-generator - odl-mdsal-commons - odl-config-api - mvn:org.opendaylight.controller/config-util/${project.version} - mvn:org.opendaylight.controller/yang-jmx-generator/${project.version} - mvn:org.opendaylight.controller/shutdown-api/${project.version} - 
mvn:org.opendaylight.controller/shutdown-impl/${project.version} - mvn:org.osgi/org.osgi.core/${osgi.core.version} - mvn:com.google.guava/guava/${guava.version} - mvn:org.javassist/javassist/${javassist.version} + + odl-mdsal-common + odl-config-api + odl-config-netty-config-api + odl-config-core + odl-config-manager - - odl-config-core - mvn:org.opendaylight.controller/config-manager/${project.version} + + + odl-yangtools-data-binding + mvn:org.opendaylight.controller/sal-common/${mdsal.version} + mvn:org.opendaylight.controller/sal-common-api/${mdsal.version} + mvn:org.opendaylight.controller/sal-common-impl/${mdsal.version} + mvn:org.opendaylight.controller/sal-common-util/${mdsal.version} mvn:org.opendaylight.controller/config-api/${project.version} - - - yangtools-concepts - yangtools-binding + odl-yangtools-common + odl-yangtools-binding + odl-config-api mvn:org.opendaylight.controller/netty-config-api/${project.version} - - mvn:io.netty/netty-transport/${netty.version} mvn:io.netty/netty-common/${netty.version} mvn:io.netty/netty-buffer/${netty.version} + + + odl-yangtools-common + odl-yangtools-binding + odl-yangtools-binding-generator + odl-mdsal-common odl-config-api + mvn:org.opendaylight.controller/config-util/${project.version} + mvn:org.opendaylight.controller/yang-jmx-generator/${project.version} + mvn:org.opendaylight.controller/shutdown-api/${project.version} + mvn:org.opendaylight.controller/shutdown-impl/${project.version} + mvn:org.osgi/org.osgi.core/${osgi.core.version} + mvn:com.google.guava/guava/${guava.version} + mvn:org.javassist/javassist/${javassist.version} - - mvn:org.opendaylight.controller/netconf-config-dispatcher/${project.version} + + odl-config-core + mvn:org.opendaylight.controller/config-manager/${project.version} - \ No newline at end of file diff --git a/features/flow/pom.xml b/features/flow/pom.xml new file mode 100644 index 0000000000..09bb6c91e6 --- /dev/null +++ b/features/flow/pom.xml @@ -0,0 +1,120 @@ + + + 4.0.0 + + org.opendaylight.controller + sal-parent + 1.1-SNAPSHOT + ../../opendaylight/md-sal + + features-flow + + pom + + + features.xml + + + + + org.opendaylight.controller + features-mdsal + ${mdsal.version} + features + xml + runtime + + + org.opendaylight.controller.model + model-flow-base + + + org.opendaylight.controller.model + model-flow-service + + + org.opendaylight.controller.model + model-flow-statistics + + + org.opendaylight.controller.model + model-inventory + + + org.opendaylight.controller.model + model-topology + + + org.opendaylight.controller.md + topology-manager + + + org.opendaylight.controller.md + topology-lldp-discovery + + + org.opendaylight.controller.md + statistics-manager + + + org.opendaylight.controller.md + inventory-manager + + + org.opendaylight.controller.md + forwardingrules-manager + + + + + + + true + src/main/resources + + + + + org.apache.maven.plugins + maven-resources-plugin + + + filter + + resources + + generate-resources + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + attach-artifacts + + attach-artifact + + package + + + + ${project.build.directory}/classes/${features.file} + xml + features + + + + + + + + + + scm:git:ssh://git.opendaylight.org:29418/controller.git + scm:git:ssh://git.opendaylight.org:29418/controller.git + HEAD + https://wiki.opendaylight.org/view/OpenDaylight_Controller:MD-SAL + + diff --git a/features/flow/src/main/resources/features.xml b/features/flow/src/main/resources/features.xml new file mode 100644 index 0000000000..3f914be4ae --- /dev/null +++ 
b/features/flow/src/main/resources/features.xml @@ -0,0 +1,26 @@ + + + + mvn:org.opendaylight.controller/features-mdsal/${mdsal.version}/xml/features + + odl-yangtools-models + mvn:org.opendaylight.controller.model/model-flow-base/${project.version} + mvn:org.opendaylight.controller.model/model-flow-service/${project.version} + mvn:org.opendaylight.controller.model/model-flow-statistics/${project.version} + mvn:org.opendaylight.controller.model/model-inventory/${project.version} + mvn:org.opendaylight.controller.model/model-topology/${project.version} + + + odl-mdsal-broker + odl-flow-model + mvn:org.opendaylight.controller.md/topology-manager/${project.version} + mvn:org.opendaylight.controller.md/topology-lldp-discovery/${project.version} + mvn:org.opendaylight.controller.md/statistics-manager/${project.version} + mvn:org.opendaylight.controller.md/inventory-manager/${project.version} + mvn:org.opendaylight.controller.md/forwardingrules-manager/${project.version} + mvn:org.opendaylight.controller/liblldp/${sal.version} + + + diff --git a/features/mdsal/pom.xml b/features/mdsal/pom.xml index 2983c5efab..4f1ba98e5c 100644 --- a/features/mdsal/pom.xml +++ b/features/mdsal/pom.xml @@ -7,7 +7,7 @@ 1.1-SNAPSHOT ../../opendaylight/md-sal - mdsal-features + features-mdsal pom @@ -15,7 +15,164 @@ features.xml - + + + org.opendaylight.yangtools + features-yangtools + features + xml + runtime + + + org.opendaylight.controller + features-config + features + xml + runtime + + + org.opendaylight.controller + features-config-persister + features + xml + runtime + + + org.opendaylight.controller + features-config-netty + features + xml + runtime + + + org.opendaylight.controller + sal-core-api + + + org.opendaylight.controller + sal-core-api + + + org.opendaylight.controller + sal-core-spi + + + org.opendaylight.controller + sal-broker-impl + + + org.opendaylight.controller + sal-binding-api + + + org.opendaylight.controller + sal-binding-config + + + org.opendaylight.controller + sal-binding-broker-impl + + + org.opendaylight.controller + sal-binding-util + + + org.opendaylight.controller + sal-connector-api + + + org.opendaylight.controller + sal-inmemory-datastore + + + org.opendaylight.controller + md-sal-config + + + org.opendaylight.controller + sal-netconf-connector + + + org.opendaylight.controller.model + model-inventory + + + org.opendaylight.controller + netconf-config-dispatcher + + + org.opendaylight.controller + netconf-connector-config + + + org.opendaylight.controller + sal-rest-connector + + + com.google.code.gson + gson + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-server + + + org.opendaylight.controller.thirdparty + com.sun.jersey.jersey-servlet + + + io.netty + netty-buffer + + + io.netty + netty-codec + + + io.netty + netty-codec-http + + + io.netty + netty-common + + + io.netty + netty-handler + + + io.netty + netty-transport + + + org.opendaylight.controller + sal-remote + + + org.opendaylight.controller + sal-rest-connector-config + + + org.opendaylight.controller.samples + sample-toaster + + + org.opendaylight.controller.samples + sample-toaster-provider + + + org.opendaylight.controller.samples + sample-toaster-consumer + + + org.opendaylight.controller.samples + toaster-config + + diff --git a/features/mdsal/src/main/resources/features.xml b/features/mdsal/src/main/resources/features.xml index 7d393bc64c..a3d7ed0f83 100644 --- a/features/mdsal/src/main/resources/features.xml +++ b/features/mdsal/src/main/resources/features.xml @@ -1,28 +1,24 @@ - + 
mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features + mvn:org.opendaylight.controller/features-config/${config.version}/xml/features + mvn:org.opendaylight.controller/features-config-persister/${config.version}/xml/features + mvn:org.opendaylight.controller/features-config-netty/${config.version}/xml/features - odl-mdsal-commons odl-mdsal-broker - odl-mdsal-restconf - - - yangtools-data-binding - mvn:org.opendaylight.controller/sal-common/${project.version} - mvn:org.opendaylight.controller/sal-common-api/${project.version} - mvn:org.opendaylight.controller/sal-common-impl/${project.version} - mvn:org.opendaylight.controller/sal-common-util/${project.version} + odl-mdsal-netconf-connector + odl-restconf + odl-toaster - yangtools-concepts - yangtools-binding - odl-mdsal-commons - odl-config-core - odl-config-manager - odl-config-api - odl-config-persister + odl-yangtools-common + odl-yangtools-binding + odl-mdsal-common + odl-config-startup + odl-config-netty mvn:org.opendaylight.controller/sal-core-api/${project.version} mvn:org.opendaylight.controller/sal-core-spi/${project.version} mvn:org.opendaylight.controller/sal-broker-impl/${project.version} @@ -32,47 +28,41 @@ mvn:org.opendaylight.controller/sal-binding-util/${project.version} mvn:org.opendaylight.controller/sal-connector-api/${project.version} mvn:org.opendaylight.controller/sal-inmemory-datastore/${project.version} + mvn:org.opendaylight.controller/md-sal-config/${mdsal.version}/xml/config + + + odl-mdsal-broker + odl-netconf-client + odl-yangtools-models + mvn:org.opendaylight.controller/sal-netconf-connector/${project.version} + mvn:org.opendaylight.controller.model/model-inventory/${project.version} + mvn:org.opendaylight.controller/netconf-config-dispatcher/${config.version} + mvn:org.opendaylight.controller/netconf-connector-config/${netconf.version}/xml/config - + odl-mdsal-broker + war mvn:org.opendaylight.controller/sal-rest-connector/${project.version} - wrap:mvn:com.google.code.gson/gson/${gson.version} - wrap:mvn:com.sun.jersey/jersey-core/${jersey.version} - wrap:mvn:com.sun.jersey/jersey-server/${jersey.version} + mvn:com.google.code.gson/gson/${gson.version} + mvn:com.sun.jersey/jersey-core/${jersey.version} + mvn:com.sun.jersey/jersey-server/${jersey.version} mvn:org.opendaylight.controller.thirdparty/com.sun.jersey.jersey-servlet/${jersey.version} - wrap:mvn:io.netty/netty-buffer/${netty.version} - wrap:mvn:io.netty/netty-codec/${netty.version} - wrap:mvn:io.netty/netty-codec-http/${netty.version} - wrap:mvn:io.netty/netty-common/${netty.version} - wrap:mvn:io.netty/netty-handler/${netty.version} - wrap:mvn:io.netty/netty-transport/${netty.version} - - - mvn:org.opendaylight.controller.model/model-flow-base/${project.version} - mvn:org.opendaylight.controller.model/model-flow-management/${project.version} - mvn:org.opendaylight.controller.model/model-flow-service/${project.version} - mvn:org.opendaylight.controller.model/model-flow-statistics/${project.version} - mvn:org.opendaylight.controller.model/model-inventory/${project.version} - mvn:org.opendaylight.controller.model/model-topology/${project.version} + mvn:io.netty/netty-buffer/${netty.version} + mvn:io.netty/netty-codec/${netty.version} + mvn:io.netty/netty-codec-http/${netty.version} + mvn:io.netty/netty-common/${netty.version} + mvn:io.netty/netty-handler/${netty.version} + mvn:io.netty/netty-transport/${netty.version} + mvn:org.opendaylight.controller/sal-remote/${project.version} + 
mvn:org.opendaylight.controller/sal-rest-connector-config/${mdsal.version}/xml/config - - yangtools-concepts - yangtools-binding + + odl-yangtools-common + odl-yangtools-binding odl-mdsal-broker - odl-mdsal-all mvn:org.opendaylight.controller.samples/sample-toaster/${project.version} mvn:org.opendaylight.controller.samples/sample-toaster-consumer/${project.version} mvn:org.opendaylight.controller.samples/sample-toaster-provider/${project.version} + mvn:org.opendaylight.controller.samples/toaster-config/${project.version}/xml/config - - mvn:org.opendaylight.controller/sal-netconf-connector/${project.version} - mvn:org.opendaylight.controller/sal-restconf-broker/${project.version} - mvn:org.opendaylight.controller/sal-remote/${project.version} - mvn:org.opendaylight.controller.md/topology-manager/${project.version} - mvn:org.opendaylight.controller.md/topology-lldp-discovery/${project.version} - mvn:org.opendaylight.controller.md/statistics-manager/${project.version} - mvn:org.opendaylight.controller.md/inventory-manager/${project.version} - mvn:org.opendaylight.controller.md/forwardingrules-manager/${project.version} - - diff --git a/features/netconf/pom.xml b/features/netconf/pom.xml index 856557c1e8..956a67e28b 100644 --- a/features/netconf/pom.xml +++ b/features/netconf/pom.xml @@ -7,7 +7,7 @@ 0.2.5-SNAPSHOT ../../opendaylight/netconf - netconf-features + features-netconf pom @@ -18,18 +18,98 @@ org.opendaylight.controller - config-features + features-config features xml runtime org.opendaylight.controller - features-odl-protocol-framework + features-protocol-framework features xml runtime + + org.opendaylight.controller + netconf-api + + + org.opendaylight.controller + ietf-netconf-monitoring + + + org.opendaylight.controller + ietf-netconf-monitoring-extension + + + org.opendaylight.yangtools.model + ietf-inet-types + + + org.opendaylight.yangtools.model + ietf-yang-types + + + org.opendaylight.controller + netconf-mapping-api + + + org.opendaylight.controller + netconf-util + + + org.opendaylight.controller + netconf-impl + + + org.opendaylight.controller + config-netconf-connector + + + org.opendaylight.controller + netconf-netty-util + + + org.opendaylight.controller.thirdparty + ganymed + + + org.openexi + nagasena + + + io.netty + netty-codec + + + io.netty + netty-handler + + + io.netty + netty-common + + + io.netty + netty-buffer + + + io.netty + netty-transport + + + org.opendaylight.controller + netconf-client + + + org.opendaylight.controller + netconf-config + + + org.opendaylight.controller + netconf-monitoring + diff --git a/features/netconf/src/main/resources/features.xml b/features/netconf/src/main/resources/features.xml index 50a537b50a..0033b0d83c 100644 --- a/features/netconf/src/main/resources/features.xml +++ b/features/netconf/src/main/resources/features.xml @@ -3,14 +3,24 @@ - mvn:org.opendaylight.controller/features-odl-protocol-framework/${protocol-framework.version}/xml/features - mvn:org.opendaylight.controller/config-features/${config.version}/xml/features + mvn:org.opendaylight.controller/features-protocol-framework/${protocol-framework.version}/xml/features + mvn:org.opendaylight.controller/features-config/${config.version}/xml/features + + odl-netconf-api + odl-netconf-mapping-api + odl-netconf-util + odl-netconf-impl + odl-config-netconf-connector + odl-netconf-netty-util + odl-netconf-client + odl-netconf-monitoring + + odl-protocol-framework mvn:org.opendaylight.controller/netconf-api/${project.version} 
mvn:org.opendaylight.controller/ietf-netconf-monitoring/${project.version} mvn:org.opendaylight.controller/ietf-netconf-monitoring-extension/${project.version} - odl-protocol-framework mvn:org.opendaylight.yangtools.model/ietf-inet-types/${ietf-inet-types.version} mvn:org.opendaylight.yangtools.model/ietf-yang-types/${ietf-yang-types.version} @@ -22,26 +32,25 @@ odl-netconf-mapping-api mvn:org.opendaylight.controller/netconf-util/${project.version} - - odl-config-manager - mvn:org.opendaylight.controller/config-netconf-connector/${project.version} + odl-netconf-api odl-netconf-mapping-api odl-netconf-util - - - + odl-netconf-netty-util mvn:org.opendaylight.controller/netconf-impl/${project.version} + + + odl-config-manager odl-netconf-api odl-netconf-mapping-api odl-netconf-util - odl-netconf-netty-util + mvn:org.opendaylight.controller/config-netconf-connector/${project.version} - mvn:org.opendaylight.controller/netconf-netty-util/${project.version} odl-netconf-api odl-netconf-mapping-api odl-netconf-util + mvn:org.opendaylight.controller/netconf-netty-util/${project.version} mvn:org.opendaylight.controller.thirdparty/ganymed/${ganymed.version} mvn:org.openexi/nagasena/${exi.nagasena.version} mvn:io.netty/netty-codec/${netty.version} @@ -50,10 +59,14 @@ mvn:io.netty/netty-buffer/${netty.version} mvn:io.netty/netty-transport/${netty.version} - + + odl-netconf-netty-util mvn:org.opendaylight.controller/netconf-client/${project.version} + mvn:org.opendaylight.controller/netconf-config/${netconf.version}/xml/config + + + odl-netconf-util mvn:org.opendaylight.controller/netconf-monitoring/${project.version} - mvn:org.opendaylight.controller/netconf-tcp/${project.version} \ No newline at end of file diff --git a/features/pom.xml b/features/pom.xml index f69190cebd..88ed7491a7 100644 --- a/features/pom.xml +++ b/features/pom.xml @@ -22,6 +22,7 @@ config-persister config-netty mdsal + flow netconf protocol-framework diff --git a/features/protocol-framework/pom.xml b/features/protocol-framework/pom.xml index ba5dd18fc2..97836be455 100644 --- a/features/protocol-framework/pom.xml +++ b/features/protocol-framework/pom.xml @@ -7,7 +7,7 @@ 1.4.2-SNAPSHOT ../../opendaylight/commons/opendaylight - features-odl-protocol-framework + features-protocol-framework ${protocol-framework.version} pom @@ -18,11 +18,15 @@ org.opendaylight.controller - config-features + features-config features xml runtime + + org.opendaylight.controller + protocol-framework + diff --git a/features/protocol-framework/src/main/resources/features.xml b/features/protocol-framework/src/main/resources/features.xml index d2560f5cb0..6daa3432c1 100644 --- a/features/protocol-framework/src/main/resources/features.xml +++ b/features/protocol-framework/src/main/resources/features.xml @@ -3,10 +3,10 @@ - mvn:org.opendaylight.controller/config-features/${config.version}/xml/features + mvn:org.opendaylight.controller/features-config/${config.version}/xml/features + odl-config-api + odl-config-netty-config-api mvn:org.opendaylight.controller/protocol-framework/${protocol-framework.version} - odl-config-api - odl-config-netty-config-api \ No newline at end of file diff --git a/opendaylight/commons/liblldp/pom.xml b/opendaylight/commons/liblldp/pom.xml new file mode 100644 index 0000000000..1551041edb --- /dev/null +++ b/opendaylight/commons/liblldp/pom.xml @@ -0,0 +1,55 @@ + + + 4.0.0 + + org.opendaylight.controller + commons.opendaylight + 1.4.2-SNAPSHOT + ../opendaylight + + + liblldp + 0.8.1-SNAPSHOT + bundle + + + junit + junit + + + 
org.apache.commons + commons-lang3 + + + org.slf4j + slf4j-api + + + + + + + org.apache.felix + maven-bundle-plugin + true + + + org.slf4j, + org.apache.commons.lang3.builder, + org.apache.commons.lang3.tuple + + + org.opendaylight.controller.liblldp + + ${project.basedir}/META-INF + + + + + + scm:git:ssh://git.opendaylight.org:29418/controller.git + scm:git:ssh://git.opendaylight.org:29418/controller.git + HEAD + https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main + + diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BitBufferHelper.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BitBufferHelper.java new file mode 100644 index 0000000000..3eae43212f --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BitBufferHelper.java @@ -0,0 +1,718 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +/** + * + */ +package org.opendaylight.controller.liblldp; + +import java.util.Arrays; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * BitBufferHelper class that provides utility methods to + * - fetch specific bits from a serialized stream of bits + * - convert bits to primitive data type - like short, int, long + * - store bits in specified location in stream of bits + * - convert primitive data types to stream of bits + */ +public abstract class BitBufferHelper { + protected static final Logger logger = LoggerFactory + .getLogger(BitBufferHelper.class); + + public static final long ByteMask = 0xFF; + + // Getters + // data: array where data are stored + // startOffset: bit from where to start reading + // numBits: number of bits to read + // All this function return an exception if overflow or underflow + + /** + * Returns the first byte from the byte array + * @param byte[] data + * @return byte value + */ + public static byte getByte(byte[] data) { + if ((data.length * NetUtils.NumBitsInAByte) > Byte.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + return (data[0]); + } + + /** + * Returns the short value for the byte array passed. + * Size of byte array is restricted to Short.SIZE + * @param byte[] data + * @return short value + */ + public static short getShort(byte[] data) { + if (data.length > Short.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + return (short) toNumber(data); + } + + /** + * Returns the int value for the byte array passed. + * Size of byte array is restricted to Integer.SIZE + * @param byte[] data + * @return int - the integer value of byte array + */ + public static int getInt(byte[] data) { + if (data.length > Integer.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + return (int) toNumber(data); + } + + /** + * Returns the long value for the byte array passed. 
+ * Size of byte array is restricted to Long.SIZE + * @param byte[] data + * @return long - the integer value of byte array + */ + public static long getLong(byte[] data) { + if (data.length > Long.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (Exception e) { + logger.error("", e); + } + } + return (long) toNumber(data); + } + + /** + * Returns the short value for the last numBits of the byte array passed. + * Size of numBits is restricted to Short.SIZE + * @param byte[] data + * @param int - numBits + * @return short - the short value of byte array + */ + public static short getShort(byte[] data, int numBits) { + if (numBits > Short.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + int startOffset = data.length * NetUtils.NumBitsInAByte - numBits; + byte[] bits = null; + try { + bits = BitBufferHelper.getBits(data, startOffset, numBits); + } catch (BufferException e) { + logger.error("", e); + } + return (short) toNumber(bits, numBits); + } + + /** + * Returns the int value for the last numBits of the byte array passed. + * Size of numBits is restricted to Integer.SIZE + * @param byte[] data + * @param int - numBits + * @return int - the integer value of byte array + */ + public static int getInt(byte[] data, int numBits) { + if (numBits > Integer.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + int startOffset = data.length * NetUtils.NumBitsInAByte - numBits; + byte[] bits = null; + try { + bits = BitBufferHelper.getBits(data, startOffset, numBits); + } catch (BufferException e) { + logger.error("", e); + } + return (int) toNumber(bits, numBits); + } + + /** + * Returns the long value for the last numBits of the byte array passed. + * Size of numBits is restricted to Long.SIZE + * @param byte[] data + * @param int - numBits + * @return long - the integer value of byte array + */ + public static long getLong(byte[] data, int numBits) { + if (numBits > Long.SIZE) { + try { + throw new BufferException( + "Container is too small for the number of requested bits"); + } catch (BufferException e) { + logger.error("", e); + } + } + if (numBits > data.length * NetUtils.NumBitsInAByte) { + try { + throw new BufferException( + "Trying to read more bits than contained in the data buffer"); + } catch (BufferException e) { + logger.error("", e); + } + } + int startOffset = data.length * NetUtils.NumBitsInAByte - numBits; + byte[] bits = null; + try { + bits = BitBufferHelper.getBits(data, startOffset, numBits); + } catch (BufferException e) { + logger.error("", e); + } + return (long) toNumber(bits, numBits); + } + + /** + * Reads the specified number of bits from the passed byte array + * starting to read from the specified offset + * The bits read are stored in a byte array which size is dictated + * by the number of bits to be stored. + * The bits are stored in the byte array LSB aligned. + * + * Ex. 
+ * Read 7 bits at offset 10 + * 0 9 10 16 17 + * 0101000010 | 0000101 | 1111001010010101011 + * will be returned as {0,0,0,0,0,1,0,1} + * + * @param byte[] data + * @param int startOffset - offset to start fetching bits from data from + * @param int numBits - number of bits to be fetched from data + * @return byte [] - LSB aligned bits + * + * @throws BufferException + * when the startOffset and numBits parameters are not congruent + * with the data buffer size + */ + public static byte[] getBits(byte[] data, int startOffset, int numBits) + throws BufferException { + + int startByteOffset = 0; + int valfromcurr, valfromnext; + int extranumBits = numBits % NetUtils.NumBitsInAByte; + int extraOffsetBits = startOffset % NetUtils.NumBitsInAByte; + int numBytes = (numBits % NetUtils.NumBitsInAByte != 0) ? 1 + numBits + / NetUtils.NumBitsInAByte : numBits / NetUtils.NumBitsInAByte; + byte[] shiftedBytes = new byte[numBytes]; + startByteOffset = startOffset / NetUtils.NumBitsInAByte; + byte[] bytes = new byte[numBytes]; + if (numBits == 0) { + return bytes; + } + + checkExceptions(data, startOffset, numBits); + + if (extraOffsetBits == 0) { + if (extranumBits == 0) { + System.arraycopy(data, startByteOffset, bytes, 0, numBytes); + return bytes; + } else { + System.arraycopy(data, startByteOffset, bytes, 0, numBytes - 1); + bytes[numBytes - 1] = (byte) ((int) data[startByteOffset + + numBytes - 1] & getMSBMask(extranumBits)); + } + } else { + int i; + for (i = 0; i < numBits / NetUtils.NumBitsInAByte; i++) { + // Reading numBytes starting from offset + valfromcurr = (data[startByteOffset + i]) + & getLSBMask(NetUtils.NumBitsInAByte - extraOffsetBits); + valfromnext = (data[startByteOffset + i + 1]) + & getMSBMask(extraOffsetBits); + bytes[i] = (byte) (valfromcurr << (extraOffsetBits) | (valfromnext >> (NetUtils.NumBitsInAByte - extraOffsetBits))); + } + // Now adding the rest of the bits if any + if (extranumBits != 0) { + if (extranumBits < (NetUtils.NumBitsInAByte - extraOffsetBits)) { + valfromnext = (byte) (data[startByteOffset + i] & ((getMSBMask(extranumBits)) >> extraOffsetBits)); + bytes[i] = (byte) (valfromnext << extraOffsetBits); + } else if (extranumBits == (NetUtils.NumBitsInAByte - extraOffsetBits)) { + valfromcurr = (data[startByteOffset + i]) + & getLSBMask(NetUtils.NumBitsInAByte + - extraOffsetBits); + bytes[i] = (byte) (valfromcurr << extraOffsetBits); + } else { + valfromcurr = (data[startByteOffset + i]) + & getLSBMask(NetUtils.NumBitsInAByte + - extraOffsetBits); + valfromnext = (data[startByteOffset + i + 1]) + & (getMSBMask(extranumBits + - (NetUtils.NumBitsInAByte - extraOffsetBits))); + bytes[i] = (byte) (valfromcurr << (extraOffsetBits) | (valfromnext >> (NetUtils.NumBitsInAByte - extraOffsetBits))); + } + + } + } + // Aligns the bits to LSB + shiftedBytes = shiftBitsToLSB(bytes, numBits); + return shiftedBytes; + } + + // Setters + // data: array where data will be stored + // input: the data that need to be stored in the data array + // startOffset: bit from where to start writing + // numBits: number of bits to read + + /** + * Bits are expected to be stored in the input byte array from LSB + * @param byte[] - data to set the input byte + * @param byte - input byte to be inserted + * @param startOffset - offset of data[] to start inserting byte from + * @param numBits - number of bits of input to be inserted into data[] + * + * @throws BufferException + * when the input, startOffset and numBits are not congruent + * with the data buffer size + */ + public static void 
setByte(byte[] data, byte input, int startOffset, + int numBits) throws BufferException { + byte[] inputByteArray = new byte[1]; + Arrays.fill(inputByteArray, 0, 1, input); + setBytes(data, inputByteArray, startOffset, numBits); + } + + /** + * Bits are expected to be stored in the input byte array from LSB + * @param byte[] - data to set the input byte + * @param byte[] - input bytes to be inserted + * @param startOffset - offset of data[] to start inserting byte from + * @param numBits - number of bits of input to be inserted into data[] + * @return void + * @throws BufferException + * when the startOffset and numBits parameters are not congruent + * with data and input buffers' size + */ + public static void setBytes(byte[] data, byte[] input, int startOffset, + int numBits) throws BufferException { + checkExceptions(data, startOffset, numBits); + insertBits(data, input, startOffset, numBits); + } + + /** + * Returns numBits 1's in the MSB position + * + * @param numBits + * @return + */ + public static int getMSBMask(int numBits) { + int mask = 0; + for (int i = 0; i < numBits; i++) { + mask = mask | (1 << (7 - i)); + } + return mask; + } + + /** + * Returns numBits 1's in the LSB position + * + * @param numBits + * @return + */ + public static int getLSBMask(int numBits) { + int mask = 0; + for (int i = 0; i < numBits; i++) { + mask = mask | (1 << i); + } + return mask; + } + + /** + * Returns the numerical value of the byte array passed + * + * @param byte[] - array + * @return long - numerical value of byte array passed + */ + static public long toNumber(byte[] array) { + long ret = 0; + long length = array.length; + int value = 0; + for (int i = 0; i < length; i++) { + value = array[i]; + if (value < 0) + value += 256; + ret = ret + | (long) ((long) value << ((length - i - 1) * NetUtils.NumBitsInAByte)); + } + return ret; + } + + /** + * Returns the numerical value of the last numBits (LSB bits) of the byte + * array passed + * + * @param byte[] - array + * @param int - numBits + * @return long - numerical value of byte array passed + */ + static public long toNumber(byte[] array, int numBits) { + int length = numBits / NetUtils.NumBitsInAByte; + int bitsRest = numBits % NetUtils.NumBitsInAByte; + int startOffset = array.length - length; + long ret = 0; + int value = 0; + + value = array[startOffset - 1] & getLSBMask(bitsRest); + value = (array[startOffset - 1] < 0) ? 
(array[startOffset - 1] + 256) + : array[startOffset - 1]; + ret = ret + | (value << ((array.length - startOffset) * NetUtils.NumBitsInAByte)); + + for (int i = startOffset; i < array.length; i++) { + value = array[i]; + if (value < 0) + value += 256; + ret = ret + | (long) ((long) value << ((array.length - i - 1) * NetUtils.NumBitsInAByte)); + } + + return ret; + } + + /** + * Accepts a number as input and returns its value in byte form in LSB + * aligned form example: input = 5000 [1001110001000] bytes = 19, -120 + * [00010011] [10001000] + * + * @param Number + * @return byte[] + * + */ + + public static byte[] toByteArray(Number input) { + Class dataType = input.getClass(); + short size = 0; + long longValue = input.longValue(); + + if (dataType == Byte.class || dataType == byte.class) { + size = Byte.SIZE; + } else if (dataType == Short.class || dataType == short.class) { + size = Short.SIZE; + } else if (dataType == Integer.class || dataType == int.class) { + size = Integer.SIZE; + } else if (dataType == Long.class || dataType == long.class) { + size = Long.SIZE; + } else { + throw new IllegalArgumentException( + "Parameter must one of the following: Short/Int/Long\n"); + } + + int length = size / NetUtils.NumBitsInAByte; + byte bytes[] = new byte[length]; + + // Getting the bytes from input value + for (int i = 0; i < length; i++) { + bytes[i] = (byte) ((longValue >> (NetUtils.NumBitsInAByte * (length + - i - 1))) & ByteMask); + } + return bytes; + } + + /** + * Accepts a number as input and returns its value in byte form in MSB + * aligned form example: input = 5000 [1001110001000] bytes = -114, 64 + * [10011100] [01000000] + * + * @param Number + * input + * @param int numBits - the number of bits to be returned + * @return byte[] + * + */ + public static byte[] toByteArray(Number input, int numBits) { + Class dataType = input.getClass(); + short size = 0; + long longValue = input.longValue(); + + if (dataType == Short.class) { + size = Short.SIZE; + } else if (dataType == Integer.class) { + size = Integer.SIZE; + } else if (dataType == Long.class) { + size = Long.SIZE; + } else { + throw new IllegalArgumentException( + "Parameter must one of the following: Short/Int/Long\n"); + } + + int length = size / NetUtils.NumBitsInAByte; + byte bytes[] = new byte[length]; + byte[] inputbytes = new byte[length]; + byte shiftedBytes[]; + + // Getting the bytes from input value + for (int i = 0; i < length; i++) { + bytes[i] = (byte) ((longValue >> (NetUtils.NumBitsInAByte * (length + - i - 1))) & ByteMask); + } + + if ((bytes[0] == 0 && dataType == Long.class) + || (bytes[0] == 0 && dataType == Integer.class)) { + int index = 0; + for (index = 0; index < length; ++index) { + if (bytes[index] != 0) { + bytes[0] = bytes[index]; + break; + } + } + System.arraycopy(bytes, index, inputbytes, 0, length - index); + Arrays.fill(bytes, length - index + 1, length - 1, (byte) 0); + } else { + System.arraycopy(bytes, 0, inputbytes, 0, length); + } + + shiftedBytes = shiftBitsToMSB(inputbytes, numBits); + + return shiftedBytes; + } + + /** + * Takes an LSB aligned byte array and returned the LSB numBits in a MSB + * aligned byte array + * + * @param inputbytes + * @param numBits + * @return + */ + /** + * It aligns the last numBits bits to the head of the byte array following + * them with numBits % 8 zero bits. 
+ * + * Example: For inputbytes = [00000111][01110001] and numBits = 12 it + * returns: shiftedBytes = [01110111][00010000] + * + * @param byte[] inputBytes + * @param int numBits - number of bits to be left aligned + * @return byte[] + */ + public static byte[] shiftBitsToMSB(byte[] inputBytes, int numBits) { + int numBitstoShiftBy = 0, leadZeroesMSB = 8, numEndRestBits = 0; + int size = inputBytes.length; + byte[] shiftedBytes = new byte[size]; + int i; + + for (i = 0; i < Byte.SIZE; i++) { + if (((byte) (inputBytes[0] & getMSBMask(i + 1))) != 0) { + leadZeroesMSB = i; + break; + } + } + + if (numBits % NetUtils.NumBitsInAByte == 0) { + numBitstoShiftBy = 0; + } else { + numBitstoShiftBy = ((NetUtils.NumBitsInAByte - (numBits % NetUtils.NumBitsInAByte)) < leadZeroesMSB) ? (NetUtils.NumBitsInAByte - (numBits % NetUtils.NumBitsInAByte)) + : leadZeroesMSB; + } + if (numBitstoShiftBy == 0) { + return inputBytes; + } + + if (numBits < NetUtils.NumBitsInAByte) { + // inputbytes.length = 1 OR read less than a byte + shiftedBytes[0] = (byte) ((inputBytes[0] & getLSBMask(numBits)) << numBitstoShiftBy); + } else { + // # of bits to read from last byte + numEndRestBits = NetUtils.NumBitsInAByte + - (inputBytes.length * NetUtils.NumBitsInAByte - numBits - numBitstoShiftBy); + + for (i = 0; i < (size - 1); i++) { + if ((i + 1) == (size - 1)) { + if (numEndRestBits > numBitstoShiftBy) { + shiftedBytes[i] = (byte) ((inputBytes[i] << numBitstoShiftBy) | ((inputBytes[i + 1] & getMSBMask(numBitstoShiftBy)) >> (numEndRestBits - numBitstoShiftBy))); + shiftedBytes[i + 1] = (byte) ((inputBytes[i + 1] & getLSBMask(numEndRestBits + - numBitstoShiftBy)) << numBitstoShiftBy); + } else + shiftedBytes[i] = (byte) ((inputBytes[i] << numBitstoShiftBy) | ((inputBytes[i + 1] & getMSBMask(numEndRestBits)) >> (NetUtils.NumBitsInAByte - numEndRestBits))); + } + shiftedBytes[i] = (byte) ((inputBytes[i] << numBitstoShiftBy) | (inputBytes[i + 1] & getMSBMask(numBitstoShiftBy)) >> (NetUtils.NumBitsInAByte - numBitstoShiftBy)); + } + + } + return shiftedBytes; + } + + /** + * It aligns the first numBits bits to the right end of the byte array + * preceding them with numBits % 8 zero bits. + * + * Example: For inputbytes = [01110111][00010000] and numBits = 12 it + * returns: shiftedBytes = [00000111][01110001] + * + * @param byte[] inputBytes + * @param int numBits - number of bits to be right aligned + * @return byte[] + */ + public static byte[] shiftBitsToLSB(byte[] inputBytes, int numBits) { + int numBytes = inputBytes.length; + int numBitstoShift = numBits % NetUtils.NumBitsInAByte; + byte[] shiftedBytes = new byte[numBytes]; + int inputLsb = 0, inputMsb = 0; + + if (numBitstoShift == 0) { + return inputBytes; + } + + for (int i = 1; i < numBytes; i++) { + inputLsb = inputBytes[i - 1] + & getLSBMask(NetUtils.NumBitsInAByte - numBitstoShift); + inputLsb = (inputLsb < 0) ? (inputLsb + 256) : inputLsb; + inputMsb = inputBytes[i] & getMSBMask(numBitstoShift); + inputMsb = (inputBytes[i] < 0) ? (inputBytes[i] + 256) + : inputBytes[i]; + shiftedBytes[i] = (byte) ((inputLsb << numBitstoShift) | (inputMsb >> (NetUtils.NumBitsInAByte - numBitstoShift))); + } + inputMsb = inputBytes[0] & (getMSBMask(numBitstoShift)); + inputMsb = (inputMsb < 0) ? (inputMsb + 256) : inputMsb; + shiftedBytes[0] = (byte) (inputMsb >> (NetUtils.NumBitsInAByte - numBitstoShift)); + return shiftedBytes; + } + + /** + * Insert in the data buffer at position dictated by the offset the number + * of bits specified from the input data byte array. 
The input byte array + * has the bits stored starting from the LSB + * + * @param byte[] data + * @param byte[] inputdata + * @param int startOffset + * @param int numBits + */ + public static void insertBits(byte[] data, byte[] inputdataLSB, + int startOffset, int numBits) { + byte[] inputdata = shiftBitsToMSB(inputdataLSB, numBits); // Align to + // MSB the + // passed byte + // array + int numBytes = numBits / NetUtils.NumBitsInAByte; + int startByteOffset = startOffset / NetUtils.NumBitsInAByte; + int extraOffsetBits = startOffset % NetUtils.NumBitsInAByte; + int extranumBits = numBits % NetUtils.NumBitsInAByte; + int RestBits = numBits % NetUtils.NumBitsInAByte; + int InputMSBbits = 0, InputLSBbits = 0; + int i; + + if (numBits == 0) { + return; + } + + if (extraOffsetBits == 0) { + if (extranumBits == 0) { + numBytes = numBits / NetUtils.NumBitsInAByte; + System.arraycopy(inputdata, 0, data, startByteOffset, numBytes); + } else { + System.arraycopy(inputdata, 0, data, startByteOffset, numBytes); + data[startByteOffset + numBytes] = (byte) (data[startByteOffset + + numBytes] | (inputdata[numBytes] & getMSBMask(extranumBits))); + } + } else { + for (i = 0; i < numBytes; i++) { + if (i != 0) + InputLSBbits = (inputdata[i - 1] & getLSBMask(extraOffsetBits)); + InputMSBbits = (byte) (inputdata[i] & (getMSBMask(NetUtils.NumBitsInAByte + - extraOffsetBits))); + InputMSBbits = (InputMSBbits >= 0) ? InputMSBbits + : InputMSBbits + 256; + data[startByteOffset + i] = (byte) (data[startByteOffset + i] + | (InputLSBbits << (NetUtils.NumBitsInAByte - extraOffsetBits)) | (InputMSBbits >> extraOffsetBits)); + InputMSBbits = InputLSBbits = 0; + } + if (RestBits < (NetUtils.NumBitsInAByte - extraOffsetBits)) { + if (numBytes != 0) + InputLSBbits = (inputdata[i - 1] & getLSBMask(extraOffsetBits)); + InputMSBbits = (byte) (inputdata[i] & (getMSBMask(RestBits))); + InputMSBbits = (InputMSBbits >= 0) ? InputMSBbits + : InputMSBbits + 256; + data[startByteOffset + i] = (byte) ((data[startByteOffset + i]) + | (InputLSBbits << (NetUtils.NumBitsInAByte - extraOffsetBits)) | (InputMSBbits >> extraOffsetBits)); + } else if (RestBits == (NetUtils.NumBitsInAByte - extraOffsetBits)) { + if (numBytes != 0) + InputLSBbits = (inputdata[i - 1] & getLSBMask(extraOffsetBits)); + InputMSBbits = (byte) (inputdata[i] & (getMSBMask(NetUtils.NumBitsInAByte + - extraOffsetBits))); + InputMSBbits = (InputMSBbits >= 0) ? InputMSBbits + : InputMSBbits + 256; + data[startByteOffset + i] = (byte) (data[startByteOffset + i] + | (InputLSBbits << (NetUtils.NumBitsInAByte - extraOffsetBits)) | (InputMSBbits >> extraOffsetBits)); + } else { + if (numBytes != 0) + InputLSBbits = (inputdata[i - 1] & getLSBMask(extraOffsetBits)); + InputMSBbits = (byte) (inputdata[i] & (getMSBMask(NetUtils.NumBitsInAByte + - extraOffsetBits))); + InputMSBbits = (InputMSBbits >= 0) ? 
InputMSBbits + : InputMSBbits + 256; + data[startByteOffset + i] = (byte) (data[startByteOffset + i] + | (InputLSBbits << (NetUtils.NumBitsInAByte - extraOffsetBits)) | (InputMSBbits >> extraOffsetBits)); + + InputLSBbits = (inputdata[i] & (getLSBMask(RestBits + - (NetUtils.NumBitsInAByte - extraOffsetBits)) << (NetUtils.NumBitsInAByte - RestBits))); + data[startByteOffset + i + 1] = (byte) (data[startByteOffset + + i + 1] | (InputLSBbits << (NetUtils.NumBitsInAByte - extraOffsetBits))); + } + } + } + + /** + * Checks for overflow and underflow exceptions + * @param data + * @param startOffset + * @param numBits + * @throws PacketException when the startOffset and numBits parameters + * are not congruent with the data buffer's size + */ + public static void checkExceptions(byte[] data, int startOffset, int numBits) + throws BufferException { + int endOffsetByte; + int startByteOffset; + endOffsetByte = startOffset + / NetUtils.NumBitsInAByte + + numBits + / NetUtils.NumBitsInAByte + + ((numBits % NetUtils.NumBitsInAByte != 0) ? 1 : ((startOffset + % NetUtils.NumBitsInAByte != 0) ? 1 : 0)); + startByteOffset = startOffset / NetUtils.NumBitsInAByte; + + if (data == null) { + throw new BufferException("data[] is null\n"); + } + + if ((startOffset < 0) || (startByteOffset >= data.length) + || (endOffsetByte > data.length) || (numBits < 0) + || (numBits > NetUtils.NumBitsInAByte * data.length)) { + throw new BufferException( + "Illegal arguement/out of bound exception - data.length = " + + data.length + " startOffset = " + startOffset + + " numBits " + numBits); + } + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BufferException.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BufferException.java new file mode 100644 index 0000000000..fa0848d894 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/BufferException.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.liblldp; + +/** + * Describes an exception that is raised during BitBufferHelper operations. + */ +public class BufferException extends Exception { + private static final long serialVersionUID = 1L; + + public BufferException(String message) { + super(message); + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/ConstructionException.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/ConstructionException.java new file mode 100644 index 0000000000..8b1d9d2d0f --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/ConstructionException.java @@ -0,0 +1,28 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +/** + * @file ConstructionException.java + * + * + * @brief Describe an exception that is raised when a construction + * for a Node/NodeConnector/Edge or any of the SAL basic object fails + * because input passed are not valid or compatible + * + * + */ +package org.opendaylight.controller.liblldp; + +public class ConstructionException extends Exception { + private static final long serialVersionUID = 1L; + + public ConstructionException(String message) { + super(message); + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/DataLinkAddress.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/DataLinkAddress.java new file mode 100644 index 0000000000..d617c05a5a --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/DataLinkAddress.java @@ -0,0 +1,96 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.io.Serializable; + +import javax.xml.bind.annotation.XmlRootElement; + +/** + * @file DataLinkAddress.java + * + * @brief Abstract base class for a Datalink Address + * + */ + +/** + * Abstract base class for a Datalink Address + * + */ +@XmlRootElement +abstract public class DataLinkAddress implements Serializable { + private static final long serialVersionUID = 1L; + private String name; + + public DataLinkAddress() { + + } + + /** + * Constructor of super class + * + * @param name Create a new DataLink, not for general use but + * available only for sub classes + * + * @return constructed object + */ + protected DataLinkAddress(String name) { + this.name = name; + } + + /** + * Used to copy the DataLinkAddress in a polymorphic way + * + * + * @return A clone of this DataLinkAddress + */ + @Override + abstract public DataLinkAddress clone(); + + /** + * Allow to distinguish among different data link addresses + * + * + * @return Name of the DataLinkAdress we are working on + */ + public String getName() { + return this.name; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((name == null) ? 
0 : name.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DataLinkAddress other = (DataLinkAddress) obj; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + return true; + } + + @Override + public String toString() { + return "DataLinkAddress [name=" + name + "]"; + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EtherTypes.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EtherTypes.java new file mode 100644 index 0000000000..876d495899 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EtherTypes.java @@ -0,0 +1,117 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.util.ArrayList; +import java.util.List; + +/** + * The enum contains the most common 802.3 ethernet types and 802.2 + SNAP protocol ids + * + * + * + */ +public enum EtherTypes { + PVSTP("PVSTP", 0x010B), // 802.2 + SNAP (Spanning Tree) + CDP("CDP", 0x2000), // 802.2 + SNAP + VTP("VTP", 0x2003), // 802.2 + SNAP + IPv4("IPv4", 0x800), ARP("ARP", 0x806), RARP("Reverse ARP", 0x8035), VLANTAGGED( + "VLAN Tagged", 0x8100), // 802.1Q + IPv6("IPv6", 0x86DD), MPLSUCAST("MPLS Unicast", 0x8847), MPLSMCAST( + "MPLS Multicast", 0x8848), QINQ("QINQ", 0x88A8), // Standard 802.1ad QinQ + LLDP("LLDP", 0x88CC), OLDQINQ("Old QINQ", 0x9100), // Old non-standard QinQ + CISCOQINQ("Cisco QINQ", 0x9200); // Cisco non-standard QinQ + + private static final String regexNumberString = "^[0-9]+$"; + private String description; + private int number; + + private EtherTypes(String description, int number) { + this.description = description; + this.number = number; + } + + public String toString() { + return description; + } + + public int intValue() { + return number; + } + + public short shortValue() { + return ((Integer) number).shortValue(); + } + + public static String getEtherTypeName(int number) { + return getEtherTypeInternal(number); + } + + public static String getEtherTypeName(short number) { + return getEtherTypeInternal((int) number & 0xffff); + } + + public static String getEtherTypeName(byte number) { + return getEtherTypeInternal((int) number & 0xff); + } + + private static String getEtherTypeInternal(int number) { + for (EtherTypes type : EtherTypes.values()) { + if (type.number == number) { + return type.toString(); + } + } + return "0x" + Integer.toHexString(number); + } + + public static short getEtherTypeNumberShort(String name) { + if (name.matches(regexNumberString)) { + return Short.valueOf(name); + } + for (EtherTypes type : EtherTypes.values()) { + if (type.description.equalsIgnoreCase(name)) { + return type.shortValue(); + } + } + return 0; + } + + public static int getEtherTypeNumberInt(String name) { + if (name.matches(regexNumberString)) { + return Integer.valueOf(name); + } + for (EtherTypes type : EtherTypes.values()) { + if (type.description.equalsIgnoreCase(name)) { + return type.intValue(); + } + } + return 0; + } + + public static List 
getEtherTypesNameList() { + List ethertypesList = new ArrayList(); + for (EtherTypes type : EtherTypes.values()) { + ethertypesList.add(type.toString()); + } + return ethertypesList; + } + + public static EtherTypes loadFromString(String string) { + int intType = Integer.parseInt(string); + + for (EtherTypes type : EtherTypes.values()) { + if (type.number == intType) { + return type; + } + } + return null; + } + +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Ethernet.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Ethernet.java new file mode 100644 index 0000000000..54452bb9a4 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Ethernet.java @@ -0,0 +1,134 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; + +/** + * Class that represents the Ethernet frame objects + */ +public class Ethernet extends Packet { + private static final String DMAC = "DestinationMACAddress"; + private static final String SMAC = "SourceMACAddress"; + private static final String ETHT = "EtherType"; + + // TODO: This has to be outside and it should be possible for osgi + // to add new coming packet classes + public static final Map> etherTypeClassMap; + static { + etherTypeClassMap = new HashMap>(); + etherTypeClassMap.put(EtherTypes.LLDP.shortValue(), LLDP.class); + } + private static Map> fieldCoordinates = new LinkedHashMap>() { + private static final long serialVersionUID = 1L; + { + put(DMAC, new ImmutablePair(0, 48)); + put(SMAC, new ImmutablePair(48, 48)); + put(ETHT, new ImmutablePair(96, 16)); + } + }; + private final Map fieldValues; + + /** + * Default constructor that creates and sets the HashMap + */ + public Ethernet() { + super(); + fieldValues = new HashMap(); + hdrFieldCoordMap = fieldCoordinates; + hdrFieldsMap = fieldValues; + } + + /** + * Constructor that sets the access level for the packet and + * creates and sets the HashMap + */ + public Ethernet(boolean writeAccess) { + super(writeAccess); + fieldValues = new HashMap(); + hdrFieldCoordMap = fieldCoordinates; + hdrFieldsMap = fieldValues; + } + + @Override + public void setHeaderField(String headerField, byte[] readValue) { + if (headerField.equals(ETHT)) { + payloadClass = etherTypeClassMap.get(BitBufferHelper + .getShort(readValue)); + } + hdrFieldsMap.put(headerField, readValue); + } + + /** + * Gets the destination MAC address stored + * @return byte[] - the destinationMACAddress + */ + public byte[] getDestinationMACAddress() { + return fieldValues.get(DMAC); + } + + /** + * Gets the source MAC address stored + * @return byte[] - the sourceMACAddress + */ + public byte[] getSourceMACAddress() { + return fieldValues.get(SMAC); + } + + /** + * Gets the etherType stored + * @return short - the etherType + */ + public short getEtherType() { + return BitBufferHelper.getShort(fieldValues.get(ETHT)); + } + + public boolean isBroadcast(){ + return NetUtils.isBroadcastMACAddr(getDestinationMACAddress()); + } + + public 
boolean isMulticast(){ + return NetUtils.isMulticastMACAddr(getDestinationMACAddress()); + } + + /** + * Sets the destination MAC address for the current Ethernet object instance + * @param byte[] - the destinationMACAddress to set + */ + public Ethernet setDestinationMACAddress(byte[] destinationMACAddress) { + fieldValues.put(DMAC, destinationMACAddress); + return this; + } + + /** + * Sets the source MAC address for the current Ethernet object instance + * @param byte[] - the sourceMACAddress to set + */ + public Ethernet setSourceMACAddress(byte[] sourceMACAddress) { + fieldValues.put(SMAC, sourceMACAddress); + return this; + } + + /** + * Sets the etherType for the current Ethernet object instance + * @param short - the etherType to set + */ + public Ethernet setEtherType(short etherType) { + byte[] ethType = BitBufferHelper.toByteArray(etherType); + fieldValues.put(ETHT, ethType); + return this; + } + +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EthernetAddress.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EthernetAddress.java new file mode 100644 index 0000000000..b7b72cbffd --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/EthernetAddress.java @@ -0,0 +1,124 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.util.Arrays; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import javax.xml.bind.annotation.XmlTransient; + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) +public class EthernetAddress extends DataLinkAddress { + private static final long serialVersionUID = 1L; + @XmlTransient + private byte[] macAddress; + + public static final EthernetAddress BROADCASTMAC = createWellKnownAddress(new byte[] { + (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, + (byte) 0xff }); + + public static final EthernetAddress INVALIDHOST = BROADCASTMAC; + + public static final String addressName = "Ethernet MAC Address"; + public static final int SIZE = 6; + + private static final EthernetAddress createWellKnownAddress(byte[] mac) { + try { + return new EthernetAddress(mac); + } catch (ConstructionException ce) { + return null; + } + } + + /* Private constructor to satisfy JAXB */ + @SuppressWarnings("unused") + private EthernetAddress() { + } + + /** + * Public constructor for an Ethernet MAC address starting from + * the byte constituing the address, the constructor validate the + * size of the arrive to make sure it met the expected size + * + * @param macAddress A byte array in big endian format + * representing the Ethernet MAC Address + * + * @return The constructed object if valid + */ + public EthernetAddress(byte[] macAddress) throws ConstructionException { + super(addressName); + + if (macAddress == null) { + throw new ConstructionException("Null input parameter passed"); + } + + if (macAddress.length != SIZE) { + throw new ConstructionException( + "Wrong size of passed byte array, expected:" + SIZE + + " got:" + macAddress.length); + } + this.macAddress = 
new byte[SIZE]; + System.arraycopy(macAddress, 0, this.macAddress, 0, SIZE); + } + + public EthernetAddress clone() { + try { + return new EthernetAddress(this.macAddress.clone()); + } catch (ConstructionException ce) { + return null; + } + } + + /** + * Return the Ethernet Mac address in byte array format + * + * @return The Ethernet Mac address in byte array format + */ + public byte[] getValue() { + return this.macAddress; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + Arrays.hashCode(macAddress); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (getClass() != obj.getClass()) + return false; + EthernetAddress other = (EthernetAddress) obj; + if (!Arrays.equals(macAddress, other.macAddress)) + return false; + return true; + } + + @Override + public String toString() { + return "EthernetAddress [macAddress=" + HexEncode.bytesToHexStringFormat(macAddress) + + "]"; + } + + @XmlElement(name = "macAddress") + public String getMacAddress() { + return HexEncode.bytesToHexStringFormat(macAddress); + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/HexEncode.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/HexEncode.java new file mode 100644 index 0000000000..8236d4c3c9 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/HexEncode.java @@ -0,0 +1,114 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.math.BigInteger; + +/** + * The class provides methods to convert hex encode strings + * + * + */ +public class HexEncode { + /** + * This method converts byte array into String format without ":" inserted. + * + * @param bytes + * The byte array to convert to string + * @return The hexadecimal representation of the byte array. 
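
[Editor's note: a minimal usage sketch, not part of this patch, showing how the EtherTypes enum and the Ethernet header setters introduced earlier in this diff are meant to be combined. The MAC addresses and class name are invented for illustration.]

    import org.opendaylight.controller.liblldp.EtherTypes;
    import org.opendaylight.controller.liblldp.Ethernet;

    public class EtherTypesDemo {
        public static void main(String[] args) {
            byte[] srcMac = { 0x00, 0x11, 0x22, 0x33, 0x44, 0x55 };
            byte[] dstMac = { 0x66, 0x77, (byte) 0x88, (byte) 0x99, (byte) 0xaa, (byte) 0xbb };

            // Symbolic lookup of the LLDP ethertype (0x88CC) and its printable name.
            short lldpType = EtherTypes.LLDP.shortValue();
            String name = EtherTypes.getEtherTypeName(lldpType);   // "LLDP"

            // Populate an Ethernet header with the chosen ethertype; setters chain.
            Ethernet frame = new Ethernet();
            frame.setSourceMACAddress(srcMac)
                 .setDestinationMACAddress(dstMac)
                 .setEtherType(lldpType);

            System.out.println("EtherType 0x"
                    + Integer.toHexString(frame.getEtherType() & 0xffff) + " (" + name + ")");
        }
    }
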
If bytes is + * null, "null" string is returned + */ + public static String bytesToHexString(byte[] bytes) { + + if (bytes == null) { + return "null"; + } + + String ret = ""; + StringBuffer buf = new StringBuffer(); + for (int i = 0; i < bytes.length; i++) { + if (i > 0) { + ret += ":"; + } + short u8byte = (short) (bytes[i] & 0xff); + String tmp = Integer.toHexString(u8byte); + if (tmp.length() == 1) { + buf.append("0"); + } + buf.append(tmp); + } + ret = buf.toString(); + return ret; + } + + public static String longToHexString(long val) { + char arr[] = Long.toHexString(val).toCharArray(); + StringBuffer buf = new StringBuffer(); + // prepend the right number of leading zeros + int i = 0; + for (; i < (16 - arr.length); i++) { + buf.append("0"); + if ((i & 0x01) == 1) { + buf.append(":"); + } + } + for (int j = 0; j < arr.length; j++) { + buf.append(arr[j]); + if ((((i + j) & 0x01) == 1) && (j < (arr.length - 1))) { + buf.append(":"); + } + } + return buf.toString(); + } + + + public static byte[] bytesFromHexString(String values) { + String target = ""; + if (values != null) { + target = values; + } + String[] octets = target.split(":"); + + byte[] ret = new byte[octets.length]; + for (int i = 0; i < octets.length; i++) { + ret[i] = Integer.valueOf(octets[i], 16).byteValue(); + } + return ret; + } + + public static long stringToLong(String values) { + long value = new BigInteger(values.replaceAll(":", ""), 16).longValue(); + return value; + } + + /** + * This method converts byte array into HexString format with ":" inserted. + */ + public static String bytesToHexStringFormat(byte[] bytes) { + if (bytes == null) { + return "null"; + } + String ret = ""; + StringBuffer buf = new StringBuffer(); + for (int i = 0; i < bytes.length; i++) { + if (i > 0) { + buf.append(":"); + } + short u8byte = (short) (bytes[i] & 0xff); + String tmp = Integer.toHexString(u8byte); + if (tmp.length() == 1) { + buf.append("0"); + } + buf.append(tmp); + } + ret = buf.toString(); + return ret; + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDP.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDP.java new file mode 100644 index 0000000000..9b7efbb1e6 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDP.java @@ -0,0 +1,259 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. 
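
[Editor's note: the following sketch is illustrative only and not part of the patch; the MAC string is invented. It shows the round trip between the colon-separated string form handled by HexEncode and the length-validated EthernetAddress wrapper added above.]

    import org.opendaylight.controller.liblldp.ConstructionException;
    import org.opendaylight.controller.liblldp.EthernetAddress;
    import org.opendaylight.controller.liblldp.HexEncode;

    public class MacFormattingDemo {
        public static void main(String[] args) throws ConstructionException {
            // Parse a colon-separated MAC string into raw bytes.
            byte[] raw = HexEncode.bytesFromHexString("00:17:2b:aa:bb:cc");

            // EthernetAddress enforces the 6-byte length and throws
            // ConstructionException for anything else.
            EthernetAddress mac = new EthernetAddress(raw);

            // Format the bytes back into the canonical string representation.
            System.out.println(HexEncode.bytesToHexStringFormat(mac.getValue()));

            // Well-known broadcast address provided by the class itself.
            System.out.println(EthernetAddress.BROADCASTMAC);
        }
    }
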
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * Class that represents the LLDP frame objects + */ + +public class LLDP extends Packet { + private static final String CHASSISID = "ChassisId"; + private static final String SYSTEMNAMEID = "SystemNameID"; + private static final String PORTID = "PortId"; + private static final String TTL = "TTL"; + private static final int LLDPDefaultTlvs = 4; + private static LLDPTLV emptyTLV = new LLDPTLV().setLength((short) 0) + .setType((byte) 0); + public static final byte[] LLDPMulticastMac = { 1, (byte) 0x80, + (byte) 0xc2, 0, 0, (byte) 0xe }; + private Map tlvList; + + /** + * Default constructor that creates the tlvList LinkedHashMap + */ + public LLDP() { + super(); + tlvList = new LinkedHashMap(LLDPDefaultTlvs); + } + + /** + * Constructor that creates the tlvList LinkedHashMap and sets the write + * access for the same + */ + public LLDP(boolean writeAccess) { + super(writeAccess); + tlvList = new LinkedHashMap(LLDPDefaultTlvs); // Mandatory + // TLVs + } + + /** + * @param String + * - description of the type of TLV + * @return byte - type of TLV + */ + private byte getType(String typeDesc) { + if (typeDesc.equals(CHASSISID)) { + return LLDPTLV.TLVType.ChassisID.getValue(); + } else if (typeDesc.equals(PORTID)) { + return LLDPTLV.TLVType.PortID.getValue(); + } else if (typeDesc.equals(TTL)) { + return LLDPTLV.TLVType.TTL.getValue(); + } else { + return LLDPTLV.TLVType.Unknown.getValue(); + } + } + + /** + * @param String + * - description of the type of TLV + * @return LLDPTLV - full TLV + */ + public LLDPTLV getTLV(String type) { + return tlvList.get(getType(type)); + } + + /** + * @param String + * - description of the type of TLV + * @param LLDPTLV + * - tlv to set + * @return void + */ + public void setTLV(String type, LLDPTLV tlv) { + tlvList.put(getType(type), tlv); + } + + /** + * @return the chassisId TLV + */ + public LLDPTLV getChassisId() { + return getTLV(CHASSISID); + } + + /** + * @param LLDPTLV + * - the chassisId to set + */ + public LLDP setChassisId(LLDPTLV chassisId) { + tlvList.put(getType(CHASSISID), chassisId); + return this; + } + + /** + * @return the SystemName TLV + */ + public LLDPTLV getSystemNameId() { + return getTLV(SYSTEMNAMEID); + } + + /** + * @param LLDPTLV + * - the chassisId to set + */ + public LLDP setSystemNameId(LLDPTLV systemNameId) { + tlvList.put(getType(SYSTEMNAMEID), systemNameId); + return this; + } + + /** + * @return LLDPTLV - the portId TLV + */ + public LLDPTLV getPortId() { + return tlvList.get(getType(PORTID)); + } + + /** + * @param LLDPTLV + * - the portId to set + * @return LLDP + */ + public LLDP setPortId(LLDPTLV portId) { + tlvList.put(getType(PORTID), portId); + return this; + } + + /** + * @return LLDPTLV - the ttl TLV + */ + public LLDPTLV getTtl() { + return tlvList.get(getType(TTL)); + } + + /** + * @param LLDPTLV + * - the ttl to set + * @return LLDP + */ + public LLDP setTtl(LLDPTLV ttl) { + tlvList.put(getType(TTL), ttl); + return this; + } + + /** + * @return the optionalTLVList + */ + public List getOptionalTLVList() { + List list = new ArrayList(); + for (Map.Entry entry : tlvList.entrySet()) { + byte type = entry.getKey(); + if 
((type == LLDPTLV.TLVType.ChassisID.getValue()) + || (type == LLDPTLV.TLVType.PortID.getValue()) + || (type == LLDPTLV.TLVType.TTL.getValue())) { + continue; + } else { + list.add(entry.getValue()); + } + } + return list; + } + + /** + * @param optionalTLVList + * the optionalTLVList to set + * @return LLDP + */ + public LLDP setOptionalTLVList(List optionalTLVList) { + for (LLDPTLV tlv : optionalTLVList) { + tlvList.put(tlv.getType(), tlv); + } + return this; + } + + @Override + public Packet deserialize(byte[] data, int bitOffset, int size) + throws PacketException { + int lldpOffset = bitOffset; // LLDP start + int lldpSize = size; // LLDP size + + if (logger.isTraceEnabled()) { + logger.trace("LLDP: {} (offset {} bitsize {})", new Object[] { + HexEncode.bytesToHexString(data), lldpOffset, lldpSize }); + } + /* + * Deserialize the TLVs until we reach the end of the packet + */ + while (lldpSize > 0) { + LLDPTLV tlv = new LLDPTLV(); + tlv.deserialize(data, lldpOffset, lldpSize); + if (tlv.getType() == 0 && tlv.getLength() == 0) { + break; + } + int tlvSize = tlv.getTLVSize(); // Size of current TLV in bits + lldpOffset += tlvSize; + lldpSize -= tlvSize; + this.tlvList.put(tlv.getType(), tlv); + } + return this; + } + + @Override + public byte[] serialize() throws PacketException { + int startOffset = 0; + byte[] serializedBytes = new byte[getLLDPPacketLength()]; + + for (Map.Entry entry : tlvList.entrySet()) { + LLDPTLV tlv = entry.getValue(); + int numBits = tlv.getTLVSize(); + try { + BitBufferHelper.setBytes(serializedBytes, tlv.serialize(), + startOffset, numBits); + } catch (BufferException e) { + throw new PacketException(e.getMessage()); + } + startOffset += numBits; + } + // Now add the empty LLDPTLV at the end + try { + BitBufferHelper.setBytes(serializedBytes, + LLDP.emptyTLV.serialize(), startOffset, + LLDP.emptyTLV.getTLVSize()); + } catch (BufferException e) { + throw new PacketException(e.getMessage()); + } + + if (logger.isTraceEnabled()) { + logger.trace("LLDP: serialized: {}", + HexEncode.bytesToHexString(serializedBytes)); + } + return serializedBytes; + } + + /** + * Returns the size of LLDP packet in bytes + * + * @return int - LLDP Packet size in bytes + */ + private int getLLDPPacketLength() { + int len = 0; + LLDPTLV tlv; + + for (Map.Entry entry : this.tlvList.entrySet()) { + tlv = entry.getValue(); + len += tlv.getTLVSize(); + } + len += LLDP.emptyTLV.getTLVSize(); + + return len / NetUtils.NumBitsInAByte; + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDPTLV.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDPTLV.java new file mode 100644 index 0000000000..22bd4626d1 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/LLDPTLV.java @@ -0,0 +1,337 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. 
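
[Editor's note: a hedged, self-contained sketch, not taken from the patch itself, of how the LLDP, LLDPTLV, Ethernet and EtherTypes classes introduced here fit together when emitting an LLDP frame. The node MAC and port identifier are invented; TLV lengths are the value lengths in bytes.]

    import org.opendaylight.controller.liblldp.EtherTypes;
    import org.opendaylight.controller.liblldp.Ethernet;
    import org.opendaylight.controller.liblldp.HexEncode;
    import org.opendaylight.controller.liblldp.LLDP;
    import org.opendaylight.controller.liblldp.LLDPTLV;
    import org.opendaylight.controller.liblldp.PacketException;

    public class LldpFrameDemo {
        public static void main(String[] args) throws PacketException {
            String nodeMac = "00:11:22:33:44:55";

            // Mandatory TLVs: ChassisID, PortID and TTL, built with the value
            // factories on LLDPTLV.
            byte[] chassisValue = LLDPTLV.createChassisIDTLVValue(nodeMac);
            LLDPTLV chassisTlv = new LLDPTLV().setType(LLDPTLV.TLVType.ChassisID.getValue())
                    .setLength((short) chassisValue.length).setValue(chassisValue);

            byte[] portValue = LLDPTLV.createPortIDTLVValue("openflow:1:1");
            LLDPTLV portTlv = new LLDPTLV().setType(LLDPTLV.TLVType.PortID.getValue())
                    .setLength((short) portValue.length).setValue(portValue);

            byte[] ttlValue = new byte[] { 0, 120 };   // 120 seconds
            LLDPTLV ttlTlv = new LLDPTLV().setType(LLDPTLV.TLVType.TTL.getValue())
                    .setLength((short) ttlValue.length).setValue(ttlValue);

            LLDP lldp = new LLDP();
            lldp.setChassisId(chassisTlv).setPortId(portTlv).setTtl(ttlTlv);

            // Wrap the LLDP PDU in an Ethernet frame addressed to the LLDP multicast MAC.
            Ethernet frame = new Ethernet();
            frame.setSourceMACAddress(HexEncode.bytesFromHexString(nodeMac))
                 .setDestinationMACAddress(LLDP.LLDPMulticastMac)
                 .setEtherType(EtherTypes.LLDP.shortValue());
            frame.setPayload(lldp);

            // serialize() walks the Ethernet header and then the LLDP TLV list.
            byte[] onWire = frame.serialize();
            System.out.println("LLDP frame length: " + onWire.length + " bytes");
        }
    }
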
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.io.UnsupportedEncodingException; +import java.nio.charset.Charset; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.apache.commons.lang3.tuple.MutablePair; +import org.apache.commons.lang3.tuple.Pair; + +/** + * Class that represents the LLDPTLV objects + */ + +public class LLDPTLV extends Packet { + private static final String TYPE = "Type"; + private static final String LENGTH = "Length"; + private static final String VALUE = "Value"; + private static final int LLDPTLVFields = 3; + public static final byte[] OFOUI = new byte[] { (byte) 0x00, (byte) 0x26, + (byte) 0xe1 }; // OpenFlow OUI + public static final byte[] customTlvSubType = new byte[] { 0 }; + public static final int customTlvOffset = OFOUI.length + + customTlvSubType.length; + public static final byte chassisIDSubType[] = new byte[] { 4 }; // MAC address for the system + public static final byte portIDSubType[] = new byte[] { 7 }; // locally assigned + + public enum TLVType { + Unknown((byte) 0), ChassisID((byte) 1), PortID((byte) 2), TTL((byte) 3), PortDesc( + (byte) 4), SystemName((byte) 5), SystemDesc((byte) 6), Custom( + (byte) 127); + + private byte value; + + private TLVType(byte value) { + this.value = value; + } + + public byte getValue() { + return value; + } + } + + private static Map> fieldCoordinates = new LinkedHashMap>() { + private static final long serialVersionUID = 1L; + + { + put(TYPE, new MutablePair(0, 7)); + put(LENGTH, new MutablePair(7, 9)); + put(VALUE, new MutablePair(16, 0)); + } + }; + + protected Map fieldValues; + + /** + * Default constructor that creates and sets the hash map values and sets + * the payload to null + */ + public LLDPTLV() { + payload = null; + fieldValues = new HashMap(LLDPTLVFields); + hdrFieldCoordMap = fieldCoordinates; + hdrFieldsMap = fieldValues; + } + + /** + * Constructor that writes the passed LLDPTLV values to the hdrFieldsMap + */ + public LLDPTLV(LLDPTLV other) { + for (Map.Entry entry : other.hdrFieldsMap.entrySet()) { + this.hdrFieldsMap.put(entry.getKey(), entry.getValue()); + } + } + + /** + * @return int - the length of TLV + */ + public int getLength() { + return (int) BitBufferHelper.toNumber(fieldValues.get(LENGTH), + fieldCoordinates.get(LENGTH).getRight().intValue()); + } + + /** + * @return byte - the type of TLV + */ + public byte getType() { + return BitBufferHelper.getByte(fieldValues.get(TYPE)); + } + + /** + * @return byte[] - the value field of TLV + */ + public byte[] getValue() { + return fieldValues.get(VALUE); + } + + /** + * @param byte - the type to set + * @return LLDPTLV + */ + public LLDPTLV setType(byte type) { + byte[] lldpTLVtype = { type }; + fieldValues.put(TYPE, lldpTLVtype); + return this; + } + + /** + * @param short - the length to set + * @return LLDPTLV + */ + public LLDPTLV setLength(short length) { + fieldValues.put(LENGTH, BitBufferHelper.toByteArray(length)); + return this; + } + + /** + * @param byte[] - the value to set + * @return LLDPTLV + */ + public LLDPTLV setValue(byte[] value) { + fieldValues.put(VALUE, value); + return this; + } + + @Override + public void setHeaderField(String headerField, byte[] readValue) { + hdrFieldsMap.put(headerField, readValue); 
+ } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + + ((fieldValues == null) ? 0 : fieldValues.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!super.equals(obj)) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + LLDPTLV other = (LLDPTLV) obj; + if (fieldValues == null) { + if (other.fieldValues != null) { + return false; + } + } else if (!fieldValues.equals(other.fieldValues)) { + return false; + } + return true; + } + + @Override + public int getfieldnumBits(String fieldName) { + if (fieldName.equals(VALUE)) { + return (NetUtils.NumBitsInAByte * BitBufferHelper.getShort( + fieldValues.get(LENGTH), fieldCoordinates.get(LENGTH) + .getRight().intValue())); + } + return fieldCoordinates.get(fieldName).getRight(); + } + + /** + * Returns the size in bits of the whole TLV + * + * @return int - size in bits of full TLV + */ + public int getTLVSize() { + return (LLDPTLV.fieldCoordinates.get(TYPE).getRight() + // static + LLDPTLV.fieldCoordinates.get(LENGTH).getRight() + // static + getfieldnumBits(VALUE)); // variable + } + + /** + * Creates the SystemName TLV value + * + * @param nodeId + * node identifier string + * @return the SystemName TLV value in byte array + */ + static public byte[] createSystemNameTLVValue(String nodeId) { + byte[] nid = nodeId.getBytes(); + return nid; + } + + /** + * Creates the ChassisID TLV value including the subtype and ChassisID + * string + * + * @param nodeId + * node identifier string + * @return the ChassisID TLV value in byte array + */ + static public byte[] createChassisIDTLVValue(String nodeId) { + byte[] nid = HexEncode.bytesFromHexString(nodeId); + byte[] cid = new byte[6]; + int srcPos = 0, dstPos = 0; + + if (nid.length > cid.length) { + srcPos = nid.length - cid.length; + } else { + dstPos = cid.length - nid.length; + } + System.arraycopy(nid, srcPos, cid, dstPos, cid.length); + + byte[] cidValue = new byte[cid.length + chassisIDSubType.length]; + + System.arraycopy(chassisIDSubType, 0, cidValue, 0, + chassisIDSubType.length); + System.arraycopy(cid, 0, cidValue, chassisIDSubType.length, cid.length); + + return cidValue; + } + + /** + * Creates the PortID TLV value including the subtype and PortID string + * + * @param portId + * port identifier string + * @return the PortID TLV value in byte array + */ + static public byte[] createPortIDTLVValue(String portId) { + byte[] pid = portId.getBytes(Charset.defaultCharset()); + byte[] pidValue = new byte[pid.length + portIDSubType.length]; + + System.arraycopy(portIDSubType, 0, pidValue, 0, portIDSubType.length); + System.arraycopy(pid, 0, pidValue, portIDSubType.length, pid.length); + + return pidValue; + } + + /** + * Creates the custom TLV value including OUI, subtype and custom string + * + * @param portId + * port identifier string + * @return the custom TLV value in byte array + */ + static public byte[] createCustomTLVValue(String customString) { + byte[] customArray = customString.getBytes(Charset.defaultCharset()); + byte[] customValue = new byte[customTlvOffset + customArray.length]; + + System.arraycopy(OFOUI, 0, customValue, 0, OFOUI.length); + System.arraycopy(customTlvSubType, 0, customValue, OFOUI.length, + customTlvSubType.length); + System.arraycopy(customArray, 0, customValue, customTlvOffset, + customArray.length); + + return customValue; + } + + /** + * Retrieves the string from TLV value and 
returns it in HexString format + * + * @param tlvValue + * the TLV value + * @param tlvLen + * the TLV length + * @return the HexString + */ + static public String getHexStringValue(byte[] tlvValue, int tlvLen) { + byte[] cidBytes = new byte[tlvLen - chassisIDSubType.length]; + System.arraycopy(tlvValue, chassisIDSubType.length, cidBytes, 0, + cidBytes.length); + return HexEncode.bytesToHexStringFormat(cidBytes); + } + + /** + * Retrieves the string from TLV value + * + * @param tlvValue + * the TLV value + * @param tlvLen + * the TLV length + * @return the string + */ + static public String getStringValue(byte[] tlvValue, int tlvLen) { + byte[] pidSubType = new byte[portIDSubType.length]; + byte[] pidBytes = new byte[tlvLen - portIDSubType.length]; + System.arraycopy(tlvValue, 0, pidSubType, 0, + pidSubType.length); + System.arraycopy(tlvValue, portIDSubType.length, pidBytes, 0, + pidBytes.length); + if (pidSubType[0] == (byte) 0x3) { + return HexEncode.bytesToHexStringFormat(pidBytes); + } else { + return (new String(pidBytes, Charset.defaultCharset())); + } + } + + /** + * Retrieves the custom string from the Custom TLV value which includes OUI, + * subtype and custom string + * + * @param customTlvValue + * the custom TLV value + * @param customTlvLen + * the custom TLV length + * @return the custom string + */ + static public String getCustomString(byte[] customTlvValue, int customTlvLen) { + String customString = ""; + byte[] vendor = new byte[3]; + System.arraycopy(customTlvValue, 0, vendor, 0, vendor.length); + if (Arrays.equals(vendor, LLDPTLV.OFOUI)) { + int customArrayLength = customTlvLen - customTlvOffset; + byte[] customArray = new byte[customArrayLength]; + System.arraycopy(customTlvValue, customTlvOffset, customArray, 0, + customArrayLength); + try { + customString = new String(customArray, "UTF-8"); + } catch (UnsupportedEncodingException e) { + } + } + + return customString; + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/NetUtils.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/NetUtils.java new file mode 100644 index 0000000000..0320cf6b47 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/NetUtils.java @@ -0,0 +1,521 @@ +/* + * Copyright (c) 2013-2014 Cisco Systems, Inc. and others. All rights reserved. 
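
[Editor's note: an illustrative round trip, with invented identifier strings, for the value helpers on LLDPTLV shown above. Custom TLVs carry the OpenFlow OUI plus a subtype in front of the payload, and the get*Value helpers strip those prefixes again on receipt.]

    import org.opendaylight.controller.liblldp.LLDPTLV;

    public class LldpTlvValueDemo {
        public static void main(String[] args) {
            // Custom TLV value: OFOUI (3 bytes) + subtype (1 byte) + UTF-8 string.
            byte[] custom = LLDPTLV.createCustomTLVValue("openflow:1:2");
            System.out.println(LLDPTLV.getCustomString(custom, custom.length)); // openflow:1:2

            // ChassisID TLV value: 1-byte subtype + 6-byte MAC, rendered back as hex.
            byte[] chassis = LLDPTLV.createChassisIDTLVValue("00:11:22:33:44:55");
            System.out.println(LLDPTLV.getHexStringValue(chassis, chassis.length)); // 00:11:22:33:44:55
        }
    }
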
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.net.Inet4Address; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Utility class containing the common utility functions needed for operating on + * networking data structures + */ +public abstract class NetUtils { + protected static final Logger logger = LoggerFactory.getLogger(NetUtils.class); + /** + * Constant holding the number of bits in a byte + */ + public static final int NumBitsInAByte = 8; + + /** + * Constant holding the number of bytes in MAC Address + */ + public static final int MACAddrLengthInBytes = 6; + + /** + * Constant holding the number of words in MAC Address + */ + public static final int MACAddrLengthInWords = 3; + + /** + * Constant holding the broadcast MAC address + */ + private static final byte[] BroadcastMACAddr = {-1, -1, -1, -1, -1, -1}; + + /** + * Converts a 4 bytes array into an integer number + * + * @param ba + * the 4 bytes long byte array + * @return the integer number + */ + public static int byteArray4ToInt(byte[] ba) { + if (ba == null || ba.length != 4) { + return 0; + } + return (0xff & ba[0]) << 24 | (0xff & ba[1]) << 16 | (0xff & ba[2]) << 8 | (0xff & ba[3]); + } + + /** + * Converts a 6 bytes array into a long number MAC addresses. + * + * @param ba + * The 6 bytes long byte array. + * @return The long number. + * Zero is returned if {@code ba} is {@code null} or + * the length of it is not six. + */ + public static long byteArray6ToLong(byte[] ba) { + if (ba == null || ba.length != MACAddrLengthInBytes) { + return 0L; + } + long num = 0L; + int i = 0; + do { + num <<= NumBitsInAByte; + num |= 0xff & ba[i]; + i++; + } while (i < MACAddrLengthInBytes); + return num; + } + + /** + * Converts a long number to a 6 bytes array for MAC addresses. + * + * @param addr + * The long number. + * @return The byte array. + */ + public static byte[] longToByteArray6(long addr){ + byte[] mac = new byte[MACAddrLengthInBytes]; + int i = MACAddrLengthInBytes - 1; + do { + mac[i] = (byte) addr; + addr >>>= NumBitsInAByte; + i--; + } while (i >= 0); + return mac; + } + + /** + * Converts an integer number into a 4 bytes array + * + * @param i + * the integer number + * @return the byte array + */ + public static byte[] intToByteArray4(int i) { + return new byte[] { (byte) ((i >> 24) & 0xff), (byte) ((i >> 16) & 0xff), (byte) ((i >> 8) & 0xff), + (byte) (i & 0xff) }; + } + + /** + * Converts an IP address passed as integer value into the respective + * InetAddress object + * + * @param address + * the IP address in integer form + * @return the IP address in InetAddress form + */ + public static InetAddress getInetAddress(int address) { + InetAddress ip = null; + try { + ip = InetAddress.getByAddress(NetUtils.intToByteArray4(address)); + } catch (UnknownHostException e) { + logger.error("", e); + } + return ip; + } + + /** + * Return the InetAddress Network Mask given the length of the prefix bit + * mask. The prefix bit mask indicates the contiguous leading bits that are + * NOT masked out. 
Example: A prefix bit mask length of 8 will give an + * InetAddress Network Mask of 255.0.0.0 + * + * @param prefixMaskLength + * integer representing the length of the prefix network mask + * @param isV6 + * boolean representing the IP version of the returned address + * @return + */ + public static InetAddress getInetNetworkMask(int prefixMaskLength, boolean isV6) { + if (prefixMaskLength < 0 || (!isV6 && prefixMaskLength > 32) || (isV6 && prefixMaskLength > 128)) { + return null; + } + byte v4Address[] = { 0, 0, 0, 0 }; + byte v6Address[] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; + byte address[] = (isV6) ? v6Address : v4Address; + int numBytes = prefixMaskLength / 8; + int numBits = prefixMaskLength % 8; + int i = 0; + for (; i < numBytes; i++) { + address[i] = (byte) 0xff; + } + if (numBits > 0) { + int rem = 0; + for (int j = 0; j < numBits; j++) { + rem |= 1 << (7 - j); + } + address[i] = (byte) rem; + } + + try { + return InetAddress.getByAddress(address); + } catch (UnknownHostException e) { + logger.error("", e); + } + return null; + } + + /** + * Returns the prefix size in bits of the specified subnet mask. Example: + * For the subnet mask ff.ff.ff.e0 it returns 25 while for ff.00.00.00 it + * returns 8. If the passed subnetMask array is null, 0 is returned. + * + * @param subnetMask + * the subnet mask as byte array + * @return the prefix length as number of bits + */ + public static int getSubnetMaskLength(byte[] subnetMask) { + int maskLength = 0; + if (subnetMask != null && (subnetMask.length == 4 || subnetMask.length == 16)) { + int index = 0; + while (index < subnetMask.length && subnetMask[index] == (byte) 0xFF) { + maskLength += NetUtils.NumBitsInAByte; + index++; + } + if (index != subnetMask.length) { + int bits = NetUtils.NumBitsInAByte - 1; + while (bits >= 0 && (subnetMask[index] & 1 << bits) != 0) { + bits--; + maskLength++; + } + } + } + return maskLength; + } + + /** + * Returns the prefix size in bits of the specified subnet mask. Example: + * For the subnet mask 255.255.255.128 it returns 25 while for 255.0.0.0 it + * returns 8. If the passed subnetMask object is null, 0 is returned + * + * @param subnetMask + * the subnet mask as InetAddress + * @return the prefix length as number of bits + */ + public static int getSubnetMaskLength(InetAddress subnetMask) { + return subnetMask == null ? 
0 : NetUtils.getSubnetMaskLength(subnetMask.getAddress()); + } + + /** + * Given an IP address and a prefix network mask length, it returns the + * equivalent subnet prefix IP address Example: for ip = "172.28.30.254" and + * maskLen = 25 it will return "172.28.30.128" + * + * @param ip + * the IP address in InetAddress form + * @param maskLen + * the length of the prefix network mask + * @return the subnet prefix IP address in InetAddress form + */ + public static InetAddress getSubnetPrefix(InetAddress ip, int maskLen) { + int bytes = maskLen / 8; + int bits = maskLen % 8; + byte modifiedByte; + byte[] sn = ip.getAddress(); + if (bits > 0) { + modifiedByte = (byte) (sn[bytes] >> (8 - bits)); + sn[bytes] = (byte) (modifiedByte << (8 - bits)); + bytes++; + } + for (; bytes < sn.length; bytes++) { + sn[bytes] = (byte) (0); + } + try { + return InetAddress.getByAddress(sn); + } catch (UnknownHostException e) { + return null; + } + } + + /** + * Checks if the test address and mask conflicts with the filter address and + * mask + * + * For example: + * testAddress: 172.28.2.23 + * testMask: 255.255.255.0 + * filterAddress: 172.28.1.10 + * testMask: 255.255.255.0 + * do conflict + * + * testAddress: 172.28.2.23 + * testMask: 255.255.255.0 + * filterAddress: 172.28.1.10 + * testMask: 255.255.0.0 + * do not conflict + * + * Null parameters are permitted + * + * @param testAddress + * @param filterAddress + * @param testMask + * @param filterMask + * @return + */ + public static boolean inetAddressConflict(InetAddress testAddress, InetAddress filterAddress, InetAddress testMask, + InetAddress filterMask) { + // Sanity check + if ((testAddress == null) || (filterAddress == null)) { + return false; + } + + // Presence check + if (isAny(testAddress) || isAny(filterAddress)) { + return false; + } + + int testMaskLen = (testMask == null) ? ((testAddress instanceof Inet4Address) ? 32 : 128) : NetUtils + .getSubnetMaskLength(testMask); + int filterMaskLen = (filterMask == null) ? ((testAddress instanceof Inet4Address) ? 32 : 128) : NetUtils + .getSubnetMaskLength(filterMask); + + // Mask length check. Test mask has to be more specific than filter one + if (testMaskLen < filterMaskLen) { + return true; + } + + // Subnet Prefix on filter mask length must be the same + InetAddress prefix1 = getSubnetPrefix(testAddress, filterMaskLen); + InetAddress prefix2 = getSubnetPrefix(filterAddress, filterMaskLen); + return (!prefix1.equals(prefix2)); + } + + /** + * Returns true if the passed MAC address is all zero + * + * @param mac + * the byte array representing the MAC address + * @return true if all MAC bytes are zero + */ + public static boolean isZeroMAC(byte[] mac) { + for (short i = 0; i < 6; i++) { + if (mac[i] != 0) { + return false; + } + } + return true; + } + + /** + * Returns true if the MAC address is the broadcast MAC address and false + * otherwise. + * + * @param MACAddress + * @return + */ + public static boolean isBroadcastMACAddr(byte[] MACAddress) { + if (MACAddress.length == MACAddrLengthInBytes) { + for (int i = 0; i < 6; i++) { + if (MACAddress[i] != BroadcastMACAddr[i]) { + return false; + } + } + return true; + } + + return false; + } + /** + * Returns true if the MAC address is a unicast MAC address and false + * otherwise. 
+ * + * @param MACAddress + * @return + */ + public static boolean isUnicastMACAddr(byte[] MACAddress) { + if (MACAddress.length == MACAddrLengthInBytes) { + return (MACAddress[0] & 1) == 0; + } + return false; + } + + /** + * Returns true if the MAC address is a multicast MAC address and false + * otherwise. Note that this explicitly returns false for the broadcast MAC + * address. + * + * @param MACAddress + * @return + */ + public static boolean isMulticastMACAddr(byte[] MACAddress) { + if (MACAddress.length == MACAddrLengthInBytes && !isBroadcastMACAddr(MACAddress)) { + return (MACAddress[0] & 1) != 0; + } + return false; + } + + /** + * Returns true if the passed InetAddress contains all zero + * + * @param ip + * the IP address to test + * @return true if the address is all zero + */ + public static boolean isAny(InetAddress ip) { + for (byte b : ip.getAddress()) { + if (b != 0) { + return false; + } + } + return true; + } + + public static boolean fieldsConflict(int field1, int field2) { + if ((field1 == 0) || (field2 == 0) || (field1 == field2)) { + return false; + } + return true; + } + + public static InetAddress parseInetAddress(String addressString) { + InetAddress address = null; + try { + address = InetAddress.getByName(addressString); + } catch (UnknownHostException e) { + logger.error("", e); + } + return address; + } + + /** + * Checks if the passed IP v4 address in string form is valid The address + * may specify a mask at the end as "/MM" + * + * @param cidr + * the v4 address as A.B.C.D/MM + * @return + */ + public static boolean isIPv4AddressValid(String cidr) { + if (cidr == null) { + return false; + } + + String values[] = cidr.split("/"); + Pattern ipv4Pattern = Pattern + .compile("(([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\.){3}([01]?\\d\\d?|2[0-4]\\d|25[0-5])"); + Matcher mm = ipv4Pattern.matcher(values[0]); + if (!mm.matches()) { + return false; + } + if (values.length >= 2) { + int prefix = Integer.valueOf(values[1]); + if ((prefix < 0) || (prefix > 32)) { + return false; + } + } + return true; + } + + /** + * Checks if the passed IP v6 address in string form is valid The address + * may specify a mask at the end as "/MMM" + * + * @param cidr + * the v6 address as A::1/MMM + * @return + */ + public static boolean isIPv6AddressValid(String cidr) { + if (cidr == null) { + return false; + } + + String values[] = cidr.split("/"); + try { + // when given an IP address, InetAddress.getByName validates the ip + // address + InetAddress addr = InetAddress.getByName(values[0]); + if (!(addr instanceof Inet6Address)) { + return false; + } + } catch (UnknownHostException ex) { + return false; + } + + if (values.length >= 2) { + int prefix = Integer.valueOf(values[1]); + if ((prefix < 0) || (prefix > 128)) { + return false; + } + } + return true; + } + + /** + * Checks if the passed IP address in string form is a valid v4 or v6 + * address. 
The address may specify a mask at the end as "/MMM" + * + * @param cidr + * the v4 or v6 address as IP/MMM + * @return + */ + public static boolean isIPAddressValid(String cidr) { + return NetUtils.isIPv4AddressValid(cidr) || NetUtils.isIPv6AddressValid(cidr); + } + + /* + * Following utilities are useful when you need to compare or bit shift java + * primitive type variable which are inherently signed + */ + /** + * Returns the unsigned value of the passed byte variable + * + * @param b + * the byte value + * @return the int variable containing the unsigned byte value + */ + public static int getUnsignedByte(byte b) { + return b & 0xFF; + } + + /** + * Return the unsigned value of the passed short variable + * + * @param s + * the short value + * @return the int variable containing the unsigned short value + */ + public static int getUnsignedShort(short s) { + return s & 0xFFFF; + } + + /** + * Returns the highest v4 or v6 InetAddress + * + * @param v6 + * true for IPv6, false for Ipv4 + * @return The highest IPv4 or IPv6 address + */ + public static InetAddress gethighestIP(boolean v6) { + try { + return (v6) ? InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff") : InetAddress + .getByName("255.255.255.255"); + } catch (UnknownHostException e) { + return null; + } + } + + /** + * Returns Broadcast MAC Address + * + * @return the byte array containing broadcast mac address + */ + public static byte[] getBroadcastMACAddr() { + return Arrays.copyOf(BroadcastMACAddr, BroadcastMACAddr.length); + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Packet.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Packet.java new file mode 100644 index 0000000000..2af185221c --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/Packet.java @@ -0,0 +1,364 @@ +/* + * Copyright (c) 2013-2014 Cisco Systems, Inc. and others. All rights reserved. 
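
[Editor's note: a short usage sketch, with example addresses invented, for the NetUtils mask and prefix helpers added above.]

    import java.net.InetAddress;
    import org.opendaylight.controller.liblldp.NetUtils;

    public class NetUtilsDemo {
        public static void main(String[] args) throws Exception {
            // A /25 IPv4 prefix length expands to the mask 255.255.255.128 ...
            InetAddress mask = NetUtils.getInetNetworkMask(25, false);
            System.out.println(mask.getHostAddress());

            // ... and the mask collapses back to the prefix length.
            System.out.println(NetUtils.getSubnetMaskLength(mask));   // 25

            // Reduce a host address to its subnet prefix.
            InetAddress host = InetAddress.getByName("172.28.30.254");
            System.out.println(NetUtils.getSubnetPrefix(host, 25).getHostAddress()); // 172.28.30.128

            // Simple CIDR validation helpers.
            System.out.println(NetUtils.isIPv4AddressValid("10.0.0.0/8"));    // true
            System.out.println(NetUtils.isIPAddressValid("2001:db8::1/64"));  // true
        }
    }
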
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.liblldp; + +import java.util.Arrays; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Abstract class which represents the generic network packet object It provides + * the basic methods which are common for all the packets, like serialize and + * deserialize + */ + +public abstract class Packet { + protected static final Logger logger = LoggerFactory + .getLogger(Packet.class); + // Access level granted to this packet + protected boolean writeAccess; + // When deserialized from wire, packet could result corrupted + protected boolean corrupted; + // The packet that encapsulate this packet + protected Packet parent; + // The packet encapsulated by this packet + protected Packet payload; + // The unparsed raw payload carried by this packet + protected byte[] rawPayload; + // Bit coordinates of packet header fields + protected Map> hdrFieldCoordMap; + // Header fields values: Map + protected Map hdrFieldsMap; + // The class of the encapsulated packet object + protected Class payloadClass; + + public Packet() { + writeAccess = false; + corrupted = false; + } + + public Packet(boolean writeAccess) { + this.writeAccess = writeAccess; + corrupted = false; + } + + public Packet getParent() { + return parent; + } + + public Packet getPayload() { + return payload; + } + + public void setParent(Packet parent) { + this.parent = parent; + } + + public void setPayload(Packet payload) { + this.payload = payload; + } + + public void setHeaderField(String headerField, byte[] readValue) { + hdrFieldsMap.put(headerField, readValue); + } + + /** + * This method deserializes the data bits obtained from the wire into the + * respective header and payload which are of type Packet + * + * @param byte[] data - data from wire to deserialize + * @param int bitOffset bit position where packet header starts in data + * array + * @param int size of packet in bits + * @return Packet + * @throws PacketException + */ + public Packet deserialize(byte[] data, int bitOffset, int size) + throws PacketException { + + // Deserialize the header fields one by one + int startOffset = 0, numBits = 0; + for (Entry> pairs : hdrFieldCoordMap + .entrySet()) { + String hdrField = pairs.getKey(); + startOffset = bitOffset + this.getfieldOffset(hdrField); + numBits = this.getfieldnumBits(hdrField); + + byte[] hdrFieldBytes = null; + try { + hdrFieldBytes = BitBufferHelper.getBits(data, startOffset, + numBits); + } catch (BufferException e) { + throw new PacketException(e.getMessage()); + } + + /* + * Store the raw read value, checks the payload type and set the + * payloadClass accordingly + */ + this.setHeaderField(hdrField, hdrFieldBytes); + + if (logger.isTraceEnabled()) { + logger.trace("{}: {}: {} (offset {} bitsize {})", + new Object[] { this.getClass().getSimpleName(), hdrField, + HexEncode.bytesToHexString(hdrFieldBytes), + startOffset, numBits }); + } + } + + // Deserialize the payload now + int payloadStart = startOffset + numBits; + int payloadSize = data.length * NetUtils.NumBitsInAByte - payloadStart; + + if (payloadClass != null) { + try { + payload = payloadClass.newInstance(); + } catch (Exception e) { + throw new RuntimeException( + 
"Error parsing payload for Ethernet packet", e); + } + payload.deserialize(data, payloadStart, payloadSize); + payload.setParent(this); + } else { + /* + * The payload class was not set, it means no class for parsing + * this payload is present. Let's store the raw payload if any. + */ + int start = payloadStart / NetUtils.NumBitsInAByte; + int stop = start + payloadSize / NetUtils.NumBitsInAByte; + rawPayload = Arrays.copyOfRange(data, start, stop); + } + + + // Take care of computation that can be done only after deserialization + postDeserializeCustomOperation(data, payloadStart - getHeaderSize()); + + return this; + } + + /** + * This method serializes the header and payload from the respective + * packet class, into a single stream of bytes to be sent on the wire + * + * @return The byte array representing the serialized Packet + * @throws PacketException + */ + public byte[] serialize() throws PacketException { + + // Acquire or compute the serialized payload + byte[] payloadBytes = null; + if (payload != null) { + payloadBytes = payload.serialize(); + } else if (rawPayload != null) { + payloadBytes = rawPayload; + } + int payloadSize = (payloadBytes == null) ? 0 : payloadBytes.length; + + // Allocate the buffer to contain the full (header + payload) packet + int headerSize = this.getHeaderSize() / NetUtils.NumBitsInAByte; + byte packetBytes[] = new byte[headerSize + payloadSize]; + if (payloadBytes != null) { + System.arraycopy(payloadBytes, 0, packetBytes, headerSize, payloadSize); + } + + // Serialize this packet header, field by field + for (Map.Entry> pairs : hdrFieldCoordMap + .entrySet()) { + String field = pairs.getKey(); + byte[] fieldBytes = hdrFieldsMap.get(field); + // Let's skip optional fields when not set + if (fieldBytes != null) { + try { + BitBufferHelper.setBytes(packetBytes, fieldBytes, + getfieldOffset(field), getfieldnumBits(field)); + } catch (BufferException e) { + throw new PacketException(e.getMessage()); + } + } + } + + // Perform post serialize operations (like checksum computation) + postSerializeCustomOperation(packetBytes); + + if (logger.isTraceEnabled()) { + logger.trace("{}: {}", this.getClass().getSimpleName(), + HexEncode.bytesToHexString(packetBytes)); + } + + return packetBytes; + } + + /** + * This method gets called at the end of the serialization process It is + * intended for the child packets to insert some custom data into the output + * byte stream which cannot be done or cannot be done efficiently during the + * normal Packet.serialize() path. An example is the checksum computation + * for IPv4 + * + * @param byte[] - serialized bytes + * @throws PacketException + */ + protected void postSerializeCustomOperation(byte[] myBytes) + throws PacketException { + // no op + } + + /** + * This method re-computes the checksum of the bits received on the wire and + * validates it with the checksum in the bits received Since the computation + * of checksum varies based on the protocol, this method is overridden. + * Currently only IPv4 and ICMP do checksum computation and validation. TCP + * and UDP need to implement these if required. 
+ * + * @param byte[] data The byte stream representing the Ethernet frame + * @param int startBitOffset The bit offset from where the byte array corresponding to this Packet starts in the frame + * @throws PacketException + */ + protected void postDeserializeCustomOperation(byte[] data, int startBitOffset) + throws PacketException { + // no op + } + + /** + * Gets the header length in bits + * + * @return int the header length in bits + */ + public int getHeaderSize() { + int size = 0; + /* + * We need to iterate over the fields that were read in the frame + * (hdrFieldsMap) not all the possible ones described in + * hdrFieldCoordMap. For ex, 802.1Q may or may not be there + */ + for (Map.Entry fieldEntry : hdrFieldsMap.entrySet()) { + if (fieldEntry.getValue() != null) { + String field = fieldEntry.getKey(); + size += getfieldnumBits(field); + } + } + return size; + } + + /** + * This method fetches the start bit offset for header field specified by + * 'fieldname'. The offset is present in the hdrFieldCoordMap of the + * respective packet class + * + * @param String + * fieldName + * @return Integer - startOffset of the requested field + */ + public int getfieldOffset(String fieldName) { + return hdrFieldCoordMap.get(fieldName).getLeft(); + } + + /** + * This method fetches the number of bits for header field specified by + * 'fieldname'. The numBits are present in the hdrFieldCoordMap of the + * respective packet class + * + * @param String + * fieldName + * @return Integer - number of bits of the requested field + */ + public int getfieldnumBits(String fieldName) { + return hdrFieldCoordMap.get(fieldName).getRight(); + } + + @Override + public String toString() { + StringBuilder ret = new StringBuilder(); + ret.append(this.getClass().getSimpleName()); + ret.append(": ["); + for (String field : hdrFieldCoordMap.keySet()) { + byte[] value = hdrFieldsMap.get(field); + ret.append(field); + ret.append(": "); + ret.append(HexEncode.bytesToHexString(value)); + ret.append(", "); + } + ret.replace(ret.length()-2, ret.length()-1, "]"); + return ret.toString(); + } + + /** + * Returns the raw payload carried by this packet in case payload was not + * parsed. Caller can call this function in case the getPaylod() returns null. + * + * @return The raw payload if not parsable as an array of bytes, null otherwise + */ + public byte[] getRawPayload() { + return rawPayload; + } + + /** + * Set a raw payload in the packet class + * + * @param payload The raw payload as byte array + */ + public void setRawPayload(byte[] payload) { + this.rawPayload = Arrays.copyOf(payload, payload.length); + } + + /** + * Return whether the deserialized packet is to be considered corrupted. + * This is the case when the checksum computed after reconstructing the + * packet received from wire is not equal to the checksum read from the + * stream. For the Packet class which do not have a checksum field, this + * function will always return false. + * + * + * @return true if the deserialized packet's recomputed checksum is not + * equal to the packet carried checksum + */ + public boolean isCorrupted() { + return corrupted; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + + ((this.hdrFieldsMap == null) ? 
0 : hdrFieldsMap.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (getClass() != obj.getClass()) { + return false; + } + Packet other = (Packet) obj; + if (hdrFieldsMap == other.hdrFieldsMap) { + return true; + } + if (hdrFieldsMap == null || other.hdrFieldsMap == null) { + return false; + } + if (hdrFieldsMap != null && other.hdrFieldsMap != null) { + for (String field : hdrFieldsMap.keySet()) { + if (!Arrays.equals(hdrFieldsMap.get(field), other.hdrFieldsMap.get(field))) { + return false; + } + } + } else { + return false; + } + return true; + } +} diff --git a/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/PacketException.java b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/PacketException.java new file mode 100644 index 0000000000..c69fc03e91 --- /dev/null +++ b/opendaylight/commons/liblldp/src/main/java/org/opendaylight/controller/liblldp/PacketException.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.liblldp; + +/** + * Describes an exception that is raised when the process of serializing or + * deserializing a network packet/stream fails. This generally happens when the + * packet/stream is malformed. + * + */ +public class PacketException extends Exception { + private static final long serialVersionUID = 1L; + + public PacketException(String message) { + super(message); + } +} diff --git a/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/BitBufferHelperTest.java b/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/BitBufferHelperTest.java new file mode 100644 index 0000000000..07fbf0599b --- /dev/null +++ b/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/BitBufferHelperTest.java @@ -0,0 +1,693 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. 
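
[Editor's note: the receive-side counterpart to the serialization sketch earlier, again illustrative only. Packet.deserialize() walks the header fields of the outer Ethernet frame and, because Ethernet registers LLDP in etherTypeClassMap, instantiates and parses the LLDP payload automatically; the handler class and method names below are invented.]

    import org.opendaylight.controller.liblldp.Ethernet;
    import org.opendaylight.controller.liblldp.LLDP;
    import org.opendaylight.controller.liblldp.NetUtils;
    import org.opendaylight.controller.liblldp.Packet;
    import org.opendaylight.controller.liblldp.PacketException;

    public class LldpPacketHandler {

        /** Parses a raw frame received from the wire and extracts the LLDP PDU, if any. */
        public LLDP decode(byte[] rawFrame) throws PacketException {
            Ethernet ethernet = new Ethernet();
            // Offsets and sizes are expressed in bits throughout liblldp.
            ethernet.deserialize(rawFrame, 0, rawFrame.length * NetUtils.NumBitsInAByte);

            Packet payload = ethernet.getPayload();
            if (payload instanceof LLDP) {
                // Parsed TLVs are then available via getChassisId(), getPortId(), getTtl().
                return (LLDP) payload;
            }
            // Unknown ethertype: the unparsed bytes remain accessible via getRawPayload().
            return null;
        }
    }
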
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.sal.packet; + +import junit.framework.Assert; + +import org.junit.Test; +import org.opendaylight.controller.liblldp.BitBufferHelper; + +public class BitBufferHelperTest { + + @Test + public void testGetByte() { + byte[] data = { 100 }; + Assert.assertTrue(BitBufferHelper.getByte(data) == 100); + } + + @Test + public void testGetBits() throws Exception { + byte[] data = { 10, 12, 14, 20, 55, 69, 82, 97, 109, 117, 127, -50 }; + byte[] bits; + + bits = BitBufferHelper.getBits(data, 88, 8); //BYTE extraOffsetBits = extranumBits = 0 + Assert.assertTrue(bits[0] == -50); + + bits = BitBufferHelper.getBits(data, 8, 16); //Short + Assert.assertTrue(bits[0] == 12); + Assert.assertTrue(bits[1] == 14); + + bits = BitBufferHelper.getBits(data, 32, 32); //Int + Assert.assertTrue(bits[0] == 55); + Assert.assertTrue(bits[1] == 69); + Assert.assertTrue(bits[2] == 82); + Assert.assertTrue(bits[3] == 97); + + bits = BitBufferHelper.getBits(data, 16, 48); //Long + Assert.assertTrue(bits[0] == 14); + Assert.assertTrue(bits[1] == 20); + Assert.assertTrue(bits[2] == 55); + Assert.assertTrue(bits[3] == 69); + Assert.assertTrue(bits[4] == 82); + Assert.assertTrue(bits[5] == 97); + + bits = BitBufferHelper.getBits(data, 40, 7); //BYTE extraOffsetBits = extranumBits != 0 + Assert.assertTrue(bits[0] == 34); + + bits = BitBufferHelper.getBits(data, 8, 13); //Short + Assert.assertTrue(bits[0] == 1); + Assert.assertTrue(bits[1] == -127); + + bits = BitBufferHelper.getBits(data, 32, 28); //Int + Assert.assertTrue(bits[0] == 3); + Assert.assertTrue(bits[1] == 116); + Assert.assertTrue(bits[2] == 85); + Assert.assertTrue(bits[3] == 38); + + bits = BitBufferHelper.getBits(data, 16, 41); //Long + Assert.assertTrue(bits[0] == 0); + Assert.assertTrue(bits[1] == 28); + Assert.assertTrue(bits[2] == 40); + Assert.assertTrue(bits[3] == 110); + Assert.assertTrue(bits[4] == -118); + Assert.assertTrue(bits[5] == -92); + + bits = BitBufferHelper.getBits(data, 3, 7); //BYTE extraOffsetBits != 0; extranumBits == 0 + Assert.assertTrue(bits[0] == 40); + + bits = BitBufferHelper.getBits(data, 13, 16); //Short + Assert.assertTrue(bits[0] == -127); + Assert.assertTrue(bits[1] == -62); + + bits = BitBufferHelper.getBits(data, 5, 32); //Int + Assert.assertTrue(bits[0] == 65); + Assert.assertTrue(bits[1] == -127); + Assert.assertTrue(bits[2] == -62); + Assert.assertTrue(bits[3] == -122); + + bits = BitBufferHelper.getBits(data, 23, 48); //Long + Assert.assertTrue(bits[0] == 10); + Assert.assertTrue(bits[1] == 27); + Assert.assertTrue(bits[2] == -94); + Assert.assertTrue(bits[3] == -87); + Assert.assertTrue(bits[4] == 48); + Assert.assertTrue(bits[5] == -74); + + bits = BitBufferHelper.getBits(data, 66, 9); //BYTE extraOffsetBits != 0; extranumBits != 0 + Assert.assertTrue(bits[0] == 1); + Assert.assertTrue(bits[1] == 107); + + bits = BitBufferHelper.getBits(data, 13, 15); //Short + Assert.assertTrue(bits[0] == 64); + Assert.assertTrue(bits[1] == -31); + + bits = BitBufferHelper.getBits(data, 5, 29); //Int + Assert.assertTrue(bits[0] == 8); + Assert.assertTrue(bits[1] == 48); + Assert.assertTrue(bits[2] == 56); + Assert.assertTrue(bits[3] == 80); + + bits = BitBufferHelper.getBits(data, 31, 43); //Long + Assert.assertTrue(bits[0] == 0); + Assert.assertTrue(bits[1] == -35); + 
Assert.assertTrue(bits[2] == 21); + Assert.assertTrue(bits[3] == 73); + Assert.assertTrue(bits[4] == -123); + Assert.assertTrue(bits[5] == -75); + + bits = BitBufferHelper.getBits(data, 4, 12); //Short + Assert.assertTrue(bits[0] == 10); + Assert.assertTrue(bits[1] == 12); + + byte[] data1 = { 0, 8 }; + bits = BitBufferHelper.getBits(data1, 7, 9); //Short + Assert.assertTrue(bits[0] == 0); + Assert.assertTrue(bits[1] == 8); + + byte[] data2 = { 2, 8 }; + bits = BitBufferHelper.getBits(data2, 0, 7); //Short + Assert.assertTrue(bits[0] == 1); + + bits = BitBufferHelper.getBits(data2, 7, 9); //Short + Assert.assertTrue(bits[0] == 0); + Assert.assertTrue(bits[1] == 8); + } + + // [01101100][01100000] + // [01100011] + @Test + public void testGetBytes() throws Exception { + byte data[] = { 108, 96, 125, -112, 5, 6, 108, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 21, 22 }; + byte[] x; + + Assert.assertTrue(BitBufferHelper.getBits(data, 0, 8)[0] == 108); + Assert.assertTrue(BitBufferHelper.getBits(data, 8, 8)[0] == 96); + + x = BitBufferHelper.getBits(data, 0, 10); + Assert.assertTrue(x[0] == 1); + Assert.assertTrue(x[1] == -79); + + x = BitBufferHelper.getBits(data, 3, 8); + Assert.assertTrue(x[0] == 99); + //Assert.assertTrue(x[1] == 97); + + } + + @Test + public void testMSBMask() { + int numBits = 1; //MSB + int mask = BitBufferHelper.getMSBMask(numBits); + Assert.assertTrue(mask == 128); + + numBits = 8; + mask = BitBufferHelper.getMSBMask(numBits); + Assert.assertTrue(mask == 255); + + numBits = 2; + mask = BitBufferHelper.getMSBMask(numBits); + Assert.assertTrue(mask == 192); + } + + @Test + public void testLSBMask() { + int numBits = 1; //LSB + int mask = BitBufferHelper.getLSBMask(numBits); + Assert.assertTrue(mask == 1); + + numBits = 3; + mask = BitBufferHelper.getLSBMask(numBits); + Assert.assertTrue(mask == 7); + + numBits = 8; + mask = BitBufferHelper.getLSBMask(numBits); + Assert.assertTrue(mask == 255); + } + + @Test + public void testToByteArray() { + short sh = Short.MAX_VALUE; + byte[] data_sh = new byte[Byte.SIZE / 8]; + data_sh = BitBufferHelper.toByteArray(sh); + Assert.assertTrue(data_sh[0] == 127); + Assert.assertTrue(data_sh[1] == -1); + + short sh2 = Short.MIN_VALUE; + byte[] data_sh2 = new byte[Byte.SIZE / 8]; + data_sh2 = BitBufferHelper.toByteArray(sh2); + Assert.assertTrue(data_sh2[0] == -128); + Assert.assertTrue(data_sh2[1] == 0); + + short sh3 = 16384; + byte[] data_sh3 = new byte[Byte.SIZE / 8]; + data_sh3 = BitBufferHelper.toByteArray(sh3); + Assert.assertTrue(data_sh3[0] == 64); + Assert.assertTrue(data_sh3[1] == 0); + + short sh4 = 146; //TCP headerlenflags - startoffset = 103 + byte[] data_sh4 = new byte[Byte.SIZE / 8]; + data_sh4 = BitBufferHelper.toByteArray(sh4); + Assert.assertTrue(data_sh4[0] == 0); + Assert.assertTrue(data_sh4[1] == -110); + + short sh4_2 = 5000; //IPv4 Offset - startOffset = 51 (to 63) + byte[] data_sh4_2 = new byte[Byte.SIZE / 8]; + data_sh4_2 = BitBufferHelper.toByteArray(sh4_2); + Assert.assertTrue(data_sh4_2[0] == 19); + Assert.assertTrue(data_sh4_2[1] == -120); + + short sh4_3 = 5312; //numEndRestBits < numBitstoShiftBy + byte[] data_sh4_3 = new byte[Byte.SIZE / 8]; + data_sh4_3 = BitBufferHelper.toByteArray(sh4_3); + Assert.assertTrue(data_sh4_3[0] == 20); + Assert.assertTrue(data_sh4_3[1] == -64); + + int Int = Integer.MAX_VALUE; + byte[] data_Int = new byte[Integer.SIZE / 8]; + data_Int = BitBufferHelper.toByteArray(Int); + Assert.assertTrue(data_Int[0] == 127); + Assert.assertTrue(data_Int[1] == -1); + 
Assert.assertTrue(data_Int[2] == -1); + Assert.assertTrue(data_Int[3] == -1); + + int Int2 = Integer.MIN_VALUE; + byte[] data_Int2 = new byte[Integer.SIZE / 8]; + data_Int2 = BitBufferHelper.toByteArray(Int2); + Assert.assertTrue(data_Int2[0] == -128); + Assert.assertTrue(data_Int2[1] == 0); + Assert.assertTrue(data_Int2[2] == 0); + Assert.assertTrue(data_Int2[3] == 0); + + int Int3 = 1077952576; + byte[] data_Int3 = new byte[Integer.SIZE / 8]; + data_Int3 = BitBufferHelper.toByteArray(Int3); + Assert.assertTrue(data_Int3[0] == 64); + Assert.assertTrue(data_Int3[1] == 64); + Assert.assertTrue(data_Int3[2] == 64); + Assert.assertTrue(data_Int3[3] == 64); + + long Lng = Long.MAX_VALUE; + byte[] data_lng = new byte[Long.SIZE / 8]; + data_lng = BitBufferHelper.toByteArray(Lng); + Assert.assertTrue(data_lng[0] == 127); + Assert.assertTrue(data_lng[1] == -1); + Assert.assertTrue(data_lng[2] == -1); + Assert.assertTrue(data_lng[3] == -1); + Assert.assertTrue(data_lng[4] == -1); + Assert.assertTrue(data_lng[5] == -1); + Assert.assertTrue(data_lng[6] == -1); + Assert.assertTrue(data_lng[7] == -1); + + long Lng2 = Long.MIN_VALUE; + byte[] data_lng2 = new byte[Long.SIZE / 8]; + data_lng2 = BitBufferHelper.toByteArray(Lng2); + Assert.assertTrue(data_lng2[0] == -128); + Assert.assertTrue(data_lng2[1] == 0); + Assert.assertTrue(data_lng2[2] == 0); + Assert.assertTrue(data_lng2[3] == 0); + Assert.assertTrue(data_lng2[4] == 0); + Assert.assertTrue(data_lng2[5] == 0); + Assert.assertTrue(data_lng2[6] == 0); + Assert.assertTrue(data_lng2[7] == 0); + + byte B = Byte.MAX_VALUE; + byte[] data_B = new byte[Byte.SIZE / 8]; + data_B = BitBufferHelper.toByteArray(B); + Assert.assertTrue(data_B[0] == 127); + + byte B1 = Byte.MIN_VALUE; + byte[] data_B1 = new byte[Byte.SIZE / 8]; + data_B1 = BitBufferHelper.toByteArray(B1); + Assert.assertTrue(data_B1[0] == -128); + + byte B2 = 64; + byte[] data_B2 = new byte[Byte.SIZE / 8]; + data_B2 = BitBufferHelper.toByteArray(B2); + Assert.assertTrue(data_B2[0] == 64); + + byte B3 = 32; + byte[] data_B3 = new byte[Byte.SIZE / 8]; + data_B3 = BitBufferHelper.toByteArray(B3); + Assert.assertTrue(data_B3[0] == 32); + + } + + @Test + public void testToByteArrayVariable() { + int len = 9; + byte[] data_sh; + data_sh = BitBufferHelper.toByteArray(511, len); + Assert.assertTrue(data_sh[0] == (byte) 255); + Assert.assertTrue(data_sh[1] == (byte) 128); + + data_sh = BitBufferHelper.toByteArray((int) 511, len); + Assert.assertTrue(data_sh[0] == (byte) 255); + Assert.assertTrue(data_sh[1] == (byte) 128); + + data_sh = BitBufferHelper.toByteArray((long) 511, len); + Assert.assertTrue(data_sh[0] == (byte) 255); + Assert.assertTrue(data_sh[1] == (byte) 128); + } + + @Test + public void testToInt() { + byte data[] = { 1 }; + Assert.assertTrue(BitBufferHelper.toNumber(data) == 1); + + byte data2[] = { 1, 1 }; + Assert.assertTrue(BitBufferHelper.toNumber(data2) == 257); + + byte data3[] = { 1, 1, 1 }; + Assert.assertTrue(BitBufferHelper.toNumber(data3) == 65793); + } + + @Test + public void testToLongGetter() { + byte data[] = { 1, 1 }; + Assert.assertTrue(BitBufferHelper.getLong(data) == 257L); + } + + @Test + public void testSetByte() throws Exception { + byte input; + byte[] data = new byte[20]; + + input = 125; + BitBufferHelper.setByte(data, input, 0, Byte.SIZE); + Assert.assertTrue(data[0] == 125); + + input = 109; + BitBufferHelper.setByte(data, input, 152, Byte.SIZE); + Assert.assertTrue(data[19] == 109); + } + + @Test + public void testSetBytes() throws Exception { + byte[] input = { 
0, 1 }; + byte[] data = { 6, 0 }; + + BitBufferHelper.setBytes(data, input, 7, 9); + Assert.assertTrue(data[0] == 6); + Assert.assertTrue(data[1] == 1); + } + + //@Test + //INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001]*/ + public void testInsertBits() throws Exception { + //CASE 1: startOffset%8 == 0 && numBits%8 == 0 + byte inputdata[] = { 75, 110, 107, 80, 10, 12, 35, 100, 125, 65 }; + int startOffset = 0; + int numBits = 8; + + byte data1[] = new byte[2]; + startOffset = 0; + numBits = 16; + BitBufferHelper.insertBits(data1, inputdata, startOffset, numBits); + Assert.assertTrue(data1[0] == 75); + Assert.assertTrue(data1[1] == 110); + + byte data2[] = new byte[4]; + startOffset = 0; + numBits = 32; + BitBufferHelper.insertBits(data2, inputdata, startOffset, numBits); + Assert.assertTrue(data2[0] == 75); + Assert.assertTrue(data2[1] == 110); + Assert.assertTrue(data2[2] == 107); + Assert.assertTrue(data2[3] == 80); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] // OUTPUT: [01001011] [01101000] = {75, 104} + byte data10[] = new byte[2]; + startOffset = 0; + numBits = 13; + BitBufferHelper.insertBits(data10, inputdata, startOffset, numBits); + Assert.assertTrue(data10[0] == 75); + Assert.assertTrue(data10[1] == 104); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] // OUTPUT: [01001000] = {72} + byte data11[] = new byte[4]; + startOffset = 8; + numBits = 6; + BitBufferHelper.insertBits(data11, inputdata, startOffset, numBits); + Assert.assertTrue(data11[1] == 72); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [01001011] [01101110] [01101000] = {75, 110, 105} + byte data12[] = new byte[4]; + startOffset = 0; + numBits = 23; + BitBufferHelper.insertBits(data12, inputdata, startOffset, numBits); + Assert.assertTrue(data12[0] == 75); + Assert.assertTrue(data12[1] == 110); + Assert.assertTrue(data12[2] == 106); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [01001011] [01101110] [01100000] = {75, 110, 96} + byte data13[] = new byte[4]; + startOffset = 8; + numBits = 20; + BitBufferHelper.insertBits(data13, inputdata, startOffset, numBits); + Assert.assertTrue(data13[1] == 75); + Assert.assertTrue(data13[2] == 110); + Assert.assertTrue(data13[3] == 96); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [01001011] [01101110] [01101011] [10100000]= {75, 110, 107, 80} + byte data14[] = new byte[4]; + startOffset = 0; + numBits = 30; + BitBufferHelper.insertBits(data14, inputdata, startOffset, numBits); + Assert.assertTrue(data14[0] == 75); + Assert.assertTrue(data14[1] == 110); + Assert.assertTrue(data14[2] == 107); + Assert.assertTrue(data14[3] == 80); + + //CASE 3: startOffset%8 != 0, numBits%8 = 0 + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [10100000] [00001010] 
[00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00001001] [11000000] = {72, 96} + byte data16[] = new byte[5]; + startOffset = 3; + numBits = 8; + BitBufferHelper.insertBits(data16, inputdata, startOffset, numBits); + Assert.assertTrue(data16[0] == 9); + Assert.assertTrue(data16[1] == 96); + Assert.assertTrue(data16[2] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [00000100] [1011 0110] [1110 0000] = {4, -54, -96} + + startOffset = 3; + numBits = 16; + byte data17[] = new byte[5]; + BitBufferHelper.insertBits(data17, inputdata, startOffset, numBits); + Assert.assertTrue(data17[0] == 9); + Assert.assertTrue(data17[1] == 109); + Assert.assertTrue(data17[2] == -64); + Assert.assertTrue(data17[3] == 0); + + // INPUT: {79, 110, 111} + // = [01001111] [01101110] [01101111] + //OUTPUT: [0000 1001] [1110 1101] [110 00000] = {9, -19, -64} + byte data18[] = new byte[5]; + byte inputdata3[] = { 79, 110, 111 }; + startOffset = 3; + numBits = 16; + BitBufferHelper.insertBits(data18, inputdata3, startOffset, numBits); + Assert.assertTrue(data18[0] == 9); + Assert.assertTrue(data18[1] == -19); + Assert.assertTrue(data18[2] == -64); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 1001] [0110 1101] [1100 1101] [0110 1010] [0000 0001] = {9, 109, -51, 106, 0} + + startOffset = 3; + numBits = 32; + byte data19[] = new byte[5]; + BitBufferHelper.insertBits(data19, inputdata, startOffset, numBits); + Assert.assertTrue(data19[0] == 9); + Assert.assertTrue(data19[1] == 109); + Assert.assertTrue(data19[2] == -51); + Assert.assertTrue(data19[3] == 106); + Assert.assertTrue(data19[4] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: data[4, 5, 6] = [0 010 0101] [1 011 0111] [0 000 0000] = {37, -73, 0} + startOffset = 33; + numBits = 16; + byte data20[] = new byte[7]; + BitBufferHelper.insertBits(data20, inputdata, startOffset, numBits); + Assert.assertTrue(data20[4] == 37); + Assert.assertTrue(data20[5] == -73); + Assert.assertTrue(data20[6] == 0); + + //CASE 4: extranumBits != 0 AND extraOffsetBits != 0 + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 1001] [0100 0000] = {9, 96} + startOffset = 3; + numBits = 7; + byte data21[] = new byte[7]; + BitBufferHelper.insertBits(data21, inputdata, startOffset, numBits); + Assert.assertTrue(data21[0] == 9); + Assert.assertTrue(data21[1] == 64); + Assert.assertTrue(data21[2] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: data = [00000 010] [01011 011] [01110 000] = {37, -73, 0} + 
startOffset = 5; + numBits = 17; + byte data22[] = new byte[7]; + BitBufferHelper.insertBits(data22, inputdata, startOffset, numBits); + Assert.assertTrue(data22[0] == 2); + Assert.assertTrue(data22[1] == 91); + Assert.assertTrue(data22[2] == 112); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 1001] [0110 1101] [110 01101] [01 00000] = {9, 109, -51, 64} + startOffset = 3; + numBits = 23; + byte data23[] = new byte[7]; + BitBufferHelper.insertBits(data23, inputdata, startOffset, numBits); + Assert.assertTrue(data23[0] == 9); + Assert.assertTrue(data23[1] == 109); + Assert.assertTrue(data23[2] == -51); + Assert.assertTrue(data23[3] == 64); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 1001] [0110 1101] = {9, 109} + startOffset = 3; + numBits = 13; + byte data24[] = new byte[7]; + BitBufferHelper.insertBits(data24, inputdata, startOffset, numBits); + Assert.assertTrue(data24[0] == 9); + Assert.assertTrue(data24[1] == 109); + Assert.assertTrue(data24[2] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 0100] [1011 0110] [1110 0110] = {4, -74, -26} + startOffset = 4; + numBits = 20; + byte data25[] = new byte[7]; + BitBufferHelper.insertBits(data25, inputdata, startOffset, numBits); + Assert.assertTrue(data25[0] == 4); + Assert.assertTrue(data25[1] == -74); + Assert.assertTrue(data25[2] == -26); + Assert.assertTrue(data25[3] == -0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [0000 0010] [0101 1011] = {0, 2, 91, 0} + startOffset = 13; + numBits = 11; + byte data26[] = new byte[7]; + BitBufferHelper.insertBits(data26, inputdata, startOffset, numBits); + Assert.assertTrue(data26[0] == 0); + Assert.assertTrue(data26[1] == 2); + Assert.assertTrue(data26[2] == 91); + Assert.assertTrue(data26[3] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [000 01001] [011 01101] [110 0 0000] = {9, 109, -64, 0} + startOffset = 3; + numBits = 17; + byte data27[] = new byte[7]; + BitBufferHelper.insertBits(data27, inputdata, startOffset, numBits); + Assert.assertTrue(data27[0] == 9); + Assert.assertTrue(data27[1] == 109); + Assert.assertTrue(data27[2] == -64); + Assert.assertTrue(data27[3] == 0); + + // INPUT: {75, 110, 107, 80, 10, 12, 35, 100, 125, 65} = + // [01001011] [01101110] [01101011] [01010000] [00001010] [00001100] [00100011] [01100100] [11111101] [01000001] //OUTPUT: [00000000] [00000100] [10110110] [11100000]= {0, 4, -54, -96} + // OUTPUT: [00 000000] [00 000000] [00 010010] [11 011011] [10 011010] [11 010100] [0000 0000] 
= {0, 0, 18, -37,-102,-44,0} + startOffset = 18; + numBits = 34; + byte data28[] = new byte[7]; + BitBufferHelper.insertBits(data28, inputdata, startOffset, numBits); + Assert.assertTrue(data28[0] == 0); + Assert.assertTrue(data28[1] == 0); + Assert.assertTrue(data28[2] == 18); + Assert.assertTrue(data28[3] == -37); + Assert.assertTrue(data28[4] == -102); + Assert.assertTrue(data28[5] == -44); + Assert.assertTrue(data28[6] == 0); + + } + + @Test + public void testGetShort() throws Exception { + byte data[] = new byte[2]; + data[0] = 7; + data[1] = 8; + int length = 9; // num bits + Assert.assertTrue(BitBufferHelper.getShort(data, length) == 264); + + data[0] = 6; + data[1] = 8; + short result = BitBufferHelper.getShort(data, length); + Assert.assertTrue(result == 8); + + data[0] = 8; + data[1] = 47; + result = BitBufferHelper.getShort(data, length); + Assert.assertTrue(result == 47); + + //[0000 0001] [0001 0100] [0110 0100] + byte[] data1 = new byte[2]; + data1[0] = 1; + data1[1] = 20; //data1[2] = 100; + length = 15; + result = BitBufferHelper.getShort(data1, length); + Assert.assertTrue(result == 276); + + byte[] data2 = new byte[2]; + data2[0] = 64; + data2[1] = 99; //data2[2] = 100; + length = 13; + result = BitBufferHelper.getShort(data2, length); + Assert.assertTrue(result == 99); + + byte[] data3 = { 100, 50 }; + result = BitBufferHelper.getShort(data3); + Assert.assertTrue(result == 25650); + } + + @Test + public void testToIntVarLength() throws Exception { + byte data[] = { (byte) 255, (byte) 128 }; + int length = 9; // num bits + Assert.assertTrue(BitBufferHelper.getInt(data, length) == 384); + + byte data2[] = { 0, 8 }; + Assert.assertTrue(BitBufferHelper.getInt(data2, 9) == 8); + + byte data3[] = { 1, 1, 1 }; + Assert.assertTrue(BitBufferHelper.getInt(data3) == 65793); + + byte data4[] = { 1, 1, 1 }; + Assert.assertTrue(BitBufferHelper.getInt(data4) == 65793); + + byte data5[] = { 1, 1 }; + Assert.assertTrue(BitBufferHelper.getInt(data5) == 257); + + } + + @Test + public void testShiftBitstoLSB() { + byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + + byte[] data2 = { 8, 9, 10 }; + byte[] shiftedBytes2 = BitBufferHelper.shiftBitsToLSB(data2, 11); + + Assert.assertTrue(shiftedBytes2[0] == 0); + Assert.assertTrue(shiftedBytes2[1] == 64); + Assert.assertTrue(shiftedBytes2[2] == 72); + + byte[] shiftedBytes = BitBufferHelper.shiftBitsToLSB(data, 49); + + Assert.assertTrue(shiftedBytes[0] == 0); + Assert.assertTrue(shiftedBytes[1] == 2); + Assert.assertTrue(shiftedBytes[2] == 4); + Assert.assertTrue(shiftedBytes[3] == 6); + Assert.assertTrue(shiftedBytes[4] == 8); + Assert.assertTrue(shiftedBytes[5] == 10); + Assert.assertTrue(shiftedBytes[6] == 12); + Assert.assertTrue(shiftedBytes[7] == 14); + Assert.assertTrue(shiftedBytes[8] == 16); + Assert.assertTrue(shiftedBytes[9] == 18); + + byte[] data1 = { 1, 2, 3 }; + byte[] shiftedBytes1 = BitBufferHelper.shiftBitsToLSB(data1, 18); + Assert.assertTrue(shiftedBytes1[0] == 0); + Assert.assertTrue(shiftedBytes1[1] == 4); + Assert.assertTrue(shiftedBytes1[2] == 8); + + } + + @Test + public void testShiftBitstoLSBMSB() { + byte[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 }; + + byte[] clone = BitBufferHelper.shiftBitsToMSB(BitBufferHelper + .shiftBitsToLSB(data, 72), 72); + + Assert.assertTrue(clone[0] == 1); + Assert.assertTrue(clone[1] == 2); + Assert.assertTrue(clone[2] == 3); + Assert.assertTrue(clone[3] == 4); + Assert.assertTrue(clone[4] == 5); + Assert.assertTrue(clone[5] == 6); + Assert.assertTrue(clone[6] == 7); + 
Assert.assertTrue(clone[7] == 8); + Assert.assertTrue(clone[8] == 9); + Assert.assertTrue(clone[9] == 0); + } + +} diff --git a/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/address/EthernetAddressTest.java b/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/address/EthernetAddressTest.java new file mode 100644 index 0000000000..cfdc7851e3 --- /dev/null +++ b/opendaylight/commons/liblldp/src/test/java/org/opendaylight/controller/sal/packet/address/EthernetAddressTest.java @@ -0,0 +1,114 @@ + +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +/** + * @file EthernetAddressTest.java + * + * @brief Unit Tests for EthernetAddress class + * + * Unit Tests for EthernetAddress class + */ +package org.opendaylight.controller.sal.packet.address; + +import org.junit.Assert; +import org.junit.Test; +import org.opendaylight.controller.liblldp.ConstructionException; +import org.opendaylight.controller.liblldp.EthernetAddress; + +public class EthernetAddressTest { + @Test + public void testNonValidConstructor() { + @SuppressWarnings("unused") + EthernetAddress ea1; + // Null input array + try { + ea1 = new EthernetAddress((byte[]) null); + + // Exception is expected if NOT raised test will fail + Assert.assertTrue(false); + } catch (ConstructionException e) { + } + + // Array too short + try { + ea1 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0 }); + + // Exception is expected if NOT raised test will fail + Assert.assertTrue(false); + } catch (ConstructionException e) { + } + + // Array too long + try { + ea1 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x0, + (byte) 0x0 }); + + // Exception is expected if NOT raised test will fail + Assert.assertTrue(false); + } catch (ConstructionException e) { + } + } + + @Test + public void testEquality() { + EthernetAddress ea1; + EthernetAddress ea2; + try { + ea1 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x1 }); + + ea2 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x1 }); + Assert.assertTrue(ea1.equals(ea2)); + } catch (ConstructionException e) { + // Exception is NOT expected if raised test will fail + Assert.assertTrue(false); + } + + try { + ea1 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x1 }); + + ea2 = ea1.clone(); + Assert.assertTrue(ea1.equals(ea2)); + } catch (ConstructionException e) { + // Exception is NOT expected if raised test will fail + Assert.assertTrue(false); + } + + // Check for well knowns + try { + ea1 = EthernetAddress.BROADCASTMAC; + ea2 = new EthernetAddress(new byte[] { (byte) 0xff, (byte) 0xff, + (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff }); + Assert.assertTrue(ea1.equals(ea2)); + } catch (ConstructionException e) { + // Exception is NOT expected if raised test will fail + Assert.assertTrue(false); + } + } + + @Test + public void testUnEquality() { + EthernetAddress ea1; + EthernetAddress ea2; + try { + ea1 = new EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x2 }); + + ea2 = new 
EthernetAddress(new byte[] { (byte) 0x0, (byte) 0x0, + (byte) 0x0, (byte) 0x0, (byte) 0x0, (byte) 0x1 }); + Assert.assertTrue(!ea1.equals(ea2)); + } catch (ConstructionException e) { + // Exception is NOT expected if raised test will fail + Assert.assertTrue(false); + } + } +} diff --git a/opendaylight/commons/opendaylight/pom.xml b/opendaylight/commons/opendaylight/pom.xml index 4f678f6854..66a403560e 100644 --- a/opendaylight/commons/opendaylight/pom.xml +++ b/opendaylight/commons/opendaylight/pom.xml @@ -64,6 +64,13 @@ 0.5.2-SNAPSHOT 1.4 0.2.5-SNAPSHOT + etc/opendaylight/karaf + 00-netty.xml + 01-mdsal.xml + 01-netconf.xml + 03-toaster-sample.xml + 10-rest-connector.xml + 99-netconf-connector.xml 0.4.3-SNAPSHOT 0.4.3-SNAPSHOT 0.1.2-SNAPSHOT @@ -83,6 +90,7 @@ 0000.0002.0038.0 + 1.6.0 1.4.2-SNAPSHOT 2.4.0 0.4.2-SNAPSHOT @@ -338,6 +346,11 @@ akka-osgi_${scala.version} ${akka.version} + + com.typesafe.akka + akka-slf4j_${scala.version} + ${akka.version} + commons-codec commons-codec @@ -845,6 +858,11 @@ config-persister-file-xml-adapter ${config.version} + + org.opendaylight.controller + config-persister-feature-adapter + ${config.version} + org.opendaylight.controller config-persister-impl @@ -994,6 +1012,11 @@ karaf.branding ${karaf.branding.version} + + org.opendaylight.controller + liblldp + ${sal.version} + org.opendaylight.controller logback-config @@ -1204,6 +1227,11 @@ sal-common-util ${mdsal.version} + + org.opendaylight.controller + sal-inmemory-datastore + ${mdsal.version} + org.opendaylight.controller sal-compatibility @@ -1542,6 +1570,38 @@ toaster-config ${mdsal.version} + + org.opendaylight.yangtools + features-yangtools + ${yangtools.version} + features + xml + runtime + + + org.opendaylight.controller.samples + features-toaster + ${mdsal.version} + features + xml + runtime + + + org.opendaylight.controller + features-config-netty + ${config.version} + features + xml + runtime + + + org.opendaylight.controller + features-flow + ${mdsal.version} + features + xml + runtime + org.opendaylight.controller.thirdparty com.sun.jersey.jersey-servlet @@ -1804,7 +1864,7 @@ org.opendaylight.controller - config-features + features-config ${config.version} features xml @@ -1812,7 +1872,7 @@ org.opendaylight.controller - features-odl-protocol-framework + features-protocol-framework ${protocol-framework.version} features xml @@ -1820,7 +1880,7 @@ org.opendaylight.controller - netconf-features + features-netconf ${netconf.version} features xml @@ -1828,7 +1888,7 @@ org.opendaylight.controller - config-persister-features + features-config-persister ${config.version} features xml @@ -1852,7 +1912,7 @@ org.opendaylight.controller - mdsal-features + features-mdsal ${mdsal.version} features xml diff --git a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/osgi/ConfigManagerActivator.java b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/osgi/ConfigManagerActivator.java index 6381836af8..828fcb01e1 100644 --- a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/osgi/ConfigManagerActivator.java +++ b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/osgi/ConfigManagerActivator.java @@ -7,6 +7,14 @@ */ package org.opendaylight.controller.config.manager.impl.osgi; +import static org.opendaylight.controller.config.manager.impl.util.OsgiRegistrationUtil.registerService; +import static 
org.opendaylight.controller.config.manager.impl.util.OsgiRegistrationUtil.wrap; + +import java.lang.management.ManagementFactory; +import java.util.Arrays; +import java.util.List; +import javax.management.InstanceAlreadyExistsException; +import javax.management.MBeanServer; import org.opendaylight.controller.config.manager.impl.ConfigRegistryImpl; import org.opendaylight.controller.config.manager.impl.jmx.ConfigRegistryJMXRegistrator; import org.opendaylight.controller.config.manager.impl.osgi.mapping.CodecRegistryProvider; @@ -14,27 +22,19 @@ import org.opendaylight.controller.config.manager.impl.osgi.mapping.ModuleInfoBu import org.opendaylight.controller.config.manager.impl.osgi.mapping.RefreshingSCPModuleInfoRegistry; import org.opendaylight.controller.config.manager.impl.util.OsgiRegistrationUtil; import org.opendaylight.controller.config.spi.ModuleFactory; +import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy; import org.opendaylight.yangtools.sal.binding.generator.impl.ModuleInfoBackedContext; import org.osgi.framework.BundleActivator; import org.osgi.framework.BundleContext; import org.osgi.util.tracker.ServiceTracker; -import javax.management.InstanceAlreadyExistsException; -import javax.management.MBeanServer; -import java.lang.management.ManagementFactory; -import java.util.Arrays; -import java.util.List; - -import static org.opendaylight.controller.config.manager.impl.util.OsgiRegistrationUtil.registerService; -import static org.opendaylight.controller.config.manager.impl.util.OsgiRegistrationUtil.wrap; - public class ConfigManagerActivator implements BundleActivator { private final MBeanServer configMBeanServer = ManagementFactory.getPlatformMBeanServer(); private AutoCloseable autoCloseable; @Override - public void start(BundleContext context) { + public void start(final BundleContext context) { ModuleInfoBackedContext moduleInfoBackedContext = ModuleInfoBackedContext.create();// the inner strategy is backed by thread context cl? @@ -63,6 +63,7 @@ public class ConfigManagerActivator implements BundleActivator { bundleTracker.open(); // register config registry to OSGi + AutoCloseable clsReg = registerService(context, moduleInfoBackedContext, GeneratedClassLoadingStrategy.class); AutoCloseable configRegReg = registerService(context, configRegistry, ConfigRegistryImpl.class); // register config registry to jmx @@ -79,12 +80,12 @@ public class ConfigManagerActivator implements BundleActivator { serviceTracker.open(); List list = Arrays.asList( - codecRegistryProvider, configRegistry, wrap(bundleTracker), configRegReg, configRegistryJMXRegistrator, wrap(serviceTracker)); + codecRegistryProvider, clsReg,configRegistry, wrap(bundleTracker), configRegReg, configRegistryJMXRegistrator, wrap(serviceTracker)); autoCloseable = OsgiRegistrationUtil.aggregate(list); } @Override - public void stop(BundleContext context) throws Exception { + public void stop(final BundleContext context) throws Exception { autoCloseable.close(); } } diff --git a/opendaylight/config/config-persister-api/src/main/java/org/opendaylight/controller/config/persist/api/ConfigPusher.java b/opendaylight/config/config-persister-api/src/main/java/org/opendaylight/controller/config/persist/api/ConfigPusher.java new file mode 100644 index 0000000000..2dade8a82b --- /dev/null +++ b/opendaylight/config/config-persister-api/src/main/java/org/opendaylight/controller/config/persist/api/ConfigPusher.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2013 Cisco Systems, Inc. and others. 
All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.config.persist.api; + +import java.util.List; +/* + * The config pusher service pushes configs into the config subsystem + */ +public interface ConfigPusher { + + /* + * Pushes configs into the config subsystem + */ + + public void pushConfigs(List configs) throws InterruptedException; +} diff --git a/opendaylight/config/config-persister-feature-adapter/pom.xml b/opendaylight/config/config-persister-feature-adapter/pom.xml new file mode 100644 index 0000000000..7412a51425 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/pom.xml @@ -0,0 +1,74 @@ + + + + 4.0.0 + + + org.opendaylight.controller + config-subsystem + 0.2.5-SNAPSHOT + .. + + + config-persister-feature-adapter + bundle + + + + org.osgi + org.osgi.core + provided + + + org.apache.karaf.features + org.apache.karaf.features.core + ${karaf.version} + provided + + + org.opendaylight.controller + config-persister-impl + + + org.opendaylight.controller + config-persister-api + + + org.opendaylight.controller + config-persister-directory-xml-adapter + + + org.apache.felix + org.apache.felix.utils + 1.6.0 + provided + + + com.google.guava + guava + + + + + + + org.apache.felix + maven-bundle-plugin + true + + + ${project.artifactId} + ${project.version} + org.opendaylight.controller.configpusherfeature.ConfigPusherFeatureActivator + + org.apache.karaf.features.internal.model, + org.apache.felix.utils.version, + org.opendaylight.controller.configpusherfeature.internal + + + + + + + + diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/ConfigPusherFeatureActivator.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/ConfigPusherFeatureActivator.java new file mode 100644 index 0000000000..ea99579f16 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/ConfigPusherFeatureActivator.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature; + +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.opendaylight.controller.configpusherfeature.internal.ConfigPusherCustomizer; +import org.osgi.framework.BundleActivator; +import org.osgi.framework.BundleContext; +import org.osgi.util.tracker.ServiceTracker; + +public class ConfigPusherFeatureActivator implements BundleActivator { + + BundleContext bc = null; + ConfigPusherCustomizer cpc = null; + ServiceTracker cpst = null; + + public void start(BundleContext context) throws Exception { + bc = context; + cpc = new ConfigPusherCustomizer(); + cpst = new ServiceTracker(bc, ConfigPusher.class.getName(), cpc); + cpst.open(); + } + + public void stop(BundleContext context) throws Exception { + if(cpst != null) { + cpst.close(); + cpst = null; + } + if(cpc != null) { + cpc.close(); + cpc = null; + } + bc = null; + } +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/AbstractFeatureWrapper.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/AbstractFeatureWrapper.java new file mode 100644 index 0000000000..1bf2025c46 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/AbstractFeatureWrapper.java @@ -0,0 +1,214 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; + +import javax.xml.bind.JAXBException; + +import org.apache.karaf.features.BundleInfo; +import org.apache.karaf.features.Conditional; +import org.apache.karaf.features.ConfigFileInfo; +import org.apache.karaf.features.Dependency; +import org.apache.karaf.features.Feature; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +/* + * Wrap a Feature for the purposes of extracting the FeatureConfigSnapshotHolders from + * its underlying ConfigFileInfo's + * + * Delegates the the contained feature and provides additional methods. 
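+ * Config files that do not contain a config subsystem snapshot are skipped: the JAXBException thrown while unmarshalling them is caught and logged at debug level.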
+ */ +public class AbstractFeatureWrapper implements Feature { + private static final Logger logger = LoggerFactory.getLogger(AbstractFeatureWrapper.class); + protected Feature feature = null; + + protected AbstractFeatureWrapper() { + // prevent instantiation without Feature + } + + /* + * @param f Feature to wrap + */ + public AbstractFeatureWrapper(Feature f) { + Preconditions.checkNotNull(f,"FeatureWrapper requires non-null Feature in constructor"); + this.feature = f; + } + + /* + * Get FeatureConfigSnapshotHolders appropriate to feed to the config subsystem + * from the underlying Feature Config files + */ + public LinkedHashSet getFeatureConfigSnapshotHolders() throws Exception { + LinkedHashSet snapShotHolders = new LinkedHashSet(); + for(ConfigFileInfo c: getConfigurationFiles()) { + try { + snapShotHolders.add(new FeatureConfigSnapshotHolder(c,this)); + } catch (JAXBException e) { + logger.debug("{} is not a config subsystem config file",c.getFinalname()); + } + } + return snapShotHolders; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((feature == null) ? 0 : feature.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + AbstractFeatureWrapper other = (AbstractFeatureWrapper) obj; + if (feature == null) { + if (other.feature != null) + return false; + } else if (!feature.equals(other.feature)) + return false; + return true; + } + + @Override + public String toString() { + return feature.getName(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getId() + */ + public String getId() { + return feature.getId(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getName() + */ + public String getName() { + return feature.getName(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getDescription() + */ + public String getDescription() { + return feature.getDescription(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getDetails() + */ + public String getDetails() { + return feature.getDetails(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getVersion() + */ + public String getVersion() { + return feature.getVersion(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#hasVersion() + */ + public boolean hasVersion() { + return feature.hasVersion(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getResolver() + */ + public String getResolver() { + return feature.getResolver(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getInstall() + */ + public String getInstall() { + return feature.getInstall(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getDependencies() + */ + public List getDependencies() { + return feature.getDependencies(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getBundles() + */ + public List getBundles() { + return feature.getBundles(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getConfigurations() + */ + public Map> getConfigurations() { + return feature.getConfigurations(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getConfigurationFiles() + */ + public List getConfigurationFiles() { + return feature.getConfigurationFiles(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getConditional() 
+ */ + public List getConditional() { + return feature.getConditional(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getStartLevel() + */ + public int getStartLevel() { + return feature.getStartLevel(); + } + + /** + * @return + * @see org.apache.karaf.features.Feature#getRegion() + */ + public String getRegion() { + return feature.getRegion(); + } + +} \ No newline at end of file diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ChildAwareFeatureWrapper.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ChildAwareFeatureWrapper.java new file mode 100644 index 0000000000..8d2ae68a9a --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ChildAwareFeatureWrapper.java @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.util.LinkedHashSet; +import java.util.List; + +import javax.xml.bind.JAXBException; + +import org.apache.felix.utils.version.VersionRange; +import org.apache.felix.utils.version.VersionTable; +import org.apache.karaf.features.Dependency; +import org.apache.karaf.features.Feature; +import org.apache.karaf.features.FeaturesService; +import org.osgi.framework.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +/* + * Wrap a Feature for the purposes of extracting the FeatureConfigSnapshotHolders from + * its underlying ConfigFileInfo's and those of its children recursively + * + * Delegates the the contained feature and provides additional methods. 
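+ * Child features are resolved through the FeaturesService by matching each Dependency's name and version range against the features the service knows about.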
+ */ +public class ChildAwareFeatureWrapper extends AbstractFeatureWrapper implements Feature { + private static final Logger logger = LoggerFactory.getLogger(ChildAwareFeatureWrapper.class); + private FeaturesService featuresService= null; + + protected ChildAwareFeatureWrapper(Feature f) { + // Don't use without a feature service + } + + /* + * @param f Feature to wrap + * @param s FeaturesService to look up dependencies + */ + ChildAwareFeatureWrapper(Feature f, FeaturesService s) throws Exception { + super(s.getFeature(f.getName(), f.getVersion())); + Preconditions.checkNotNull(s, "FeatureWrapper requires non-null FeatureService in constructor"); + this.featuresService = s; + } + + protected FeaturesService getFeaturesService() { + return featuresService; + } + + /* + * Get FeatureConfigSnapshotHolders appropriate to feed to the config subsystem + * from the underlying Feature Config files and those of its children recursively + */ + public LinkedHashSet getChildFeatures() throws Exception { + List dependencies = feature.getDependencies(); + LinkedHashSet childFeatures = new LinkedHashSet(); + if(dependencies != null) { + for(Dependency dependency: dependencies) { + Feature fi = extractFeatureFromDependency(dependency); + if(fi != null){ + ChildAwareFeatureWrapper wrappedFeature = new ChildAwareFeatureWrapper(fi,featuresService); + childFeatures.add(wrappedFeature); + } + } + } + return childFeatures; + } + + public LinkedHashSet getFeatureConfigSnapshotHolders() throws Exception { + LinkedHashSet snapShotHolders = new LinkedHashSet(); + for(ChildAwareFeatureWrapper c: getChildFeatures()) { + for(FeatureConfigSnapshotHolder h: c.getFeatureConfigSnapshotHolders()) { + FeatureConfigSnapshotHolder f; + try { + f = new FeatureConfigSnapshotHolder(h,this); + snapShotHolders.add(f); + } catch (JAXBException e) { + logger.debug("{} is not a config subsystem config file",h.getFileInfo().getFinalname()); + } + } + } + snapShotHolders.addAll(super.getFeatureConfigSnapshotHolders()); + return snapShotHolders; + } + + protected Feature extractFeatureFromDependency(Dependency dependency) throws Exception { + Feature[] features = featuresService.listFeatures(); + VersionRange range = org.apache.karaf.features.internal.model.Feature.DEFAULT_VERSION.equals(dependency.getVersion()) + ? VersionRange.ANY_VERSION : new VersionRange(dependency.getVersion(), true, true); + Feature fi = null; + for(Feature f: features) { + if (f.getName().equals(dependency.getName())) { + Version v = VersionTable.getVersion(f.getVersion()); + if (range.contains(v)) { + if (fi == null || VersionTable.getVersion(fi.getVersion()).compareTo(v) < 0) { + fi = f; + break; + } + } + } + } + return fi; + } + +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigFeaturesListener.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigFeaturesListener.java new file mode 100644 index 0000000000..f5f1b856ac --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigFeaturesListener.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +import org.apache.karaf.features.FeatureEvent; +import org.apache.karaf.features.FeaturesListener; +import org.apache.karaf.features.FeaturesService; +import org.apache.karaf.features.RepositoryEvent; +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ConfigFeaturesListener implements FeaturesListener, AutoCloseable { + private static final Logger logger = LoggerFactory.getLogger(ConfigFeaturesListener.class); + private static final int QUEUE_SIZE = 100; + private BlockingQueue queue = new LinkedBlockingQueue(QUEUE_SIZE); + Thread pushingThread = null; + + public ConfigFeaturesListener(ConfigPusher p, FeaturesService f) { + pushingThread = new Thread(new ConfigPushingRunnable(p, f, queue), "ConfigFeatureListener - ConfigPusher"); + pushingThread.start(); + } + + @Override + public void featureEvent(FeatureEvent event) { + queue.offer(event); + } + + @Override + public void repositoryEvent(RepositoryEvent event) { + logger.debug("Repository: " + event.getType() + " " + event.getRepository()); + } + + @Override + public void close() { + if(pushingThread != null) { + pushingThread.interrupt(); + pushingThread = null; + } + } +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPusherCustomizer.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPusherCustomizer.java new file mode 100644 index 0000000000..d33a8cba92 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPusherCustomizer.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import org.apache.karaf.features.FeaturesService; +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.osgi.framework.BundleContext; +import org.osgi.framework.ServiceReference; +import org.osgi.util.tracker.ServiceTracker; +import org.osgi.util.tracker.ServiceTrackerCustomizer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ConfigPusherCustomizer implements ServiceTrackerCustomizer, AutoCloseable { + private static final Logger logger = LoggerFactory.getLogger(ConfigPusherCustomizer.class); + private ConfigFeaturesListener configFeaturesListener = null; + private FeatureServiceCustomizer featureServiceCustomizer = null; + private ServiceTracker fsst = null; + + @Override + public ConfigPusher addingService(ServiceReference configPusherServiceReference) { + logger.trace("Got ConfigPusherCustomizer.addingService {}", configPusherServiceReference); + BundleContext bc = configPusherServiceReference.getBundle().getBundleContext(); + ConfigPusher cpService = bc.getService(configPusherServiceReference); + featureServiceCustomizer = new FeatureServiceCustomizer(cpService); + fsst = new ServiceTracker(bc, FeaturesService.class.getName(), featureServiceCustomizer); + fsst.open(); + return cpService; + } + + @Override + public void modifiedService(ServiceReference configPusherServiceReference, ConfigPusher configPusher) { + // we don't care if the properties change + } + + @Override + public void removedService(ServiceReference configPusherServiceReference, ConfigPusher configPusher) { + this.close(); + } + + @Override + public void close() { + if(fsst != null) { + fsst.close(); + fsst = null; + } + if(configFeaturesListener != null) { + configFeaturesListener.close(); + configFeaturesListener = null; + } + if(featureServiceCustomizer != null) { + featureServiceCustomizer.close(); + featureServiceCustomizer = null; + } + } +} \ No newline at end of file diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPushingRunnable.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPushingRunnable.java new file mode 100644 index 0000000000..06c5c920f4 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/ConfigPushingRunnable.java @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; + +import org.apache.karaf.features.Feature; +import org.apache.karaf.features.FeatureEvent; +import org.apache.karaf.features.FeatureEvent.EventType; +import org.apache.karaf.features.FeaturesService; +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.LinkedHashMultimap; + +public class ConfigPushingRunnable implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(ConfigPushingRunnable.class); + private static final int POLL_TIME = 1; + private BlockingQueue queue; + private FeatureConfigPusher configPusher; + public ConfigPushingRunnable(ConfigPusher p, FeaturesService f,BlockingQueue q) { + queue = q; + configPusher = new FeatureConfigPusher(p, f); + } + + @Override + public void run() { + List toInstall = new ArrayList(); + FeatureEvent event; + boolean interuppted = false; + while(true) { + try { + if(!interuppted) { + if(toInstall.isEmpty()) { + event = queue.take(); + } else { + event = queue.poll(POLL_TIME, TimeUnit.MILLISECONDS); + } + if(event != null && event.getFeature() !=null) { + processFeatureEvent(event,toInstall); + } + } else if(toInstall.isEmpty()) { + logger.error("ConfigPushingRunnable - exiting"); + return; + } + } catch (InterruptedException e) { + logger.error("ConfigPushingRunnable - interupted"); + interuppted = true; + } catch (Exception e) { + logger.error("Exception while processing features {}", e); + } + } + } + + protected void processFeatureEvent(FeatureEvent event, List toInstall) throws InterruptedException, Exception { + if(event.getType() == EventType.FeatureInstalled) { + toInstall.add(event.getFeature()); + LinkedHashMultimap result = configPusher.pushConfigs(toInstall); + toInstall.removeAll(result.keySet()); + } else if(event.getType() == EventType.FeatureUninstalled) { + toInstall.remove(event.getFeature()); + } + } + + protected void logPushResult(LinkedHashMultimap results) { + for(Feature f:results.keySet()) { + logger.info("Pushed configs for feature {} {}",f,results.get(f)); + } + } +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigPusher.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigPusher.java new file mode 100644 index 0000000000..1c094ad2dc --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigPusher.java @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; + +import org.apache.karaf.features.Feature; +import org.apache.karaf.features.FeaturesService; +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.opendaylight.controller.config.persist.api.ConfigSnapshotHolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.LinkedHashMultimap; + +/* + * Simple class to push configs to the config subsystem from Feature's configfiles + */ +public class FeatureConfigPusher { + private static final Logger logger = LoggerFactory.getLogger(FeatureConfigPusher.class); + private FeaturesService featuresService = null; + private ConfigPusher pusher = null; + /* + * A LinkedHashSet (to preserve order and insure uniqueness) of the pushedConfigs + * This is used to prevent pushing duplicate configs if a Feature is in multiple dependency + * chains. Also, preserves the *original* Feature chain for which we pushed the config. + * (which is handy for logging). + */ + LinkedHashSet pushedConfigs = new LinkedHashSet(); + /* + * LinkedHashMultimap to track which configs we pushed for each Feature installation + * For future use + */ + LinkedHashMultimap feature2configs = LinkedHashMultimap.create(); + + /* + * @param p - ConfigPusher to push ConfigSnapshotHolders + */ + public FeatureConfigPusher(ConfigPusher p, FeaturesService f) { + pusher = p; + featuresService = f; + } + /* + * Push config files from Features to config subsystem + * @param features - list of Features to extract config files from recursively and push + * to the config subsystem + * + * @return A LinkedHashMultimap of Features to the FeatureConfigSnapshotHolder actually pushed + * If a Feature is not in the returned LinkedHashMultimap then we couldn't push its configs + * (Ususally because it was not yet installed) + */ + public LinkedHashMultimap pushConfigs(List features) throws Exception, InterruptedException { + LinkedHashMultimap pushedFeatures = LinkedHashMultimap.create(); + for(Feature feature: features) { + LinkedHashSet configSnapShots = pushConfig(feature); + if(!configSnapShots.isEmpty()) { + pushedFeatures.putAll(feature,configSnapShots); + } + } + return pushedFeatures; + } + + private LinkedHashSet pushConfig(Feature feature) throws Exception, InterruptedException { + LinkedHashSet configs = new LinkedHashSet(); + if(isInstalled(feature)) { + ChildAwareFeatureWrapper wrappedFeature = new ChildAwareFeatureWrapper(feature,featuresService); + configs = wrappedFeature.getFeatureConfigSnapshotHolders(); + if(!configs.isEmpty()) { + configs = pushConfig(configs); + feature2configs.putAll(feature, configs); + } + } + return configs; + } + + private boolean isInstalled(Feature feature) { + List installedFeatures = Arrays.asList(featuresService.listInstalledFeatures()); + return installedFeatures.contains(feature); + } + + private LinkedHashSet pushConfig(LinkedHashSet configs) throws InterruptedException { + LinkedHashSet configsToPush = new LinkedHashSet(configs); + configsToPush.removeAll(pushedConfigs); + if(!configsToPush.isEmpty()) { + pusher.pushConfigs(new ArrayList(configsToPush)); + 
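+            // Record what was just pushed so a config shared by overlapping feature dependency chains is pushed only once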
pushedConfigs.addAll(configsToPush); + } + LinkedHashSet configsPushed = new LinkedHashSet(pushedConfigs); + configsPushed.retainAll(configs); + return configsPushed; + } +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigSnapshotHolder.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigSnapshotHolder.java new file mode 100644 index 0000000000..d1a92ebe7f --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureConfigSnapshotHolder.java @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import java.io.File; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; +import java.util.SortedSet; + +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Unmarshaller; + +import org.apache.karaf.features.ConfigFileInfo; +import org.apache.karaf.features.Feature; +import org.opendaylight.controller.config.persist.api.ConfigSnapshotHolder; +import org.opendaylight.controller.config.persist.storage.file.xml.model.ConfigSnapshot; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; + +/* + * A ConfigSnapshotHolder that can track all the additional information + * relavent to the fact we are getting these from a Feature. + * + * Includes tracking the 'featureChain' - an reverse ordered list of the dependency + * graph of features that caused us to push this FeatureConfigSnapshotHolder. + * So if A -> B -> C, then the feature chain would be C -> B -> A + */ +public class FeatureConfigSnapshotHolder implements ConfigSnapshotHolder { + private ConfigSnapshot unmarshalled = null; + private ConfigFileInfo fileInfo = null; + private List featureChain = new ArrayList(); + + /* + * @param holder - FeatureConfigSnapshotHolder that we + * @param feature - new + */ + public FeatureConfigSnapshotHolder(final FeatureConfigSnapshotHolder holder, final Feature feature) throws JAXBException { + this(holder.fileInfo,holder.getFeature()); + this.featureChain.add(feature); + } + + /* + * Create a FeatureConfigSnapshotHolder for a given ConfigFileInfo and record the associated + * feature we are creating it from. 
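+     * The file named by fileInfo.getFinalname() is unmarshalled into a ConfigSnapshot via JAXB, so a JAXBException here indicates the file is not a config subsystem snapshot.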
+ * @param fileInfo - ConfigFileInfo to read into the ConfigSnapshot + * @param feature - Feature the ConfigFileInfo was attached to + */ + public FeatureConfigSnapshotHolder(final ConfigFileInfo fileInfo, final Feature feature) throws JAXBException { + Preconditions.checkNotNull(fileInfo); + Preconditions.checkNotNull(fileInfo.getFinalname()); + Preconditions.checkNotNull(feature); + this.fileInfo = fileInfo; + this.featureChain.add(feature); + JAXBContext jaxbContext = JAXBContext.newInstance(ConfigSnapshot.class); + Unmarshaller um = jaxbContext.createUnmarshaller(); + File file = new File(fileInfo.getFinalname()); + unmarshalled = ((ConfigSnapshot) um.unmarshal(file)); + } + /* + * (non-Javadoc) + * @see java.lang.Object#hashCode() + * + * We really care most about the underlying ConfigShapshot, so compute hashcode on that + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((unmarshalled != null && unmarshalled.getConfigSnapshot() == null) ? 0 : unmarshalled.getConfigSnapshot().hashCode()); + return result; + } + /* + * (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + * * + * We really care most about the underlying ConfigShapshot, so compute equality on that + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + FeatureConfigSnapshotHolder fcsh = (FeatureConfigSnapshotHolder)obj; + if(this.unmarshalled.getConfigSnapshot().equals(fcsh.unmarshalled.getConfigSnapshot())) { + return true; + } + return false; + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + Path p = Paths.get(fileInfo.getFinalname()); + b.append(p.getFileName()) + .append("(") + .append(getCauseFeature()) + .append(",") + .append(getFeature()) + .append(")"); + return b.toString(); + + } + + @Override + public String getConfigSnapshot() { + return unmarshalled.getConfigSnapshot(); + } + + @Override + public SortedSet getCapabilities() { + return unmarshalled.getCapabilities(); + } + + public ConfigFileInfo getFileInfo() { + return fileInfo; + } + + /* + * @returns The original feature to which the ConfigFileInfo was attached + * Example: + * A -> B -> C, ConfigFileInfo Foo is attached to C. + * feature:install A + * thus C is the 'Feature' Foo was attached. + */ + public Feature getFeature() { + return featureChain.get(0); + } + + /* + * @return The dependency chain of the features that caused the ConfigFileInfo to be pushed in reverse order. + * Example: + * A -> B -> C, ConfigFileInfo Foo is attached to C. + * The returned list is + * [C,B,A] + */ + public ImmutableList getFeatureChain() { + return ImmutableList.copyOf(Lists.reverse(featureChain)); + } + + /* + * @return The feature the installation of which was the root cause + * of this pushing of the ConfigFileInfo. + * Example: + * A -> B -> C, ConfigFileInfo Foo is attached to C. + * feature:install A + * this A is the 'Cause' of the installation of Foo. 
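The constructor above loads the feature's config file into a ConfigSnapshot with JAXB before the holder can serve getConfigSnapshot() and getCapabilities(). A self-contained sketch of that context, unmarshaller, file pattern, using a hypothetical Snapshot root element and file name rather than the real ConfigSnapshot class:

import java.io.File;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "snapshot")
@XmlAccessorType(XmlAccessType.FIELD)
class Snapshot {
    // Maps the <configuration> child element of the XML file onto this field.
    @XmlElement(name = "configuration")
    String configuration;
}

public class UnmarshalExample {
    public static void main(String[] args) throws JAXBException {
        // Same shape as the constructor above: build a context, create an unmarshaller, read the file.
        JAXBContext ctx = JAXBContext.newInstance(Snapshot.class);
        Unmarshaller um = ctx.createUnmarshaller();
        Snapshot snapshot = (Snapshot) um.unmarshal(new File("my-config.xml")); // file must exist when run
        System.out.println(snapshot.configuration);
    }
}

The real holder keeps the unmarshalled ConfigSnapshot around and delegates getConfigSnapshot() and getCapabilities() to it, as shown in the rest of this file.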
+ */ + public Feature getCauseFeature() { + return Iterables.getLast(featureChain); + } +} diff --git a/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureServiceCustomizer.java b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureServiceCustomizer.java new file mode 100644 index 0000000000..e72c8278e5 --- /dev/null +++ b/opendaylight/config/config-persister-feature-adapter/src/main/java/org/opendaylight/controller/configpusherfeature/internal/FeatureServiceCustomizer.java @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.configpusherfeature.internal; + +import org.apache.karaf.features.FeaturesListener; +import org.apache.karaf.features.FeaturesService; +import org.opendaylight.controller.config.persist.api.ConfigPusher; +import org.osgi.framework.BundleContext; +import org.osgi.framework.ServiceReference; +import org.osgi.framework.ServiceRegistration; +import org.osgi.util.tracker.ServiceTrackerCustomizer; + +public class FeatureServiceCustomizer implements ServiceTrackerCustomizer, AutoCloseable { + private ConfigPusher configPusher = null; + private ConfigFeaturesListener configFeaturesListener = null; + private ServiceRegistration registration; + + FeatureServiceCustomizer(ConfigPusher c) { + configPusher = c; + } + + + @Override + public FeaturesService addingService(ServiceReference reference) { + BundleContext bc = reference.getBundle().getBundleContext(); + FeaturesService featureService = bc.getService(reference); + configFeaturesListener = new ConfigFeaturesListener(configPusher,featureService); + registration = bc.registerService(FeaturesListener.class.getCanonicalName(), configFeaturesListener, null); + return featureService; + } + + @Override + public void modifiedService(ServiceReference reference, + FeaturesService service) { + // we don't care if the properties change + + } + + @Override + public void removedService(ServiceReference reference, + FeaturesService service) { + close(); + } + + @Override + public void close() { + if(registration != null) { + registration.unregister(); + registration = null; + } + } + +} diff --git a/opendaylight/config/pom.xml b/opendaylight/config/pom.xml index 343d13e9c1..b8ad26116a 100644 --- a/opendaylight/config/pom.xml +++ b/opendaylight/config/pom.xml @@ -23,6 +23,7 @@ config-util config-persister-api config-persister-file-xml-adapter + config-persister-feature-adapter yang-jmx-generator yang-jmx-generator-plugin yang-test diff --git a/opendaylight/distribution/opendaylight-karaf/pom.xml b/opendaylight/distribution/opendaylight-karaf/pom.xml index 5effbb09fc..b3c3f20ba8 100644 --- a/opendaylight/distribution/opendaylight-karaf/pom.xml +++ b/opendaylight/distribution/opendaylight-karaf/pom.xml @@ -61,14 +61,6 @@ kar runtime - - org.opendaylight.controller - config-netty-features - features - xml - runtime - - org.opendaylight.controller @@ -89,7 +81,14 @@ org.opendaylight.controller - mdsal-features + features-mdsal + features + xml + runtime + + + org.opendaylight.controller + features-flow features xml runtime diff --git 
a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/00-netty.xml b/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/00-netty.xml deleted file mode 100644 index 2365c700f9..0000000000 --- a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/00-netty.xml +++ /dev/null @@ -1,60 +0,0 @@ - - - urn:opendaylight:params:xml:ns:yang:controller:netty?module=netty&revision=2013-11-19 - urn:opendaylight:params:xml:ns:yang:controller:netty:eventexecutor?module=netty-event-executor&revision=2013-11-12 - urn:opendaylight:params:xml:ns:yang:controller:netty:threadgroup?module=threadgroup&revision=2013-11-07 - urn:opendaylight:params:xml:ns:yang:controller:netty:timer?module=netty-timer&revision=2013-11-19 - - - - - - - netty:netty-threadgroup-fixed - global-boss-group - - - netty:netty-threadgroup-fixed - global-worker-group - - - netty:netty-hashed-wheel-timer - global-timer - - - netty:netty-global-event-executor - singleton - - - - - - netty:netty-threadgroup - - global-boss-group - /modules/module[type='netty-threadgroup-fixed'][name='global-boss-group'] - - - global-worker-group - /modules/module[type='netty-threadgroup-fixed'][name='global-worker-group'] - - - - netty:netty-event-executor - - global-event-executor - /modules/module[type='netty-global-event-executor'][name='singleton'] - - - - netty:netty-timer - - global-timer - /modules/module[type='netty-hashed-wheel-timer'][name='global-timer'] - - - - - - - diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/01-md-sal.xml b/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/01-md-sal.xml deleted file mode 100644 index 619ab06d8d..0000000000 --- a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/01-md-sal.xml +++ /dev/null @@ -1,203 +0,0 @@ - - - - - - - - - prefix:schema-service-singleton - yang-schema-service - - - - - prefix:hash-map-data-store - hash-map-data-store - - - - - - - prefix:dom-broker-impl - dom-broker - - - dom:dom-data-store - - hash-map-data-store - - - - - - - - - prefix:binding-broker-impl - binding-broker-impl - - binding:binding-notification-service - binding-notification-broker - - - binding:binding-data-broker - binding-data-broker - - - - prefix:runtime-generated-mapping - runtime-mapping-singleton - - - prefix:binding-notification-broker - binding-notification-broker - - - - prefix:binding-data-broker - binding-data-broker - - dom:dom-broker-osgi-registry - dom-broker - - - binding:binding-dom-mapping-service - runtime-mapping-singleton - - - - - - - - - - dom:schema-service - - yang-schema-service - /modules/module[type='schema-service-singleton'][name='yang-schema-service'] - - - - binding:binding-notification-service - - binding-notification-broker - /modules/module[type='binding-notification-broker'][name='binding-notification-broker'] - - - - - dom:dom-data-store - - hash-map-data-store - /modules/module[type='hash-map-data-store'][name='hash-map-data-store'] - - - - - - - - binding:binding-broker-osgi-registry - - binding-osgi-broker - /modules/module[type='binding-broker-impl'][name='binding-broker-impl'] - - - - binding:binding-rpc-registry - - binding-rpc-broker - /modules/module[type='binding-broker-impl'][name='binding-broker-impl'] - - - - binding-impl:binding-dom-mapping-service - - runtime-mapping-singleton - 
/modules/module[type='runtime-generated-mapping'][name='runtime-mapping-singleton'] - - - - dom:dom-broker-osgi-registry - - dom-broker - /modules/module[type='dom-broker-impl'][name='dom-broker'] - - - - - binding:binding-data-broker - - binding-data-broker - - /modules/module[type='binding-data-broker'][name='binding-data-broker'] - - - - - - - - - - - - urn:opendaylight:params:xml:ns:yang:controller:netty:eventexecutor?module=netty-event-executor&revision=2013-11-12 - urn:opendaylight:params:xml:ns:yang:controller:threadpool?module=threadpool&revision=2013-04-09 - urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding?module=opendaylight-md-sal-binding&revision=2013-10-28 - urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom?module=opendaylight-md-sal-dom&revision=2013-10-28 - urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl?module=opendaylight-sal-binding-broker-impl&revision=2013-10-28 - urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl?module=opendaylight-sal-dom-broker-impl&revision=2013-10-28 - urn:opendaylight:params:xml:ns:yang:controller:md:sal:common?module=opendaylight-md-sal-common&revision=2013-10-28 - - diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/03-toaster-sample.xml b/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/03-toaster-sample.xml deleted file mode 100644 index c481485c92..0000000000 --- a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/initial/03-toaster-sample.xml +++ /dev/null @@ -1,73 +0,0 @@ - - - - - - - prefix:toaster-provider-impl - - toaster-provider-impl - - - binding:binding-rpc-registry - binding-rpc-broker - - - - - binding:binding-notification-service - - binding-notification-broker - - - - - - prefix:toaster-consumer-impl - - toaster-consumer-impl - - - binding:binding-rpc-registry - binding-rpc-broker - - - - - binding:binding-notification-service - - binding-notification-broker - - - - - - - toaster:toaster-provider - - toaster-provider - /modules/module[type='toaster-provider-impl'][name='toaster-provider-impl'] - - - - toaster:toaster-consumer - - toaster-consumer - /modules/module[type='toaster-consumer-impl'][name='toaster-consumer-impl'] - - - - - - - - - urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding?module=opendaylight-md-sal-binding&revision=2013-10-28 - urn:opendaylight:params:xml:ns:yang:controller:config:toaster-consumer?module=toaster-consumer&revision=2014-01-31 - urn:opendaylight:params:xml:ns:yang:controller:config:toaster-consumer:impl?module=toaster-consumer-impl&revision=2014-01-31 - urn:opendaylight:params:xml:ns:yang:controller:config:toaster-provider?module=toaster-provider&revision=2014-01-31 - urn:opendaylight:params:xml:ns:yang:controller:config:toaster-provider:impl?module=toaster-provider-impl&revision=2014-01-31 - - - - diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml b/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml index d1a5dcc416..ed659bf603 100644 --- a/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml +++ b/opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml @@ -2,7 +2,7 @@ - %date{"yyyy-MM-dd HH:mm:ss.SSS z"} [%thread] %-5level %logger{36} - %msg%n + %date{"yyyy-MM-dd HH:mm:ss.SSS z"} [%thread] %-5level %logger{36} %X{akkaSource} - %msg%n @@ -76,6 +76,10 @@ + + + + + 
+ + + - - - prefix:inmemory-operational-datastore-provider operational-store-service @@ -97,20 +75,11 @@ config-dom-store-spi:config-dom-datastore config-store-service - - operational-dom-store-spi:operational-dom-datastore operational-store-service - - - @@ -147,17 +116,6 @@ - - @@ -168,28 +126,6 @@ - - - - - - - config-dom-store-spi:config-dom-datastore diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftActor.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftActor.java index 0a979d24ee..ae8b6fe8e3 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftActor.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftActor.java @@ -295,6 +295,22 @@ public abstract class RaftActor extends UntypedPersistentActor { return currentBehavior.state(); } + protected ReplicatedLogEntry getLastLogEntry() { + return replicatedLog.last(); + } + + protected Long getCurrentTerm(){ + return context.getTermInformation().getCurrentTerm(); + } + + protected Long getCommitIndex(){ + return context.getCommitIndex(); + } + + protected Long getLastApplied(){ + return context.getLastApplied(); + } + /** * setPeerAddress sets the address of a known peer at a later time. *

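For context on the indices exposed by the new protected getters above: commitIndex marks how far the log is known to be committed, lastApplied marks how far it has actually been applied to the state machine, and applyLogToStateMachine (touched later in this patch) walks the gap in order. A toy model of that walk, not the actor code itself:

import java.util.Arrays;
import java.util.List;

public class ApplyLoopExample {
    public static void main(String[] args) {
        List<String> log = Arrays.asList("put a=1", "put b=2", "put c=3"); // log indices 0..2
        long lastApplied = 0;  // entry 0 has already been applied to the state machine
        long commitIndex = 2;  // entries up to index 2 are known to be committed

        // Apply everything in (lastApplied, commitIndex], in log order.
        for (long i = lastApplied + 1; i <= commitIndex; i++) {
            System.out.println("applying index " + i + ": " + log.get((int) i));
        }
        lastApplied = commitIndex; // the actor then records the new lastApplied
        System.out.println("lastApplied is now " + lastApplied);
    }
}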
@@ -602,7 +618,7 @@ public abstract class RaftActor extends UntypedPersistentActor { } @Override public void update(long currentTerm, String votedFor) { - LOG.info("Set currentTerm={}, votedFor={}", currentTerm, votedFor); + LOG.debug("Set currentTerm={}, votedFor={}", currentTerm, votedFor); this.currentTerm = currentTerm; this.votedFor = votedFor; diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java index 0a553b40fd..251a13d583 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java @@ -127,6 +127,9 @@ public abstract class AbstractRaftActorBehavior implements RaftActorBehavior { protected RaftState requestVote(ActorRef sender, RequestVote requestVote) { + + context.getLogger().debug(requestVote.toString()); + boolean grantVote = false; // Reply false if term < currentTerm (§5.1) @@ -301,7 +304,7 @@ public abstract class AbstractRaftActorBehavior implements RaftActorBehavior { * * @param index a log index that is known to be committed */ - protected void applyLogToStateMachine(long index) { + protected void applyLogToStateMachine(final long index) { // Now maybe we apply to the state machine for (long i = context.getLastApplied() + 1; i < index + 1; i++) { @@ -326,6 +329,7 @@ public abstract class AbstractRaftActorBehavior implements RaftActorBehavior { } // Send a local message to the local RaftActor (it's derived class to be // specific to apply the log to it's index) + context.getLogger().debug("Setting last applied to {}", index); context.setLastApplied(index); } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java index c125bd32b6..bb1927ef23 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java @@ -81,7 +81,7 @@ public class Candidate extends AbstractRaftActorBehavior { @Override protected RaftState handleAppendEntries(ActorRef sender, AppendEntries appendEntries) { - context.getLogger().info("Candidate: Received {}", appendEntries.toString()); + context.getLogger().debug(appendEntries.toString()); return state(); } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Follower.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Follower.java index c8cd41dfa1..54e0494b9d 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Follower.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Follower.java @@ -42,7 +42,7 @@ public class Follower extends AbstractRaftActorBehavior { if(appendEntries.getEntries() != null && appendEntries.getEntries().size() > 0) { context.getLogger() - .info("Follower: Received {}", appendEntries.toString()); + 
.debug(appendEntries.toString()); } // TODO : Refactor this method into a bunch of smaller methods diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java index a50666233c..234f9db664 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java @@ -19,7 +19,6 @@ import org.opendaylight.controller.cluster.raft.FollowerLogInformationImpl; import org.opendaylight.controller.cluster.raft.RaftActorContext; import org.opendaylight.controller.cluster.raft.RaftState; import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry; -import org.opendaylight.controller.cluster.raft.base.messages.ApplyState; import org.opendaylight.controller.cluster.raft.base.messages.Replicate; import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat; import org.opendaylight.controller.cluster.raft.base.messages.SendInstallSnapshot; @@ -121,7 +120,7 @@ public class Leader extends AbstractRaftActorBehavior { @Override protected RaftState handleAppendEntries(ActorRef sender, AppendEntries appendEntries) { - context.getLogger().info("Leader: Received {}", appendEntries.toString()); + context.getLogger().debug(appendEntries.toString()); return state(); } @@ -131,7 +130,7 @@ public class Leader extends AbstractRaftActorBehavior { if(! appendEntriesReply.isSuccess()) { context.getLogger() - .info("Leader: Received {}", appendEntriesReply.toString()); + .debug(appendEntriesReply.toString()); } // Update the FollowerLogInformation @@ -264,26 +263,18 @@ public class Leader extends AbstractRaftActorBehavior { context.getLogger().debug("Replicate message " + logIndex); + // Create a tracker entry we will use this later to notify the + // client actor + trackerList.add( + new ClientRequestTrackerImpl(replicate.getClientActor(), + replicate.getIdentifier(), + logIndex) + ); + if (followers.size() == 0) { - context.setCommitIndex( - replicate.getReplicatedLogEntry().getIndex()); - - context.getActor() - .tell(new ApplyState(replicate.getClientActor(), - replicate.getIdentifier(), - replicate.getReplicatedLogEntry()), - context.getActor() - ); + context.setCommitIndex(logIndex); + applyLogToStateMachine(logIndex); } else { - - // Create a tracker entry we will use this later to notify the - // client actor - trackerList.add( - new ClientRequestTrackerImpl(replicate.getClientActor(), - replicate.getIdentifier(), - logIndex) - ); - sendAppendEntries(); } } @@ -303,12 +294,7 @@ public class Leader extends AbstractRaftActorBehavior { List entries = Collections.emptyList(); if (context.getReplicatedLog().isPresent(nextIndex)) { - // TODO: Instead of sending all entries from nextIndex - // only send a fixed number of entries to each follower - // This is to avoid the situation where there are a lot of - // entries to install for a fresh follower or to a follower - // that has fallen too far behind with the log but yet is not - // eligible to receive a snapshot + // FIXME : Sending one entry at a time entries = context.getReplicatedLog().getFrom(nextIndex, 1); } diff --git a/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java 
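The Leader change above records the ClientRequestTracker before checking cluster size and handles the no-follower case directly: with zero followers the new entry is committed and applied locally instead of waiting for AppendEntries replies. A simplified decision sketch in plain Java (names and println calls are illustrative, this is not the actor code):

import java.util.ArrayList;
import java.util.List;

public class ReplicateDecisionExample {
    private static long commitIndex = -1;
    private static final List<String> trackers = new ArrayList<>();

    static void replicate(String clientId, long logIndex, int followerCount) {
        // Track the client request up front, regardless of cluster size.
        trackers.add(clientId + "@" + logIndex);
        if (followerCount == 0) {
            // Single-node cluster: the entry is trivially replicated, so commit and apply now.
            commitIndex = logIndex;
            System.out.println("apply log up to index " + commitIndex);
        } else {
            // Otherwise commit waits for AppendEntries replies from the followers.
            System.out.println("sendAppendEntries() to " + followerCount + " followers");
        }
    }

    public static void main(String[] args) {
        replicate("client-1", 1, 0); // no followers: committed and applied immediately
        replicate("client-2", 2, 2); // followers present: replication happens first
    }
}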
b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java index d33b33925b..17c22a134a 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java +++ b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java @@ -8,6 +8,7 @@ import org.junit.Test; import org.opendaylight.controller.cluster.raft.MockRaftActorContext; import org.opendaylight.controller.cluster.raft.RaftActorContext; import org.opendaylight.controller.cluster.raft.RaftState; +import org.opendaylight.controller.cluster.raft.ReplicatedLogImplEntry; import org.opendaylight.controller.cluster.raft.base.messages.ApplyState; import org.opendaylight.controller.cluster.raft.base.messages.Replicate; import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat; @@ -154,18 +155,25 @@ public class LeaderTest extends AbstractRaftActorBehaviorTest { MockRaftActorContext actorContext = new MockRaftActorContext("test", getSystem(), raftActor); + actorContext.getReplicatedLog().removeFrom(0); + + actorContext.getReplicatedLog().append(new ReplicatedLogImplEntry(0, 1, + new MockRaftActorContext.MockPayload("foo"))); + + ReplicatedLogImplEntry entry = + new ReplicatedLogImplEntry(1, 1, + new MockRaftActorContext.MockPayload("foo")); + + actorContext.getReplicatedLog().append(entry); + Leader leader = new Leader(actorContext); RaftState raftState = leader - .handleMessage(senderActor, new Replicate(null, "state-id", - new MockRaftActorContext.MockReplicatedLogEntry(1, - 100, - new MockRaftActorContext.MockPayload("foo")) - )); + .handleMessage(senderActor, new Replicate(null, "state-id",entry)); // State should not change assertEquals(RaftState.Leader, raftState); - assertEquals(100, actorContext.getCommitIndex()); + assertEquals(1, actorContext.getCommitIndex()); final String out = new ExpectMsg(duration("1 seconds"), diff --git a/opendaylight/md-sal/sal-binding-broker/src/main/java/org/opendaylight/controller/md/sal/binding/impl/AbstractReadWriteTransaction.java b/opendaylight/md-sal/sal-binding-broker/src/main/java/org/opendaylight/controller/md/sal/binding/impl/AbstractReadWriteTransaction.java index 5ced7bae9f..8fbc118a16 100644 --- a/opendaylight/md-sal/sal-binding-broker/src/main/java/org/opendaylight/controller/md/sal/binding/impl/AbstractReadWriteTransaction.java +++ b/opendaylight/md-sal/sal-binding-broker/src/main/java/org/opendaylight/controller/md/sal/binding/impl/AbstractReadWriteTransaction.java @@ -7,22 +7,19 @@ */ package org.opendaylight.controller.md.sal.binding.impl; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.ExecutionException; - import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; +import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationException; import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationOperation; import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction; import org.opendaylight.yangtools.yang.binding.InstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument; -import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import 
com.google.common.base.Optional; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; public class AbstractReadWriteTransaction extends AbstractWriteTransaction { @@ -50,15 +47,15 @@ public class AbstractReadWriteTransaction extends AbstractWriteTransaction> d; + final Boolean exists; try { - d = getDelegate().read(store, currentPath).get(); - } catch (InterruptedException | ExecutionException e) { + exists = getDelegate().exists(store, currentPath).checkedGet(); + } catch (ReadFailedException e) { LOG.error("Failed to read pre-existing data from store {} path {}", store, currentPath, e); throw new IllegalStateException("Failed to read pre-existing data", e); } - if (!d.isPresent() && iterator.hasNext()) { + if (!exists && iterator.hasNext()) { getDelegate().merge(store, currentPath, currentOp.createDefault(currentArg)); } } diff --git a/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/impl/test/ForwardedBackwardsCompatibleDataBrokerTest.java b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/impl/test/ForwardedBackwardsCompatibleDataBrokerTest.java new file mode 100644 index 0000000000..f91e356b8e --- /dev/null +++ b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/impl/test/ForwardedBackwardsCompatibleDataBrokerTest.java @@ -0,0 +1,72 @@ +package org.opendaylight.controller.md.sal.binding.impl.test; + +import org.junit.Test; +import org.opendaylight.controller.md.sal.binding.impl.ForwardedBackwardsCompatibleDataBroker; +import org.opendaylight.controller.md.sal.binding.test.AbstractSchemaAwareTest; +import org.opendaylight.controller.md.sal.binding.test.DataBrokerTestCustomizer; +import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker; +import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction; +import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.Top; +import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList; +import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelListBuilder; +import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelListKey; +import org.opendaylight.yangtools.yang.binding.DataObject; +import org.opendaylight.yangtools.yang.binding.InstanceIdentifier; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; + +import java.util.concurrent.ExecutionException; + +import static junit.framework.TestCase.assertNotNull; + +public class ForwardedBackwardsCompatibleDataBrokerTest extends + AbstractSchemaAwareTest { + + private DataBrokerTestCustomizer testCustomizer; + private ForwardedBackwardsCompatibleDataBroker dataBroker; + private DOMDataBroker domBroker; + + private static final InstanceIdentifier TOP_PATH = InstanceIdentifier.create(Top.class); + private static final TopLevelListKey TOP_LIST_KEY = new TopLevelListKey("foo"); + private static final InstanceIdentifier NODE_PATH = TOP_PATH.child(TopLevelList.class, TOP_LIST_KEY); + private static final TopLevelList NODE = new TopLevelListBuilder().setKey(TOP_LIST_KEY).build(); + + protected DataBrokerTestCustomizer createDataBrokerTestCustomizer() { + return new DataBrokerTestCustomizer(); + } + + @Override + protected void 
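The AbstractReadWriteTransaction change above replaces a full read of each ancestor with the cheaper exists() check before merging in a default parent node. A hedged sketch of that check-then-merge walk over ancestor paths, with a plain Map standing in for the datastore and string paths standing in for YangInstanceIdentifier (none of these stand-ins are MD-SAL API):

import java.util.HashMap;
import java.util.Map;

public class EnsureParentsExample {
    // Stand-in for the datastore: path -> data.
    private static final Map<String, String> STORE = new HashMap<>();

    static boolean exists(String path) {      // stand-in for getDelegate().exists(store, path).checkedGet()
        return STORE.containsKey(path);
    }

    static void mergeDefault(String path) {   // stand-in for getDelegate().merge(store, path, defaultNode)
        STORE.putIfAbsent(path, "<default>");
    }

    static void ensureParentsByMerge(String targetPath) {
        // Walk the ancestors of the target ("/top", then "/top/list", ...) and create
        // an empty default wherever nothing exists yet, mirroring the patched loop.
        String[] segments = targetPath.substring(1).split("/");
        StringBuilder current = new StringBuilder();
        for (int i = 0; i < segments.length - 1; i++) { // stop before the target node itself
            current.append('/').append(segments[i]);
            String parent = current.toString();
            if (!exists(parent)) {
                mergeDefault(parent);
            }
        }
    }

    public static void main(String[] args) {
        ensureParentsByMerge("/top/list/entry-foo");
        System.out.println(STORE); // both missing ancestors were merged in
    }
}

The motivation, as far as the diff shows, is that an existence check avoids reading back a node's data just to test for its presence.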
setupWithSchema(final SchemaContext context) { + testCustomizer = createDataBrokerTestCustomizer(); + + domBroker = testCustomizer.createDOMDataBroker(); + dataBroker = testCustomizer.createBackwardsCompatibleDataBroker(); + testCustomizer.updateSchema(context); + } + + + /** + * The purpose of this test is to exercise the backwards compatible broker + *

+ * This test tries to execute the code which ensures that the parents + * for a given node get automatically created. + * + * @see org.opendaylight.controller.md.sal.binding.impl.AbstractReadWriteTransaction#ensureParentsByMerge(org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType, org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, org.opendaylight.yangtools.yang.binding.InstanceIdentifier) + */ + @Test + public void test() throws InterruptedException, ExecutionException { + DataModificationTransaction writeTx = + dataBroker.beginTransaction(); + + writeTx.putOperationalData(NODE_PATH, NODE); + + writeTx.commit(); + + // TOP_PATH should exist as it is the parent of NODE_PATH + DataObject object = dataBroker.readOperationalData(TOP_PATH); + + assertNotNull(object); + + } + + +} diff --git a/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/test/DataBrokerTestCustomizer.java b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/test/DataBrokerTestCustomizer.java index e0f6f3546f..60eec55ca5 100644 --- a/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/test/DataBrokerTestCustomizer.java +++ b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/md/sal/binding/test/DataBrokerTestCustomizer.java @@ -10,6 +10,7 @@ package org.opendaylight.controller.md.sal.binding.test; import javassist.ClassPool; import org.opendaylight.controller.md.sal.binding.api.DataBroker; +import org.opendaylight.controller.md.sal.binding.impl.ForwardedBackwardsCompatibleDataBroker; import org.opendaylight.controller.md.sal.binding.impl.ForwardedBindingDataBroker; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker; @@ -46,13 +47,15 @@ public class DataBrokerTestCustomizer { } public DOMStore createConfigurationDatastore() { - InMemoryDOMDataStore store = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore store = new InMemoryDOMDataStore("CFG", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); schemaService.registerSchemaContextListener(store); return store; } public DOMStore createOperationalDatastore() { - InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); schemaService.registerSchemaContextListener(store); return store; } @@ -69,6 +72,11 @@ public class DataBrokerTestCustomizer { return new ForwardedBindingDataBroker(getDOMDataBroker(), getMappingService(), getSchemaService()); } + public ForwardedBackwardsCompatibleDataBroker createBackwardsCompatibleDataBroker() { + return new ForwardedBackwardsCompatibleDataBroker(getDOMDataBroker(), getMappingService(), getSchemaService(), MoreExecutors.sameThreadExecutor()); + } + + private SchemaService getSchemaService() { return schemaService; } diff --git a/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/sal/binding/test/util/BindingTestContext.java b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/sal/binding/test/util/BindingTestContext.java index deb4a8aeca..fef5715f50 100644 --- 
a/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/sal/binding/test/util/BindingTestContext.java +++ b/opendaylight/md-sal/sal-binding-broker/src/test/java/org/opendaylight/controller/sal/binding/test/util/BindingTestContext.java @@ -63,6 +63,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.MutableClassToInstanceMap; import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; @Beta public class BindingTestContext implements AutoCloseable { @@ -133,8 +134,10 @@ public class BindingTestContext implements AutoCloseable { public void startNewDomDataBroker() { checkState(executor != null, "Executor needs to be set"); - InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", executor); - InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", executor); + InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", executor, + MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", executor, + MoreExecutors.sameThreadExecutor()); newDatastores = ImmutableMap.builder() .put(LogicalDatastoreType.OPERATIONAL, operStore) .put(LogicalDatastoreType.CONFIGURATION, configStore) diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/PathUtils.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/PathUtils.java index cf5174319d..1dd0f3b827 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/PathUtils.java +++ b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/PathUtils.java @@ -12,18 +12,19 @@ package org.opendaylight.controller.cluster.datastore.node.utils; public class PathUtils { public static String getParentPath(String currentElementPath){ - String parentPath = ""; + StringBuilder parentPath = new StringBuilder(); if(currentElementPath != null){ String[] parentPaths = currentElementPath.split("/"); if(parentPaths.length > 2){ for(int i=0;i 0){ - parentPath += "/" + parentPaths[i]; + parentPath.append("/"); + parentPath.append(parentPaths[i]); } } } } - return parentPath; + return parentPath.toString(); } } diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/protobuff/messages/transaction/ShardTransactionMessages.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/protobuff/messages/transaction/ShardTransactionMessages.java index 33ac9f6ca1..bdd66d3aba 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/protobuff/messages/transaction/ShardTransactionMessages.java +++ b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/protobuff/messages/transaction/ShardTransactionMessages.java @@ -6579,6 +6579,933 @@ public final class ShardTransactionMessages { // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.MergeDataReply) } + public interface DataExistsOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier 
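The PathUtils change above only swaps repeated string concatenation for a StringBuilder; the computed parent path is unchanged. As a worked illustration of what that split-and-rejoin logic produces (re-implemented here for clarity, assuming the same skip-empty-segment behaviour as the patched method):

public class ParentPathExample {
    // Illustrative re-implementation, not the patched class itself.
    static String getParentPath(String path) {
        StringBuilder parent = new StringBuilder();
        if (path != null) {
            String[] segments = path.split("/");
            if (segments.length > 2) { // a bare "/name" has no parent to report
                for (int i = 0; i < segments.length - 1; i++) {
                    if (segments[i].length() > 0) { // skip the empty segment before the leading '/'
                        parent.append('/').append(segments[i]);
                    }
                }
            }
        }
        return parent.toString();
    }

    public static void main(String[] args) {
        System.out.println(getParentPath("/node1/node2/node3")); // "/node1/node2"
        System.out.println(getParentPath("/node1"));             // "" (empty: nothing above it)
    }
}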
instanceIdentifierPathArguments = 1; + */ + boolean hasInstanceIdentifierPathArguments(); + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier getInstanceIdentifierPathArguments(); + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder getInstanceIdentifierPathArgumentsOrBuilder(); + } + /** + * Protobuf type {@code org.opendaylight.controller.mdsal.DataExists} + */ + public static final class DataExists extends + com.google.protobuf.GeneratedMessage + implements DataExistsOrBuilder { + // Use DataExists.newBuilder() to construct. + private DataExists(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DataExists(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DataExists defaultInstance; + public static DataExists getDefaultInstance() { + return defaultInstance; + } + + public DataExists getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DataExists( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = instanceIdentifierPathArguments_.toBuilder(); + } + instanceIdentifierPathArguments_ = input.readMessage(org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(instanceIdentifierPathArguments_); + instanceIdentifierPathArguments_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExists_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DataExists parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DataExists(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + public static final int INSTANCEIDENTIFIERPATHARGUMENTS_FIELD_NUMBER = 1; + private org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier instanceIdentifierPathArguments_; + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public boolean hasInstanceIdentifierPathArguments() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier getInstanceIdentifierPathArguments() { + return instanceIdentifierPathArguments_; + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder getInstanceIdentifierPathArgumentsOrBuilder() { + return instanceIdentifierPathArguments_; + } + + private void initFields() { + instanceIdentifierPathArguments_ = org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasInstanceIdentifierPathArguments()) { + memoizedIsInitialized = 0; + return false; + } + if (!getInstanceIdentifierPathArguments().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, instanceIdentifierPathArguments_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, instanceIdentifierPathArguments_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() 
+ throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.opendaylight.controller.mdsal.DataExists} + */ + public static final class Builder extends 
+ com.google.protobuf.GeneratedMessage.Builder + implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExists_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.Builder.class); + } + + // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getInstanceIdentifierPathArgumentsFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (instanceIdentifierPathArgumentsBuilder_ == null) { + instanceIdentifierPathArguments_ = org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance(); + } else { + instanceIdentifierPathArgumentsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor; + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists getDefaultInstanceForType() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.getDefaultInstance(); + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists build() { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists buildPartial() { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (instanceIdentifierPathArgumentsBuilder_ == null) { + result.instanceIdentifierPathArguments_ = instanceIdentifierPathArguments_; + } else { + result.instanceIdentifierPathArguments_ = 
instanceIdentifierPathArgumentsBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists) { + return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists other) { + if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists.getDefaultInstance()) return this; + if (other.hasInstanceIdentifierPathArguments()) { + mergeInstanceIdentifierPathArguments(other.getInstanceIdentifierPathArguments()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasInstanceIdentifierPathArguments()) { + + return false; + } + if (!getInstanceIdentifierPathArguments().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExists) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + private org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier instanceIdentifierPathArguments_ = org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder> instanceIdentifierPathArgumentsBuilder_; + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public boolean hasInstanceIdentifierPathArguments() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier getInstanceIdentifierPathArguments() { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + return instanceIdentifierPathArguments_; + } else { + return instanceIdentifierPathArgumentsBuilder_.getMessage(); + } + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public Builder 
setInstanceIdentifierPathArguments(org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier value) { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + instanceIdentifierPathArguments_ = value; + onChanged(); + } else { + instanceIdentifierPathArgumentsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public Builder setInstanceIdentifierPathArguments( + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder builderForValue) { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + instanceIdentifierPathArguments_ = builderForValue.build(); + onChanged(); + } else { + instanceIdentifierPathArgumentsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public Builder mergeInstanceIdentifierPathArguments(org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier value) { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + instanceIdentifierPathArguments_ != org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance()) { + instanceIdentifierPathArguments_ = + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.newBuilder(instanceIdentifierPathArguments_).mergeFrom(value).buildPartial(); + } else { + instanceIdentifierPathArguments_ = value; + } + onChanged(); + } else { + instanceIdentifierPathArgumentsBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public Builder clearInstanceIdentifierPathArguments() { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + instanceIdentifierPathArguments_ = org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance(); + onChanged(); + } else { + instanceIdentifierPathArgumentsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder getInstanceIdentifierPathArgumentsBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getInstanceIdentifierPathArgumentsFieldBuilder().getBuilder(); + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + public org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder getInstanceIdentifierPathArgumentsOrBuilder() { + if (instanceIdentifierPathArgumentsBuilder_ != null) { + return instanceIdentifierPathArgumentsBuilder_.getMessageOrBuilder(); + } else { + return instanceIdentifierPathArguments_; + } + } + /** + * required .org.opendaylight.controller.mdsal.InstanceIdentifier instanceIdentifierPathArguments = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + 
org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder> + getInstanceIdentifierPathArgumentsFieldBuilder() { + if (instanceIdentifierPathArgumentsBuilder_ == null) { + instanceIdentifierPathArgumentsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.Builder, org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifierOrBuilder>( + instanceIdentifierPathArguments_, + getParentForChildren(), + isClean()); + instanceIdentifierPathArguments_ = null; + } + return instanceIdentifierPathArgumentsBuilder_; + } + + // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.DataExists) + } + + static { + defaultInstance = new DataExists(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.DataExists) + } + + public interface DataExistsReplyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool exists = 1; + /** + * required bool exists = 1; + */ + boolean hasExists(); + /** + * required bool exists = 1; + */ + boolean getExists(); + } + /** + * Protobuf type {@code org.opendaylight.controller.mdsal.DataExistsReply} + */ + public static final class DataExistsReply extends + com.google.protobuf.GeneratedMessage + implements DataExistsReplyOrBuilder { + // Use DataExistsReply.newBuilder() to construct. 
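(Editorial aside, not part of the generated source in this patch: the hunk above introduces the generated DataExistsReply message that carries the answer to a DataExists query. Below is a minimal, hypothetical round-trip sketch, assuming only the generated ShardTransactionMessages classes shown in this diff are on the classpath; the example class name is invented for illustration.)

    // Illustration only: build a DataExistsReply, serialize it, and read it back.
    import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply;

    public class DataExistsReplyExample {
        public static void main(String[] args) throws Exception {
            // "exists" is the only field and is marked required in ShardTransaction.proto.
            DataExistsReply reply = DataExistsReply.newBuilder()
                .setExists(true)
                .build();

            // Round-trip through the wire format and read the flag back.
            byte[] wire = reply.toByteArray();
            boolean exists = DataExistsReply.parseFrom(wire).getExists();
            System.out.println("exists = " + exists);
        }
    }
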
+ private DataExistsReply(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DataExistsReply(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DataExistsReply defaultInstance; + public static DataExistsReply getDefaultInstance() { + return defaultInstance; + } + + public DataExistsReply getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DataExistsReply( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + exists_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExistsReply_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DataExistsReply parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DataExistsReply(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool exists = 1; + public static final int EXISTS_FIELD_NUMBER = 1; + private boolean exists_; + /** + * required bool exists = 1; + */ + public boolean hasExists() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool exists = 1; + */ + public boolean getExists() { + return exists_; + } + + private void initFields() { + exists_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte 
isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasExists()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, exists_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, exists_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
PARSER.parseFrom(input); + } + public static org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.opendaylight.controller.mdsal.DataExistsReply} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReplyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExistsReply_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.class, org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.Builder.class); + } + + // Construct using org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + exists_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor; + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply getDefaultInstanceForType() { + return org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.getDefaultInstance(); + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply build() { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply result = buildPartial(); 
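(Editorial aside, not part of the patch: the build() method at this point enforces the required "exists" field through the generated isInitialized() check, whereas buildPartial() skips that check. A small hypothetical illustration follows; the example class name is invented.)

    // Illustration only: required-field enforcement in the generated builder.
    import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply;

    public class DataExistsReplyValidationExample {
        public static void main(String[] args) {
            // buildPartial() does not verify required fields.
            DataExistsReply partial = DataExistsReply.newBuilder().buildPartial();
            System.out.println("initialized = " + partial.isInitialized()); // prints false

            try {
                // build() checks the required "exists" field and throws here.
                DataExistsReply.newBuilder().build();
            } catch (com.google.protobuf.UninitializedMessageException e) {
                System.out.println("build() rejected the incomplete message");
            }
        }
    }
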
+ if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply buildPartial() { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply result = new org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.exists_ = exists_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply) { + return mergeFrom((org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply other) { + if (other == org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply.getDefaultInstance()) return this; + if (other.hasExists()) { + setExists(other.getExists()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasExists()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.DataExistsReply) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required bool exists = 1; + private boolean exists_ ; + /** + * required bool exists = 1; + */ + public boolean hasExists() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool exists = 1; + */ + public boolean getExists() { + return exists_; + } + /** + * required bool exists = 1; + */ + public Builder setExists(boolean value) { + bitField0_ |= 0x00000001; + exists_ = value; + onChanged(); + return this; + } + /** + * required bool exists = 1; + */ + public Builder clearExists() { + bitField0_ = (bitField0_ & ~0x00000001); + exists_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.DataExistsReply) + } + + static { + defaultInstance = new DataExistsReply(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.opendaylight.controller.mdsal.DataExistsReply) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_org_opendaylight_controller_mdsal_CloseTransaction_descriptor; private static @@ -6649,6 +7576,16 @@ public final class ShardTransactionMessages { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_org_opendaylight_controller_mdsal_MergeDataReply_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_opendaylight_controller_mdsal_DataExists_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_opendaylight_controller_mdsal_DataExistsReply_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -6683,10 +7620,13 @@ public final class ShardTransactionMessages { "thArguments\030\001 \002(\01325.org.opendaylight.con" + "troller.mdsal.InstanceIdentifier\022?\n\016norm" + "alizedNode\030\002 \002(\0132\'.org.opendaylight.cont" + - "roller.mdsal.Node\"\020\n\016MergeDataReplyBV\n:o" + - "rg.opendaylight.controller.protobuff.mes" + - "sages.transactionB\030ShardTransactionMessa" + - "ges" + "roller.mdsal.Node\"\020\n\016MergeDataReply\"l\n\nD" + + "ataExists\022^\n\037instanceIdentifierPathArgum" + + "ents\030\001 \002(\01325.org.opendaylight.controller" + + ".mdsal.InstanceIdentifier\"!\n\017DataExistsR" + + "eply\022\016\n\006exists\030\001 \002(\010BV\n:org.opendaylight", + ".controller.protobuff.messages.transacti" + + "onB\030ShardTransactionMessages" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -6777,6 +7717,18 @@ public final class ShardTransactionMessages { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_org_opendaylight_controller_mdsal_MergeDataReply_descriptor, new java.lang.String[] { }); + internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_org_opendaylight_controller_mdsal_DataExists_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_opendaylight_controller_mdsal_DataExists_descriptor, + new java.lang.String[] { "InstanceIdentifierPathArguments", }); + internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_org_opendaylight_controller_mdsal_DataExistsReply_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_opendaylight_controller_mdsal_DataExistsReply_descriptor, + new java.lang.String[] { "Exists", }); return null; } }; diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/resources/ShardTransaction.proto b/opendaylight/md-sal/sal-clustering-commons/src/main/resources/ShardTransaction.proto index 4177bd7a05..63b75ac430 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/main/resources/ShardTransaction.proto +++ b/opendaylight/md-sal/sal-clustering-commons/src/main/resources/ShardTransaction.proto @@ -65,3 +65,11 @@ required Node normalizedNode =2; message MergeDataReply{ } + +message DataExists { + required InstanceIdentifier instanceIdentifierPathArguments = 1; +} + +message DataExistsReply { + required bool exists = 1; +} diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/NormalizedNodeToNodeCodecTest.java 
b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/NormalizedNodeToNodeCodecTest.java index 1b85d46fc6..bdad86ddc1 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/NormalizedNodeToNodeCodecTest.java +++ b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/NormalizedNodeToNodeCodecTest.java @@ -78,8 +78,12 @@ public class NormalizedNodeToNodeCodecTest { NormalizedNodeToNodeCodec codec = new NormalizedNodeToNodeCodec(schemaContext); + long start = System.currentTimeMillis(); Container container = codec.encode(instanceIdentifierFromString(id), output); + long end = System.currentTimeMillis(); + + System.out.println("Timetaken to encode :"+(end-start)); assertNotNull(container); assertEquals(id, container.getParentPath() + "/" @@ -89,8 +93,12 @@ public class NormalizedNodeToNodeCodecTest { // first get the node representation of normalized node final Node node = container.getNormalizedNode(); + start = System.currentTimeMillis(); NormalizedNode normalizedNode = codec.decode(instanceIdentifierFromString(id), node); + end = System.currentTimeMillis(); + + System.out.println("Timetaken to decode :"+(end-start)); assertEquals(normalizedNode.getValue().toString(), output.getValue() .toString()); diff --git a/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/05-clustering.xml.conf b/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/05-clustering.xml.conf index 7891ee2088..72da6304e5 100644 --- a/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/05-clustering.xml.conf +++ b/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/05-clustering.xml.conf @@ -34,19 +34,19 @@ prefix:distributed-operational-datastore-provider distributed-operational-store-module - + dom:schema-service yang-schema-service - + prefix:distributed-config-datastore-provider distributed-config-store-module - + dom:schema-service yang-schema-service - + diff --git a/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/akka.conf b/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/akka.conf index 9749ae27ae..5bf231dbe1 100644 --- a/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/akka.conf +++ b/opendaylight/md-sal/sal-clustering-config/src/main/resources/initial/akka.conf @@ -18,9 +18,9 @@ odl-cluster-data { netty.tcp { hostname = "" port = 2550 - maximum-frame-size = 2097152 - send-buffer-size = 52428800 - receive-buffer-size = 52428800 + maximum-frame-size = 419430400 + send-buffer-size = 52428800 + receive-buffer-size = 52428800 } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/pom.xml b/opendaylight/md-sal/sal-distributed-datastore/pom.xml index 848d425bf9..648e8d23d0 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/pom.xml +++ b/opendaylight/md-sal/sal-distributed-datastore/pom.xml @@ -40,6 +40,11 @@ akka-testkit_${scala.version} + + com.typesafe.akka + akka-slf4j_${scala.version} + + diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractUntypedActor.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractUntypedActor.java index ce0516064e..ac01f42a7f 100644 --- 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractUntypedActor.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractUntypedActor.java @@ -33,4 +33,12 @@ public abstract class AbstractUntypedActor extends UntypedActor { } protected abstract void handleReceive(Object message) throws Exception; + + protected void ignoreMessage(Object message){ + LOG.debug("Unhandled message {} ", message); + } + + protected void unknownMessage(Object message) throws Exception{ + unhandled(message); + } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ClusterWrapperImpl.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ClusterWrapperImpl.java index 142aacde65..8910137ec4 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ClusterWrapperImpl.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ClusterWrapperImpl.java @@ -12,18 +12,31 @@ import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.cluster.Cluster; import akka.cluster.ClusterEvent; +import com.google.common.base.Preconditions; public class ClusterWrapperImpl implements ClusterWrapper { private final Cluster cluster; private final String currentMemberName; public ClusterWrapperImpl(ActorSystem actorSystem){ + Preconditions.checkNotNull(actorSystem, "actorSystem should not be null"); + cluster = Cluster.get(actorSystem); + + Preconditions.checkState(cluster.getSelfRoles().size() > 0, + "No akka roles were specified\n" + + "One way to specify the member name is to pass a property on the command line like so\n" + + " -Dakka.cluster.roles.0=member-3\n" + + "member-3 here would be the name of the member" + ); + currentMemberName = (String) cluster.getSelfRoles().toArray()[0]; } public void subscribeToMemberEvents(ActorRef actorRef){ + Preconditions.checkNotNull(actorRef, "actorRef should not be null"); + cluster.subscribe(actorRef, ClusterEvent.initialStateAsEvents(), ClusterEvent.MemberEvent.class, ClusterEvent.UnreachableMember.class); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/CompositeModificationPayload.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/CompositeModificationPayload.java index abc69f1897..d0abb20718 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/CompositeModificationPayload.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/CompositeModificationPayload.java @@ -29,7 +29,7 @@ public class CompositeModificationPayload extends Payload implements modification = null; } public CompositeModificationPayload(Object modification){ - this.modification = (PersistentMessages.CompositeModification) modification; + this.modification = (PersistentMessages.CompositeModification) Preconditions.checkNotNull(modification, "modification should not be null"); } @Override public Map encode() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ConfigurationImpl.java 
b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ConfigurationImpl.java index 34590025d5..37b565d213 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ConfigurationImpl.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ConfigurationImpl.java @@ -9,6 +9,7 @@ package org.opendaylight.controller.cluster.datastore; import com.google.common.base.Optional; +import com.google.common.base.Preconditions; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigObject; @@ -34,11 +35,23 @@ public class ConfigurationImpl implements Configuration { private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class); + // Look up maps to speed things up + + // key = memberName, value = list of shardNames + private Map> memberShardNames = new HashMap<>(); + + // key = shardName, value = list of replicaNames (replicaNames are the same as memberNames) + private Map> shardReplicaNames = new HashMap<>(); + public ConfigurationImpl(String moduleShardsConfigPath, String modulesConfigPath){ + Preconditions.checkNotNull(moduleShardsConfigPath, "moduleShardsConfigPath should not be null"); + Preconditions.checkNotNull(modulesConfigPath, "modulesConfigPath should not be null"); + + File moduleShardsFile = new File("./configuration/initial/" + moduleShardsConfigPath); File modulesFile = new File("./configuration/initial/" + modulesConfigPath); @@ -66,6 +79,13 @@ public class ConfigurationImpl implements Configuration { } @Override public List getMemberShardNames(String memberName){ + + Preconditions.checkNotNull(memberName, "memberName should not be null"); + + if(memberShardNames.containsKey(memberName)){ + return memberShardNames.get(memberName); + } + List shards = new ArrayList(); for(ModuleShard ms : moduleShards){ for(Shard s : ms.getShards()){ @@ -76,11 +96,17 @@ public class ConfigurationImpl implements Configuration { } } } + + memberShardNames.put(memberName, shards); + return shards; } @Override public Optional getModuleNameFromNameSpace(String nameSpace) { + + Preconditions.checkNotNull(nameSpace, "nameSpace should not be null"); + for(Module m : modules){ if(m.getNameSpace().equals(nameSpace)){ return Optional.of(m.getName()); @@ -98,6 +124,9 @@ public class ConfigurationImpl implements Configuration { } @Override public List getShardNamesFromModuleName(String moduleName) { + + Preconditions.checkNotNull(moduleName, "moduleName should not be null"); + for(ModuleShard m : moduleShards){ if(m.getModuleName().equals(moduleName)){ List l = new ArrayList<>(); @@ -112,14 +141,23 @@ public class ConfigurationImpl implements Configuration { } @Override public List getMembersFromShardName(String shardName) { - List shards = new ArrayList(); + + Preconditions.checkNotNull(shardName, "shardName should not be null"); + + if(shardReplicaNames.containsKey(shardName)){ + return shardReplicaNames.get(shardName); + } + for(ModuleShard ms : moduleShards){ for(Shard s : ms.getShards()) { if(s.getName().equals(shardName)){ - return s.getReplicas(); + List replicas = s.getReplicas(); + shardReplicaNames.put(shardName, replicas); + return replicas; } } } + shardReplicaNames.put(shardName, Collections.EMPTY_LIST); return Collections.EMPTY_LIST; } diff --git 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListener.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListener.java index cdf04dd093..1dab285679 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListener.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListener.java @@ -10,6 +10,7 @@ package org.opendaylight.controller.cluster.datastore; import akka.actor.Props; import akka.japi.Creator; +import com.google.common.base.Preconditions; import org.opendaylight.controller.cluster.datastore.messages.DataChanged; import org.opendaylight.controller.cluster.datastore.messages.DataChangedReply; import org.opendaylight.controller.cluster.datastore.messages.EnableNotification; @@ -27,9 +28,10 @@ public class DataChangeListener extends AbstractUntypedActor { public DataChangeListener(SchemaContext schemaContext, AsyncDataChangeListener> listener, YangInstanceIdentifier pathId) { - this.listener = listener; - this.schemaContext = schemaContext; - this.pathId = pathId; + + this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null"); + this.listener = Preconditions.checkNotNull(listener, "listener should not be null"); + this.pathId = Preconditions.checkNotNull(pathId, "pathId should not be null"); } @Override public void handleReceive(Object message) throws Exception { @@ -44,7 +46,7 @@ public class DataChangeListener extends AbstractUntypedActor { notificationsEnabled = message.isEnabled(); } - public void dataChanged(Object message) { + private void dataChanged(Object message) { // Do nothing if notifications are not enabled if(!notificationsEnabled){ diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerProxy.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerProxy.java index a4ca456268..6d835498af 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerProxy.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerProxy.java @@ -9,6 +9,7 @@ package org.opendaylight.controller.cluster.datastore; import akka.actor.ActorSelection; +import com.google.common.base.Preconditions; import org.opendaylight.controller.cluster.datastore.messages.DataChanged; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener; @@ -24,7 +25,7 @@ public class DataChangeListenerProxy implements AsyncDataChangeListener @@ -79,10 +80,11 @@ public class Shard extends RaftActor { Logging.getLogger(getContext().system(), this); // By default persistent will be true and can be turned off using the system - // property persistent + // property shard.persistent private final boolean persistent; - private final String name; + /// The name of this shard + private final ShardIdentifier name; private volatile SchemaContext schemaContext; @@ -90,8 +92,8 @@ public class Shard extends RaftActor { private final List dataChangeListeners = new ArrayList<>(); - private Shard(String name, 
Map peerAddresses) { - super(name, peerAddresses, Optional.of(configParams)); + private Shard(ShardIdentifier name, Map peerAddresses) { + super(name.toString(), mapPeerAddresses(peerAddresses), Optional.of(configParams)); this.name = name; @@ -99,15 +101,32 @@ public class Shard extends RaftActor { this.persistent = !"false".equals(setting); - LOG.info("Creating shard : {} persistent : {}", name, persistent); + LOG.info("Shard created : {} persistent : {}", name, persistent); - store = new InMemoryDOMDataStore(name, storeExecutor); + store = InMemoryDOMDataStoreFactory.create(name.toString(), null); - shardMBean = ShardMBeanFactory.getShardStatsMBean(name); + shardMBean = ShardMBeanFactory.getShardStatsMBean(name.toString()); } - public static Props props(final String name, final Map peerAddresses) { + private static Map mapPeerAddresses(Map peerAddresses){ + Map map = new HashMap<>(); + + for(Map.Entry entry : peerAddresses.entrySet()){ + map.put(entry.getKey().toString(), entry.getValue()); + } + + return map; + } + + + + + public static Props props(final ShardIdentifier name, + final Map peerAddresses) { + Preconditions.checkNotNull(name, "name should not be null"); + Preconditions.checkNotNull(peerAddresses, "peerAddresses should not be null"); + return Props.create(new Creator() { @Override @@ -119,13 +138,15 @@ public class Shard extends RaftActor { } - @Override public void onReceiveCommand(Object message){ - LOG.debug("Received message {} from {}", message.getClass().toString(), getSender()); + @Override public void onReceiveCommand(Object message) { + LOG.debug("Received message {} from {}", message.getClass().toString(), + getSender()); - if (message.getClass().equals(CreateTransactionChain.SERIALIZABLE_CLASS)) { - if(isLeader()) { + if (message.getClass() + .equals(CreateTransactionChain.SERIALIZABLE_CLASS)) { + if (isLeader()) { createTransactionChain(); - } else if(getLeader() != null){ + } else if (getLeader() != null) { getLeader().forward(message, getContext()); } } else if (message instanceof RegisterChangeListener) { @@ -134,57 +155,84 @@ public class Shard extends RaftActor { updateSchemaContext((UpdateSchemaContext) message); } else if (message instanceof ForwardedCommitTransaction) { handleForwardedCommit((ForwardedCommitTransaction) message); - } else if (message.getClass().equals(CreateTransaction.SERIALIZABLE_CLASS)) { - if(isLeader()) { + } else if (message.getClass() + .equals(CreateTransaction.SERIALIZABLE_CLASS)) { + if (isLeader()) { createTransaction(CreateTransaction.fromSerializable(message)); - } else if(getLeader() != null){ + } else if (getLeader() != null) { getLeader().forward(message, getContext()); } - } else if (message instanceof PeerAddressResolved){ + } else if (message instanceof PeerAddressResolved) { PeerAddressResolved resolved = (PeerAddressResolved) message; - setPeerAddress(resolved.getPeerId(), resolved.getPeerAddress()); + setPeerAddress(resolved.getPeerId().toString(), resolved.getPeerAddress()); } else { - super.onReceiveCommand(message); + super.onReceiveCommand(message); } } - private ActorRef createTypedTransactionActor(CreateTransaction createTransaction,String transactionId){ - if(createTransaction.getTransactionType()== TransactionProxy.TransactionType.READ_ONLY.ordinal()){ - return getContext().actorOf( - ShardTransaction.props( store.newReadOnlyTransaction(), getSelf(), schemaContext), transactionId); + private ActorRef createTypedTransactionActor( + CreateTransaction createTransaction, ShardTransactionIdentifier transactionId) 
{ + if (createTransaction.getTransactionType() + == TransactionProxy.TransactionType.READ_ONLY.ordinal()) { + + shardMBean.incrementReadOnlyTransactionCount(); + + return getContext().actorOf( + ShardTransaction + .props(store.newReadOnlyTransaction(), getSelf(), + schemaContext), transactionId.toString()); + + } else if (createTransaction.getTransactionType() + == TransactionProxy.TransactionType.READ_WRITE.ordinal()) { - }else if (createTransaction.getTransactionType()== TransactionProxy.TransactionType.READ_WRITE.ordinal()){ - return getContext().actorOf( - ShardTransaction.props( store.newReadWriteTransaction(), getSelf(), schemaContext), transactionId); + shardMBean.incrementReadWriteTransactionCount(); + return getContext().actorOf( + ShardTransaction + .props(store.newReadWriteTransaction(), getSelf(), + schemaContext), transactionId.toString()); - }else if (createTransaction.getTransactionType()== TransactionProxy.TransactionType.WRITE_ONLY.ordinal()){ - return getContext().actorOf( - ShardTransaction.props( store.newWriteOnlyTransaction(), getSelf(), schemaContext), transactionId); - }else{ - throw new IllegalArgumentException ("CreateTransaction message has unidentified transaction type="+createTransaction.getTransactionType()) ; - } - } + + } else if (createTransaction.getTransactionType() + == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) { + + shardMBean.incrementWriteOnlyTransactionCount(); + + return getContext().actorOf( + ShardTransaction + .props(store.newWriteOnlyTransaction(), getSelf(), + schemaContext), transactionId.toString()); + } else { + // FIXME: This does not seem right + throw new IllegalArgumentException( + "CreateTransaction message has unidentified transaction type=" + + createTransaction.getTransactionType()); + } + } private void createTransaction(CreateTransaction createTransaction) { - String transactionId = "shard-" + createTransaction.getTransactionId(); - LOG.info("Creating transaction : {} " , transactionId); - ActorRef transactionActor = createTypedTransactionActor(createTransaction,transactionId); + ShardTransactionIdentifier transactionId = ShardTransactionIdentifier.builder().remoteTransactionId(createTransaction.getTransactionId()).build(); + LOG.debug("Creating transaction : {} ", transactionId); + ActorRef transactionActor = + createTypedTransactionActor(createTransaction, transactionId); getSender() - .tell(new CreateTransactionReply(Serialization.serializedActorPath(transactionActor), createTransaction.getTransactionId()).toSerializable(), + .tell(new CreateTransactionReply( + Serialization.serializedActorPath(transactionActor), + createTransaction.getTransactionId()).toSerializable(), getSelf()); } private void commit(final ActorRef sender, Object serialized) { - Modification modification = MutableCompositeModification.fromSerializable(serialized, schemaContext); + Modification modification = MutableCompositeModification + .fromSerializable(serialized, schemaContext); DOMStoreThreePhaseCommitCohort cohort = modificationToCohort.remove(serialized); if (cohort == null) { - LOG.error( - "Could not find cohort for modification : {}", modification); - LOG.info("Writing modification using a new transaction"); + LOG.debug( + "Could not find cohort for modification : {}. 
Writing modification using a new transaction", + modification); DOMStoreReadWriteTransaction transaction = store.newReadWriteTransaction(); modification.apply(transaction); @@ -195,45 +243,46 @@ public class Shard extends RaftActor { future.get(); future = commitCohort.commit(); future.get(); - } catch (InterruptedException e) { - LOG.error("Failed to commit", e); - } catch (ExecutionException e) { + } catch (InterruptedException | ExecutionException e) { + shardMBean.incrementFailedTransactionsCount(); LOG.error("Failed to commit", e); + return; } + //we want to just apply the recovery commit and return + shardMBean.incrementCommittedTransactionCount(); + return; } final ListenableFuture future = cohort.commit(); - shardMBean.incrementCommittedTransactionCount(); final ActorRef self = getSelf(); future.addListener(new Runnable() { @Override public void run() { try { future.get(); - - if(sender != null) { sender .tell(new CommitTransactionReply().toSerializable(), self); - } else { - LOG.error("sender is null ???"); - } + shardMBean.incrementCommittedTransactionCount(); + shardMBean.setLastCommittedTransactionTime(new Date()); } catch (InterruptedException | ExecutionException e) { - // FIXME : Handle this properly - LOG.error(e, "An exception happened when committing"); + shardMBean.incrementFailedTransactionsCount(); + sender.tell(new akka.actor.Status.Failure(e),self); } } }, getContext().dispatcher()); } private void handleForwardedCommit(ForwardedCommitTransaction message) { - Object serializedModification = message.getModification().toSerializable(); + Object serializedModification = + message.getModification().toSerializable(); modificationToCohort - .put(serializedModification , message.getCohort()); + .put(serializedModification, message.getCohort()); - if(persistent) { - this.persistData(getSender(), "identifier", new CompositeModificationPayload(serializedModification)); + if (persistent) { + this.persistData(getSender(), "identifier", + new CompositeModificationPayload(serializedModification)); } else { this.commit(getSender(), serializedModification); } @@ -247,7 +296,8 @@ public class Shard extends RaftActor { private void registerChangeListener( RegisterChangeListener registerChangeListener) { - LOG.debug("registerDataChangeListener for " + registerChangeListener.getPath()); + LOG.debug("registerDataChangeListener for {}", registerChangeListener + .getPath()); ActorSelection dataChangeListenerPath = getContext() @@ -258,14 +308,16 @@ public class Shard extends RaftActor { // Notify the listener if notifications should be enabled or not // If this shard is the leader then it will enable notifications else // it will not - dataChangeListenerPath.tell(new EnableNotification(isLeader()), getSelf()); + dataChangeListenerPath + .tell(new EnableNotification(isLeader()), getSelf()); // Now store a reference to the data change listener so it can be notified // at a later point if notifications should be enabled or disabled dataChangeListeners.add(dataChangeListenerPath); AsyncDataChangeListener> - listener = new DataChangeListenerProxy(schemaContext,dataChangeListenerPath); + listener = + new DataChangeListenerProxy(schemaContext, dataChangeListenerPath); org.opendaylight.yangtools.concepts.ListenerRegistration>> registration = @@ -275,7 +327,9 @@ public class Shard extends RaftActor { getContext().actorOf( DataChangeListenerRegistration.props(registration)); - LOG.debug("registerDataChangeListener sending reply, listenerRegistrationPath = " + listenerRegistration.path().toString()); 
+ LOG.debug( + "registerDataChangeListener sending reply, listenerRegistrationPath = {} " + , listenerRegistration.path().toString()); getSender() .tell(new RegisterChangeListenerReply(listenerRegistration.path()), @@ -289,21 +343,23 @@ public class Shard extends RaftActor { ShardTransactionChain.props(chain, schemaContext)); getSender() .tell(new CreateTransactionChainReply(transactionChain.path()) - .toSerializable(), + .toSerializable(), getSelf()); } @Override protected void applyState(ActorRef clientActor, String identifier, Object data) { - if(data instanceof CompositeModificationPayload){ + if (data instanceof CompositeModificationPayload) { Object modification = ((CompositeModificationPayload) data).getModification(); - if(modification != null){ + if (modification != null) { commit(clientActor, modification); } else { - LOG.error("modification is null - this is very unexpected"); + LOG.error( + "modification is null - this is very unexpected, clientActor = {}, identifier = {}", + identifier, clientActor.path().toString()); } @@ -311,6 +367,17 @@ public class Shard extends RaftActor { LOG.error("Unknown state received {}", data); } + // Update stats + ReplicatedLogEntry lastLogEntry = getLastLogEntry(); + + if(lastLogEntry != null){ + shardMBean.setLastLogIndex(lastLogEntry.getIndex()); + shardMBean.setLastLogTerm(lastLogEntry.getTerm()); + } + + shardMBean.setCommitIndex(getCommitIndex()); + shardMBean.setLastApplied(getLastApplied()); + } @Override protected Object createSnapshot() { @@ -322,19 +389,21 @@ public class Shard extends RaftActor { } @Override protected void onStateChanged() { - for(ActorSelection dataChangeListener : dataChangeListeners){ - dataChangeListener.tell(new EnableNotification(isLeader()), getSelf()); + for (ActorSelection dataChangeListener : dataChangeListeners) { + dataChangeListener + .tell(new EnableNotification(isLeader()), getSelf()); } - if(getLeaderId() != null){ + if (getLeaderId() != null) { shardMBean.setLeader(getLeaderId()); } shardMBean.setRaftState(getRaftState().name()); + shardMBean.setCurrentTerm(getCurrentTerm()); } @Override public String persistenceId() { - return this.name; + return this.name.toString(); } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardManager.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardManager.java index 64c6821120..6162a0327c 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardManager.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardManager.java @@ -18,6 +18,10 @@ import akka.cluster.ClusterEvent; import akka.japi.Creator; import akka.japi.Function; import com.google.common.base.Preconditions; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier; +import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfo; +import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager.ShardManagerInfoMBean; import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard; import org.opendaylight.controller.cluster.datastore.messages.FindPrimary; import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound; @@ -28,6 +32,7 @@ import 
org.opendaylight.controller.cluster.datastore.messages.PrimaryNotFound; import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext; import scala.concurrent.duration.Duration; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -61,6 +66,8 @@ public class ShardManager extends AbstractUntypedActor { private final Configuration configuration; + private ShardManagerInfoMBean mBean; + /** * @param type defines the kind of data that goes into shards created by this shard manager. Examples of type would be * configuration or operational @@ -82,6 +89,11 @@ public class ShardManager extends AbstractUntypedActor { public static Props props(final String type, final ClusterWrapper cluster, final Configuration configuration) { + + Preconditions.checkNotNull(type, "type should not be null"); + Preconditions.checkNotNull(cluster, "cluster should not be null"); + Preconditions.checkNotNull(configuration, "configuration should not be null"); + return Props.create(new Creator() { @Override @@ -108,7 +120,7 @@ public class ShardManager extends AbstractUntypedActor { } else if(message instanceof ClusterEvent.UnreachableMember) { ignoreMessage(message); } else{ - throw new Exception ("Not recognized message received, message="+message); + unknownMessage(message); } } @@ -122,11 +134,8 @@ public class ShardManager extends AbstractUntypedActor { return; } - getSender().tell(new LocalShardNotFound(message.getShardName()), getSelf()); - } - - private void ignoreMessage(Object message){ - LOG.debug("Unhandled message : " + message); + getSender().tell(new LocalShardNotFound(message.getShardName()), + getSelf()); } private void memberRemoved(ClusterEvent.MemberRemoved message) { @@ -140,7 +149,7 @@ public class ShardManager extends AbstractUntypedActor { for(ShardInformation info : localShards.values()){ String shardName = info.getShardName(); - info.updatePeerAddress(getShardActorName(memberName, shardName), + info.updatePeerAddress(getShardIdentifier(memberName, shardName), getShardActorPath(shardName, memberName)); } } @@ -159,9 +168,6 @@ public class ShardManager extends AbstractUntypedActor { private void findPrimary(FindPrimary message) { String shardName = message.getShardName(); - List members = - configuration.getMembersFromShardName(shardName); - // First see if the there is a local replica for the shard ShardInformation info = localShards.get(shardName); if(info != null) { @@ -175,6 +181,9 @@ public class ShardManager extends AbstractUntypedActor { } } + List members = + configuration.getMembersFromShardName(shardName); + if(cluster.getCurrentMemberName() != null) { members.remove(cluster.getCurrentMemberName()); } @@ -196,9 +205,13 @@ public class ShardManager extends AbstractUntypedActor { private String getShardActorPath(String shardName, String memberName) { Address address = memberNameToAddress.get(memberName); if(address != null) { - return address.toString() + "/user/shardmanager-" + this.type + "/" - + getShardActorName( - memberName, shardName); + StringBuilder builder = new StringBuilder(); + builder.append(address.toString()) + .append("/user/") + .append(ShardManagerIdentifier.builder().type(type).build().toString()) + .append("/") + .append(getShardIdentifier(memberName, shardName)); + return builder.toString(); } return null; } @@ -211,8 +224,8 @@ public class ShardManager extends AbstractUntypedActor { * @param shardName * @return */ - private String getShardActorName(String memberName, String shardName){ - return 
memberName + "-shard-" + shardName + "-" + this.type; + private ShardIdentifier getShardIdentifier(String memberName, String shardName){ + return ShardIdentifier.builder().memberName(memberName).shardName(shardName).type(type).build(); } /** @@ -225,15 +238,20 @@ public class ShardManager extends AbstractUntypedActor { List memberShardNames = this.configuration.getMemberShardNames(memberName); + List localShardActorNames = new ArrayList<>(); for(String shardName : memberShardNames){ - String shardActorName = getShardActorName(memberName, shardName); - Map peerAddresses = getPeerAddresses(shardName); + ShardIdentifier shardId = getShardIdentifier(memberName, shardName); + Map peerAddresses = getPeerAddresses(shardName); ActorRef actor = getContext() - .actorOf(Shard.props(shardActorName, peerAddresses), - shardActorName); + .actorOf(Shard.props(shardId, peerAddresses), + shardId.toString()); + localShardActorNames.add(shardId.toString()); localShards.put(shardName, new ShardInformation(shardName, actor, peerAddresses)); } + mBean = ShardManagerInfo + .createShardManagerMBean("shard-manager-" + this.type, localShardActorNames); + } /** @@ -242,9 +260,9 @@ public class ShardManager extends AbstractUntypedActor { * @param shardName * @return */ - private Map getPeerAddresses(String shardName){ + private Map getPeerAddresses(String shardName){ - Map peerAddresses = new HashMap<>(); + Map peerAddresses = new HashMap<>(); List members = this.configuration.getMembersFromShardName(shardName); @@ -253,16 +271,16 @@ public class ShardManager extends AbstractUntypedActor { for(String memberName : members){ if(!currentMemberName.equals(memberName)){ - String shardActorName = getShardActorName(memberName, shardName); + ShardIdentifier shardId = getShardIdentifier(memberName, + shardName); String path = getShardActorPath(shardName, currentMemberName); - peerAddresses.put(shardActorName, path); + peerAddresses.put(shardId, path); } } return peerAddresses; } - @Override public SupervisorStrategy supervisorStrategy() { return new OneForOneStrategy(10, Duration.create("1 minute"), @@ -280,10 +298,10 @@ public class ShardManager extends AbstractUntypedActor { private final String shardName; private final ActorRef actor; private final ActorPath actorPath; - private final Map peerAddresses; + private final Map peerAddresses; private ShardInformation(String shardName, ActorRef actor, - Map peerAddresses) { + Map peerAddresses) { this.shardName = shardName; this.actor = actor; this.actorPath = actor.path(); @@ -302,16 +320,15 @@ public class ShardManager extends AbstractUntypedActor { return actorPath; } - public Map getPeerAddresses() { - return peerAddresses; - } - - public void updatePeerAddress(String peerId, String peerAddress){ - LOG.info("updatePeerAddress for peer {} with address {}", peerId, peerAddress); + public void updatePeerAddress(ShardIdentifier peerId, String peerAddress){ + LOG.info("updatePeerAddress for peer {} with address {}", peerId, + peerAddress); if(peerAddresses.containsKey(peerId)){ peerAddresses.put(peerId, peerAddress); - LOG.info("Sending PeerAddressResolved for peer {} with address {} to {}", peerId, peerAddress, actor.path()); + LOG.debug( + "Sending PeerAddressResolved for peer {} with address {} to {}", + peerId, peerAddress, actor.path()); actor .tell(new PeerAddressResolved(peerId, peerAddress), @@ -321,3 +338,6 @@ public class ShardManager extends AbstractUntypedActor { } } } + + + diff --git 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadTransaction.java index f78935b5e7..1328d466f3 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadTransaction.java @@ -16,6 +16,7 @@ import akka.event.Logging; import akka.event.LoggingAdapter; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply; +import org.opendaylight.controller.cluster.datastore.messages.DataExists; import org.opendaylight.controller.cluster.datastore.messages.ReadData; import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction; import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain; @@ -44,7 +45,9 @@ public class ShardReadTransaction extends ShardTransaction { @Override public void handleReceive(Object message) throws Exception { if (ReadData.SERIALIZABLE_CLASS.equals(message.getClass())) { - readData(transaction,ReadData.fromSerializable(message)); + readData(transaction, ReadData.fromSerializable(message)); + } else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) { + dataExists(transaction, DataExists.fromSerializable(message)); } else { super.handleReceive(message); } @@ -55,4 +58,9 @@ public class ShardReadTransaction extends ShardTransaction { getSelf().tell(PoisonPill.getInstance(), getSelf()); } + //default scope test method to check if we get correct exception + void forUnitTestOnlyExplicitTransactionClose(){ + transaction.close(); + } + } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java index 6733bcfb9f..97bb196f9f 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java @@ -16,6 +16,7 @@ import akka.event.Logging; import akka.event.LoggingAdapter; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply; +import org.opendaylight.controller.cluster.datastore.messages.DataExists; import org.opendaylight.controller.cluster.datastore.messages.DeleteData; import org.opendaylight.controller.cluster.datastore.messages.MergeData; import org.opendaylight.controller.cluster.datastore.messages.ReadData; @@ -55,6 +56,8 @@ public class ShardReadWriteTransaction extends ShardTransaction { deleteData(transaction,DeleteData.fromSerizalizable(message)); } else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) { readyTransaction(transaction,new ReadyTransaction()); + } else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) { + dataExists(transaction, DataExists.fromSerializable(message)); }else { super.handleReceive(message); } @@ -65,4 +68,13 @@ public 
class ShardReadWriteTransaction extends ShardTransaction { getSender().tell(new CloseTransactionReply().toSerializable(), getSelf()); getSelf().tell(PoisonPill.getInstance(), getSelf()); } + + /** + * The following method is used in unit testing only + * hence the default scope. + * This is done to test out failure cases. + */ + public void forUnitTestOnlyExplicitTransactionClose() { + transaction.close(); + } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransaction.java index 3a916bda2c..360a10722c 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransaction.java @@ -14,8 +14,11 @@ import akka.event.Logging; import akka.event.LoggingAdapter; import akka.japi.Creator; import com.google.common.base.Optional; -import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.CheckedFuture; +import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; +import org.opendaylight.controller.cluster.datastore.messages.DataExists; +import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply; import org.opendaylight.controller.cluster.datastore.messages.DeleteData; import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply; import org.opendaylight.controller.cluster.datastore.messages.MergeData; @@ -32,6 +35,7 @@ import org.opendaylight.controller.cluster.datastore.modification.ImmutableCompo import org.opendaylight.controller.cluster.datastore.modification.MergeModification; import org.opendaylight.controller.cluster.datastore.modification.MutableCompositeModification; import org.opendaylight.controller.cluster.datastore.modification.WriteModification; +import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction; import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction; import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort; @@ -41,8 +45,6 @@ import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import java.util.concurrent.ExecutionException; - /** * The ShardTransaction Actor represents a remote transaction *

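The ShardTransaction hunks below add a DataExists/DataExistsReply round trip and change the read/write/merge/delete handlers to report failures back to the sender as akka.actor.Status.Failure messages instead of only logging them. The following is a minimal, self-contained sketch of that request/reply control flow. Every type in it (Sender, ReadTx, DataExists, DataExistsReply, Failure) is a hypothetical stand-in, not the real Akka or MD-SAL API, so it only illustrates the pattern the patch introduces, under those assumptions.

/*
 * Illustrative sketch only. "Sender", "ReadTx", "DataExists", "DataExistsReply"
 * and "Failure" are hypothetical stand-ins for ActorRef, DOMStoreReadTransaction,
 * the protobuf-backed DataExists/DataExistsReply messages and akka.actor.Status.Failure.
 */
public class DataExistsSketch {

    interface Sender { void tell(Object reply); }                       // stand-in for ActorRef.tell(msg, self)
    interface ReadTx { boolean exists(String path) throws Exception; }  // stand-in for DOMStoreReadTransaction.exists(path)

    static final class DataExists      { final String path;     DataExists(String p)       { this.path = p; } }
    static final class DataExistsReply { final boolean exists;  DataExistsReply(boolean e) { this.exists = e; } }
    static final class Failure         { final Throwable cause; Failure(Throwable t)       { this.cause = t; } }

    // Mirrors the shape of the new dataExists handler: reply with the result on
    // success, or hand the exception back to the requester instead of only logging it.
    static void dataExists(ReadTx tx, DataExists msg, Sender sender) {
        try {
            sender.tell(new DataExistsReply(tx.exists(msg.path)));
        } catch (Exception e) {
            sender.tell(new Failure(e));
        }
    }

    public static void main(String[] args) {
        ReadTx tx = path -> path.startsWith("/inventory");  // toy "datastore": only /inventory paths exist
        Sender sender = reply -> System.out.println("reply: " + reply.getClass().getSimpleName());

        dataExists(tx, new DataExists("/inventory/nodes"), sender); // reply: DataExistsReply (exists == true)
        dataExists(tx, new DataExists("/topology"), sender);        // reply: DataExistsReply (exists == false)
    }
}

The same try/tell-failure shape is applied to writeData, mergeData and deleteData in the hunks that follow, which is what lets TransactionProxy surface remote errors to callers rather than swallowing them in the shard actor.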
@@ -90,7 +92,6 @@ public abstract class ShardTransaction extends AbstractUntypedActor { protected ShardTransaction(DOMStoreTransactionChain transactionChain, ActorRef shardActor, SchemaContext schemaContext) { this.transactionChain = transactionChain; - //this.transaction = transaction; this.shardActor = shardActor; this.schemaContext = schemaContext; } @@ -174,7 +175,7 @@ public abstract class ShardTransaction extends AbstractUntypedActor { getSender().tell(new GetCompositeModificationReply( new ImmutableCompositeModification(modification)), getSelf()); }else{ - throw new Exception ("ShardTransaction:handleRecieve received an unknown message"+message); + throw new UnknownMessageException(message); } } @@ -184,50 +185,73 @@ public abstract class ShardTransaction extends AbstractUntypedActor { final ActorRef sender = getSender(); final ActorRef self = getSelf(); final YangInstanceIdentifier path = message.getPath(); - final ListenableFuture>> future = - transaction.read(path); + final CheckedFuture>, ReadFailedException> future = + transaction.read(path); - future.addListener(new Runnable() { + future.addListener(new Runnable() { @Override public void run() { try { - Optional> optional = future.get(); + Optional> optional = future.checkedGet(); if (optional.isPresent()) { sender.tell(new ReadDataReply(schemaContext,optional.get()).toSerializable(), self); } else { sender.tell(new ReadDataReply(schemaContext,null).toSerializable(), self); } - } catch (InterruptedException | ExecutionException e) { - log.error(e, - "An exception happened when reading data from path : " - + path.toString()); + } catch (Exception e) { + sender.tell(new akka.actor.Status.Failure(e),self); } } }, getContext().dispatcher()); } + protected void dataExists(DOMStoreReadTransaction transaction, DataExists message) { + final YangInstanceIdentifier path = message.getPath(); + + try { + Boolean exists = transaction.exists(path).checkedGet(); + getSender().tell(new DataExistsReply(exists).toSerializable(), getSelf()); + } catch (ReadFailedException e) { + getSender().tell(new akka.actor.Status.Failure(e),getSelf()); + } + + } protected void writeData(DOMStoreWriteTransaction transaction, WriteData message) { modification.addModification( new WriteModification(message.getPath(), message.getData(),schemaContext)); LOG.debug("writeData at path : " + message.getPath().toString()); - transaction.write(message.getPath(), message.getData()); - getSender().tell(new WriteDataReply().toSerializable(), getSelf()); + + try { + transaction.write(message.getPath(), message.getData()); + getSender().tell(new WriteDataReply().toSerializable(), getSelf()); + }catch(Exception e){ + getSender().tell(new akka.actor.Status.Failure(e), getSelf()); + } } protected void mergeData(DOMStoreWriteTransaction transaction, MergeData message) { modification.addModification( new MergeModification(message.getPath(), message.getData(), schemaContext)); LOG.debug("mergeData at path : " + message.getPath().toString()); - transaction.merge(message.getPath(), message.getData()); - getSender().tell(new MergeDataReply().toSerializable(), getSelf()); + try { + transaction.merge(message.getPath(), message.getData()); + getSender().tell(new MergeDataReply().toSerializable(), getSelf()); + }catch(Exception e){ + getSender().tell(new akka.actor.Status.Failure(e), getSelf()); + } } protected void deleteData(DOMStoreWriteTransaction transaction, DeleteData message) { + LOG.debug("deleteData at path : " + message.getPath().toString()); modification.addModification(new 
DeleteModification(message.getPath())); - transaction.delete(message.getPath()); - getSender().tell(new DeleteDataReply().toSerializable(), getSelf()); + try { + transaction.delete(message.getPath()); + getSender().tell(new DeleteDataReply().toSerializable(), getSelf()); + }catch(Exception e){ + getSender().tell(new akka.actor.Status.Failure(e), getSelf()); + } } protected void readyTransaction(DOMStoreWriteTransaction transaction, ReadyTransaction message) { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChain.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChain.java index ce63f1107d..c508255ea4 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChain.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChain.java @@ -40,23 +40,27 @@ public class ShardTransactionChain extends AbstractUntypedActor { chain.close(); getSender().tell(new CloseTransactionChainReply().toSerializable(), getSelf()); }else{ - throw new Exception("Not recognized message recieved="+message); + unknownMessage(message); } } + private ActorRef getShardActor(){ + return getContext().parent(); + } + private ActorRef createTypedTransactionActor(CreateTransaction createTransaction,String transactionId){ if(createTransaction.getTransactionType()== TransactionProxy.TransactionType.READ_ONLY.ordinal()){ return getContext().actorOf( - ShardTransaction.props( chain.newReadOnlyTransaction(), getSelf(), schemaContext), transactionId); + ShardTransaction.props( chain.newReadOnlyTransaction(), getShardActor(), schemaContext), transactionId); }else if (createTransaction.getTransactionType()== TransactionProxy.TransactionType.READ_WRITE.ordinal()){ return getContext().actorOf( - ShardTransaction.props( chain.newReadWriteTransaction(), getSelf(), schemaContext), transactionId); + ShardTransaction.props( chain.newReadWriteTransaction(), getShardActor(), schemaContext), transactionId); }else if (createTransaction.getTransactionType()== TransactionProxy.TransactionType.WRITE_ONLY.ordinal()){ return getContext().actorOf( - ShardTransaction.props( chain.newWriteOnlyTransaction(), getSelf(), schemaContext), transactionId); + ShardTransaction.props( chain.newWriteOnlyTransaction(), getShardActor(), schemaContext), transactionId); }else{ throw new IllegalArgumentException ("CreateTransaction message has unidentified transaction type="+createTransaction.getTransactionType()) ; } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java index 2a5429ba81..91e578b46d 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java @@ -63,4 +63,13 @@ public class ShardWriteTransaction extends ShardTransaction { getSender().tell(new CloseTransactionReply().toSerializable(), getSelf()); getSelf().tell(PoisonPill.getInstance(), getSelf()); } + + /** + * The following method is used in 
unit testing only + * hence the default scope. + * This is done to test out failure cases. + */ + public void forUnitTestOnlyExplicitTransactionClose() { + transaction.close(); + } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohort.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohort.java index a8deb0153a..500b73ce9d 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohort.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohort.java @@ -67,7 +67,7 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor { } else if (message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) { abort(new AbortTransaction()); } else { - throw new Exception ("Not recognized message received,message="+message); + unknownMessage(message); } } @@ -130,7 +130,7 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor { Boolean canCommit = future.get(); sender.tell(new CanCommitTransactionReply(canCommit).toSerializable(), self); } catch (InterruptedException | ExecutionException e) { - log.error(e, "An exception happened when aborting"); + log.error(e, "An exception happened when checking canCommit"); } } }, getContext().dispatcher()); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java index 915b13dd8b..5b447943ea 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java @@ -59,17 +59,22 @@ public class ThreePhaseCommitCohortProxy implements } @Override public ListenableFuture canCommit() { + LOG.debug("txn {} canCommit", transactionId); Callable call = new Callable() { @Override public Boolean call() throws Exception { for(ActorPath actorPath : cohortPaths){ + + Object message = new CanCommitTransaction().toSerializable(); + LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath); + ActorSelection cohort = actorContext.actorSelection(actorPath); try { Object response = actorContext.executeRemoteOperation(cohort, - new CanCommitTransaction().toSerializable(), + message, ActorContext.ASK_DURATION); if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) { @@ -80,6 +85,7 @@ public class ThreePhaseCommitCohortProxy implements } } } catch(RuntimeException e){ + // FIXME : Need to properly handle this LOG.error("Unexpected Exception", e); return false; } @@ -93,14 +99,17 @@ public class ThreePhaseCommitCohortProxy implements } @Override public ListenableFuture preCommit() { + LOG.debug("txn {} preCommit", transactionId); return voidOperation(new PreCommitTransaction().toSerializable(), PreCommitTransactionReply.SERIALIZABLE_CLASS); } @Override public ListenableFuture abort() { + LOG.debug("txn {} abort", transactionId); return voidOperation(new AbortTransaction().toSerializable(), AbortTransactionReply.SERIALIZABLE_CLASS); } @Override public 
ListenableFuture commit() { + LOG.debug("txn {} commit", transactionId); return voidOperation(new CommitTransaction().toSerializable(), CommitTransactionReply.SERIALIZABLE_CLASS); } @@ -111,6 +120,8 @@ public class ThreePhaseCommitCohortProxy implements for(ActorPath actorPath : cohortPaths){ ActorSelection cohort = actorContext.actorSelection(actorPath); + LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath); + try { Object response = actorContext.executeRemoteOperation(cohort, diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java index fa98905a66..95862ae9d9 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java @@ -13,14 +13,18 @@ import akka.actor.ActorRef; import akka.actor.ActorSelection; import akka.actor.Props; import com.google.common.base.Optional; +import com.google.common.base.Preconditions; import com.google.common.util.concurrent.CheckedFuture; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListeningExecutorService; import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException; import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException; +import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction; import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionReply; +import org.opendaylight.controller.cluster.datastore.messages.DataExists; +import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply; import org.opendaylight.controller.cluster.datastore.messages.DeleteData; import org.opendaylight.controller.cluster.datastore.messages.MergeData; import org.opendaylight.controller.cluster.datastore.messages.ReadData; @@ -75,7 +79,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { private final TransactionType transactionType; private final ActorContext actorContext; private final Map remoteTransactionPaths = new HashMap<>(); - private final String identifier; + private final TransactionIdentifier identifier; private final ListeningExecutorService executor; private final SchemaContext schemaContext; @@ -85,13 +89,18 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { ListeningExecutorService executor, SchemaContext schemaContext ) { + this.actorContext = Preconditions.checkNotNull(actorContext, "actorContext should not be null"); + this.transactionType = Preconditions.checkNotNull(transactionType, "transactionType should not be null"); + this.executor = Preconditions.checkNotNull(executor, "executor should not be null"); + this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null"); + + String memberName = actorContext.getCurrentMemberName(); + if(memberName == null){ + memberName = "UNKNOWN-MEMBER"; + } + this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(counter.getAndIncrement()).build(); - 
this.identifier = actorContext.getCurrentMemberName() + "-txn-" + counter.getAndIncrement(); - this.transactionType = transactionType; - this.actorContext = actorContext; - this.executor = executor; - this.schemaContext = schemaContext; - + LOG.debug("Created txn {}", identifier); } @@ -99,14 +108,27 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { public CheckedFuture>, ReadFailedException> read( final YangInstanceIdentifier path) { + LOG.debug("txn {} read {}", identifier, path); + createTransactionIfMissing(actorContext, path); return transactionContext(path).readData(path); } + @Override public CheckedFuture exists( + YangInstanceIdentifier path) { + LOG.debug("txn {} exists {}", identifier, path); + + createTransactionIfMissing(actorContext, path); + + return transactionContext(path).dataExists(path); + } + @Override public void write(YangInstanceIdentifier path, NormalizedNode data) { + LOG.debug("txn {} write {}", identifier, path); + createTransactionIfMissing(actorContext, path); transactionContext(path).writeData(path, data); @@ -115,6 +137,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { @Override public void merge(YangInstanceIdentifier path, NormalizedNode data) { + LOG.debug("txn {} merge {}", identifier, path); + createTransactionIfMissing(actorContext, path); transactionContext(path).mergeData(path, data); @@ -123,6 +147,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { @Override public void delete(YangInstanceIdentifier path) { + LOG.debug("txn {} delete {}", identifier, path); + createTransactionIfMissing(actorContext, path); transactionContext(path).deleteData(path); @@ -132,7 +158,12 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { public DOMStoreThreePhaseCommitCohort ready() { List cohortPaths = new ArrayList<>(); + LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier, remoteTransactionPaths.size()); + for(TransactionContext transactionContext : remoteTransactionPaths.values()) { + + LOG.debug("txn {} Readying transaction for shard {}", identifier, transactionContext.getShardName()); + Object result = transactionContext.readyTransaction(); if(result.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)){ @@ -143,7 +174,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { } } - return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier, executor); + return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString(), executor); } @Override @@ -180,7 +211,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { try { Object response = actorContext.executeShardOperation(shardName, - new CreateTransaction(identifier,this.transactionType.ordinal() ).toSerializable(), + new CreateTransaction(identifier.toString(),this.transactionType.ordinal() ).toSerializable(), ActorContext.ASK_DURATION); if (response.getClass() .equals(CreateTransactionReply.SERIALIZABLE_CLASS)) { @@ -189,7 +220,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { String transactionPath = reply.getTransactionPath(); - LOG.info("Received transaction path = {}" , transactionPath ); + LOG.debug("txn {} Received transaction path = {}", identifier, transactionPath); ActorSelection transactionActor = actorContext.actorSelection(transactionPath); @@ -200,7 +231,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { 
remoteTransactionPaths.put(shardName, transactionContext); } } catch(TimeoutException | PrimaryNotFoundException e){ - LOG.error("Creating NoOpTransaction because of : {}", e.getMessage()); + LOG.error("txn {} Creating NoOpTransaction because of : {}", identifier, e.getMessage()); remoteTransactionPaths.put(shardName, new NoOpTransactionContext(shardName)); } @@ -223,13 +254,15 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { final YangInstanceIdentifier path); void writeData(YangInstanceIdentifier path, NormalizedNode data); + + CheckedFuture dataExists(YangInstanceIdentifier path); } - private class TransactionContextImpl implements TransactionContext{ + private class TransactionContextImpl implements TransactionContext { private final String shardName; private final String actorPath; - private final ActorSelection actor; + private final ActorSelection actor; private TransactionContextImpl(String shardName, String actorPath, @@ -247,7 +280,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { return actor; } - @Override public String getResolvedCohortPath(String cohortPath){ + @Override public String getResolvedCohortPath(String cohortPath) { return actorContext.resolvePath(actorPath, cohortPath); } @@ -268,38 +301,76 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { getActor().tell(new DeleteData(path).toSerializable(), null); } - @Override public void mergeData(YangInstanceIdentifier path, NormalizedNode data){ - getActor().tell(new MergeData(path, data, schemaContext).toSerializable(), null); + @Override public void mergeData(YangInstanceIdentifier path, + NormalizedNode data) { + getActor() + .tell(new MergeData(path, data, schemaContext).toSerializable(), + null); } - @Override public CheckedFuture>, ReadFailedException> readData( - final YangInstanceIdentifier path) { - - Callable>> call = new Callable>>() { + @Override + public CheckedFuture>, ReadFailedException> readData( + final YangInstanceIdentifier path) { - @Override public Optional> call() throws Exception { - Object response = actorContext - .executeRemoteOperation(getActor(), new ReadData(path).toSerializable(), - ActorContext.ASK_DURATION); - if(response.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)){ - ReadDataReply reply = ReadDataReply.fromSerializable(schemaContext,path, response); - if(reply.getNormalizedNode() == null){ - return Optional.absent(); + Callable>> call = + new Callable>>() { + + @Override public Optional> call() + throws Exception { + Object response = actorContext + .executeRemoteOperation(getActor(), + new ReadData(path).toSerializable(), + ActorContext.ASK_DURATION); + if (response.getClass() + .equals(ReadDataReply.SERIALIZABLE_CLASS)) { + ReadDataReply reply = ReadDataReply + .fromSerializable(schemaContext, path, + response); + if (reply.getNormalizedNode() == null) { + return Optional.absent(); + } + return Optional.>of( + reply.getNormalizedNode()); } - return Optional.>of(reply.getNormalizedNode()); - } - return Optional.absent(); - } - }; + throw new ReadFailedException("Read Failed " + path); + } + }; - return MappingCheckedFuture.create(executor.submit(call), ReadFailedException.MAPPER); + return MappingCheckedFuture + .create(executor.submit(call), ReadFailedException.MAPPER); } - @Override public void writeData(YangInstanceIdentifier path, NormalizedNode data) { - getActor().tell(new WriteData(path, data, schemaContext).toSerializable(), null); + @Override public void writeData(YangInstanceIdentifier path, + 
NormalizedNode data) { + getActor() + .tell(new WriteData(path, data, schemaContext).toSerializable(), + null); } + @Override public CheckedFuture dataExists( + final YangInstanceIdentifier path) { + + Callable call = new Callable() { + + @Override public Boolean call() throws Exception { + Object o = actorContext.executeRemoteOperation(getActor(), + new DataExists(path).toSerializable(), + ActorContext.ASK_DURATION + ); + + + if (DataExistsReply.SERIALIZABLE_CLASS + .equals(o.getClass())) { + return DataExistsReply.fromSerializable(o).exists(); + } + + throw new ReadFailedException("Exists Failed " + path); + } + }; + return MappingCheckedFuture + .create(executor.submit(call), ReadFailedException.MAPPER); + } } private class NoOpTransactionContext implements TransactionContext { @@ -324,35 +395,44 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction { } @Override public void closeTransaction() { - LOG.error("closeTransaction called"); + LOG.warn("txn {} closeTransaction called", identifier); } @Override public Object readyTransaction() { - LOG.error("readyTransaction called"); + LOG.warn("txn {} readyTransaction called", identifier); cohort = actorContext.getActorSystem().actorOf(Props.create(NoOpCohort.class)); return new ReadyTransactionReply(cohort.path()).toSerializable(); } @Override public void deleteData(YangInstanceIdentifier path) { - LOG.error("deleteData called path = {}", path); + LOG.warn("txt {} deleteData called path = {}", identifier, path); } @Override public void mergeData(YangInstanceIdentifier path, NormalizedNode data) { - LOG.error("mergeData called path = {}", path); + LOG.warn("txn {} mergeData called path = {}", identifier, path); } @Override public CheckedFuture>, ReadFailedException> readData( YangInstanceIdentifier path) { - LOG.error("readData called path = {}", path); + LOG.warn("txn {} readData called path = {}", identifier, path); return Futures.immediateCheckedFuture( Optional.>absent()); } @Override public void writeData(YangInstanceIdentifier path, NormalizedNode data) { - LOG.error("writeData called path = {}", path); + LOG.warn("txn {} writeData called path = {}", identifier, path); + } + + @Override public CheckedFuture dataExists( + YangInstanceIdentifier path) { + LOG.warn("txn {} dataExists called path = {}", identifier, path); + + // Returning false instead of an exception to keep this aligned with + // read + return Futures.immediateCheckedFuture(false); } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/exceptions/UnknownMessageException.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/exceptions/UnknownMessageException.java new file mode 100644 index 0000000000..f4f2524a8d --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/exceptions/UnknownMessageException.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.exceptions; + +public class UnknownMessageException extends Exception { + private final Object message; + + public UnknownMessageException(Object message) { + this.message = message; + } + + @Override public String getMessage() { + return "Unknown message received " + " - " + message; + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifier.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifier.java new file mode 100644 index 0000000000..c692881593 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifier.java @@ -0,0 +1,97 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.identifiers; + +import com.google.common.base.Preconditions; + +public class ShardIdentifier { + private final String shardName; + private final String memberName; + private final String type; + + + public ShardIdentifier(String shardName, String memberName, String type) { + + Preconditions.checkNotNull(shardName, "shardName should not be null"); + Preconditions.checkNotNull(memberName, "memberName should not be null"); + Preconditions.checkNotNull(type, "type should not be null"); + + this.shardName = shardName; + this.memberName = memberName; + this.type = type; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ShardIdentifier that = (ShardIdentifier) o; + + if (!memberName.equals(that.memberName)) { + return false; + } + if (!shardName.equals(that.shardName)) { + return false; + } + if (!type.equals(that.type)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = shardName.hashCode(); + result = 31 * result + memberName.hashCode(); + result = 31 * result + type.hashCode(); + return result; + } + + @Override public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append(memberName).append("-shard-").append(shardName).append("-").append(type); + return builder.toString(); + } + + public static Builder builder(){ + return new Builder(); + } + + public static class Builder { + private String shardName; + private String memberName; + private String type; + + public ShardIdentifier build(){ + return new ShardIdentifier(shardName, memberName, type); + } + + public Builder shardName(String shardName){ + this.shardName = shardName; + return this; + } + + public Builder memberName(String memberName){ + this.memberName = memberName; + return this; + } + + public Builder type(String type){ + this.type = type; + return this; + } + + } +} diff --git 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifier.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifier.java new file mode 100644 index 0000000000..65bf010b0a --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifier.java @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.identifiers; + +public class ShardManagerIdentifier { + private final String type; + + public ShardManagerIdentifier(String type) { + this.type = type; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ShardManagerIdentifier that = (ShardManagerIdentifier) o; + + if (!type.equals(that.type)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return type.hashCode(); + } + + @Override public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("shardmanager-").append(type); + return builder.toString(); + } + + public static Builder builder(){ + return new Builder(); + } + + public static class Builder { + private String type; + + public Builder type(String type){ + this.type = type; + return this; + } + + public ShardManagerIdentifier build(){ + return new ShardManagerIdentifier(this.type); + } + + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardTransactionIdentifier.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardTransactionIdentifier.java new file mode 100644 index 0000000000..77e81422e6 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardTransactionIdentifier.java @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.identifiers; + +import com.google.common.base.Preconditions; + +public class ShardTransactionIdentifier { + private final String remoteTransactionId; + + public ShardTransactionIdentifier(String remoteTransactionId) { + this.remoteTransactionId = Preconditions.checkNotNull(remoteTransactionId, "remoteTransactionId should not be null"); + } + + public static Builder builder(){ + return new Builder(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ShardTransactionIdentifier that = (ShardTransactionIdentifier) o; + + if (!remoteTransactionId.equals(that.remoteTransactionId)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return remoteTransactionId.hashCode(); + } + + @Override public String toString() { + final StringBuilder sb = + new StringBuilder(); + sb.append("shard-").append(remoteTransactionId); + return sb.toString(); + } + + public static class Builder { + private String remoteTransactionId; + + public Builder remoteTransactionId(String remoteTransactionId){ + this.remoteTransactionId = remoteTransactionId; + return this; + } + + public ShardTransactionIdentifier build(){ + return new ShardTransactionIdentifier(remoteTransactionId); + } + + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/TransactionIdentifier.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/TransactionIdentifier.java new file mode 100644 index 0000000000..ba2e27c69f --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/identifiers/TransactionIdentifier.java @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.identifiers; + +import com.google.common.base.Preconditions; + +public class TransactionIdentifier { + private final String memberName; + private final long counter; + + + public TransactionIdentifier(String memberName, long counter) { + this.memberName = Preconditions.checkNotNull(memberName, "memberName should not be null"); + this.counter = counter; + } + + public static Builder builder(){ + return new Builder(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TransactionIdentifier that = (TransactionIdentifier) o; + + if (counter != that.counter) { + return false; + } + if (!memberName.equals(that.memberName)) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = memberName.hashCode(); + result = 31 * result + (int) (counter ^ (counter >>> 32)); + return result; + } + + @Override public String toString() { + final StringBuilder sb = + new StringBuilder(); + sb.append(memberName).append("-txn-").append(counter); + return sb.toString(); + } + + public static class Builder { + private String memberName; + private long counter; + + public TransactionIdentifier build(){ + return new TransactionIdentifier(memberName, counter); + } + + public Builder memberName(String memberName){ + this.memberName = memberName; + return this; + } + + public Builder counter(long counter){ + this.counter = counter; + return this; + } + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/AbstractBaseMBean.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/AbstractBaseMBean.java index de1ac18533..a5d7b77a64 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/AbstractBaseMBean.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/AbstractBaseMBean.java @@ -34,6 +34,7 @@ public abstract class AbstractBaseMBean { public static String BASE_JMX_PREFIX = "org.opendaylight.controller:"; public static String JMX_TYPE_DISTRIBUTED_DATASTORE = "DistributedDatastore"; public static String JMX_CATEGORY_SHARD = "Shard"; + public static String JMX_CATEGORY_SHARD_MANAGER = "ShardManager"; private static final Logger LOG = LoggerFactory .getLogger(AbstractBaseMBean.class); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardMBeanFactory.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardMBeanFactory.java index a3359086b6..afca87f0df 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardMBeanFactory.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardMBeanFactory.java @@ -8,19 +8,20 @@ import java.util.Map; * Date: 7/16/14 */ public class 
ShardMBeanFactory { - private static Map shardMBeans= new HashMap(); + private static Map shardMBeans = + new HashMap(); - public static ShardStats getShardStatsMBean(String shardName){ - if(shardMBeans.containsKey(shardName)){ + public static ShardStats getShardStatsMBean(String shardName) { + if (shardMBeans.containsKey(shardName)) { return shardMBeans.get(shardName); - }else { - ShardStats shardStatsMBeanImpl = new ShardStats(shardName); + } else { + ShardStats shardStatsMBeanImpl = new ShardStats(shardName); - if(shardStatsMBeanImpl.registerMBean()) { - shardMBeans.put(shardName, shardStatsMBeanImpl); - } - return shardStatsMBeanImpl; - } - } + if (shardStatsMBeanImpl.registerMBean()) { + shardMBeans.put(shardName, shardStatsMBeanImpl); + } + return shardStatsMBeanImpl; + } + } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStats.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStats.java index 4eb6a8cef9..c6c1579ce3 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStats.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStats.java @@ -2,85 +2,177 @@ package org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard; import org.opendaylight.controller.cluster.datastore.jmx.mbeans.AbstractBaseMBean; +import java.text.SimpleDateFormat; +import java.util.Date; + /** * @author: syedbahm */ public class ShardStats extends AbstractBaseMBean implements ShardStatsMBean { - private Long committedTransactionsCount; - private Long journalMessagesCount; - final private String shardName; - private String leader; - private String raftState; - ShardStats(String shardName){ - this.shardName = shardName; - committedTransactionsCount =0L; - journalMessagesCount = 0L; - }; + private final String shardName; + + private Long committedTransactionsCount = 0L; + + private Long readOnlyTransactionCount = 0L; + + private Long writeOnlyTransactionCount = 0L; + + private Long readWriteTransactionCount = 0L; + + private String leader; + + private String raftState; + + private Long lastLogTerm = -1L; + + private Long lastLogIndex = -1L; + + private Long currentTerm = -1L; + + private Long commitIndex = -1L; + + private Long lastApplied = -1L; + + private Date lastCommittedTransactionTime = new Date(0L); + + private Long failedTransactionsCount = 0L; + + private SimpleDateFormat sdf = + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); + + ShardStats(String shardName) { + this.shardName = shardName; + } + + + @Override + public String getShardName() { + return shardName; + } + + @Override + public Long getCommittedTransactionsCount() { + return committedTransactionsCount; + } + + @Override public String getLeader() { + return leader; + } + + @Override public String getRaftState() { + return raftState; + } + + @Override public Long getReadOnlyTransactionCount() { + return readOnlyTransactionCount; + } + + @Override public Long getWriteOnlyTransactionCount() { + return writeOnlyTransactionCount; + } + + @Override public Long getReadWriteTransactionCount() { + return readWriteTransactionCount; + } + + @Override public Long getLastLogIndex() { + return lastLogIndex; + } + + @Override public Long getLastLogTerm() { + return lastLogTerm; + } + + @Override public 
Long getCurrentTerm() { + return currentTerm; + } + + @Override public Long getCommitIndex() { + return commitIndex; + } + + @Override public Long getLastApplied() { + return lastApplied; + } + + @Override + public String getLastCommittedTransactionTime() { + return sdf.format(lastCommittedTransactionTime); + } - @Override - public String getShardName() { - return shardName; - } + @Override public Long getFailedTransactionsCount() { + return failedTransactionsCount; + } - @Override - public Long getCommittedTransactionsCount() { - return committedTransactionsCount; - } + public Long incrementCommittedTransactionCount() { + return committedTransactionsCount++; + } - @Override - public Long getJournalMessagesCount() { - //FIXME: this will be populated once after integration with Raft stuff - return journalMessagesCount; - } + public Long incrementReadOnlyTransactionCount() { + return readOnlyTransactionCount++; + } - @Override public String getLeader() { - return leader; - } + public Long incrementWriteOnlyTransactionCount() { + return writeOnlyTransactionCount++; + } - @Override public String getRaftState() { - return raftState; - } + public Long incrementReadWriteTransactionCount() { + return readWriteTransactionCount++; + } - public Long incrementCommittedTransactionCount() { - return committedTransactionsCount++; - } + public void setLeader(String leader) { + this.leader = leader; + } + public void setRaftState(String raftState) { + this.raftState = raftState; + } - public void updateCommittedTransactionsCount(long currentCount){ - committedTransactionsCount = currentCount; + public void setLastLogTerm(Long lastLogTerm) { + this.lastLogTerm = lastLogTerm; + } - } + public void setLastLogIndex(Long lastLogIndex) { + this.lastLogIndex = lastLogIndex; + } - public void updateJournalMessagesCount(long currentCount){ - journalMessagesCount = currentCount; + public void setCurrentTerm(Long currentTerm) { + this.currentTerm = currentTerm; + } - } + public void setCommitIndex(Long commitIndex) { + this.commitIndex = commitIndex; + } - public void setLeader(String leader){ - this.leader = leader; - } + public void setLastApplied(Long lastApplied) { + this.lastApplied = lastApplied; + } - public void setRaftState(String raftState){ - this.raftState = raftState; - } + public void setLastCommittedTransactionTime( + Date lastCommittedTransactionTime) { + this.lastCommittedTransactionTime = lastCommittedTransactionTime; + } - @Override - protected String getMBeanName() { - return shardName; - } + @Override + protected String getMBeanName() { + return shardName; + } - @Override - protected String getMBeanType() { - return JMX_TYPE_DISTRIBUTED_DATASTORE; - } + @Override + protected String getMBeanType() { + return JMX_TYPE_DISTRIBUTED_DATASTORE; + } - @Override - protected String getMBeanCategory() { - return JMX_CATEGORY_SHARD; - } + @Override + protected String getMBeanCategory() { + return JMX_CATEGORY_SHARD; + } + public void incrementFailedTransactionsCount() { + this.failedTransactionsCount++; + } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsMBean.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsMBean.java index 9ebcc7fa5a..b8b220ee82 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsMBean.java +++ 
b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsMBean.java @@ -4,9 +4,32 @@ package org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard; * @author: syedbahm */ public interface ShardStatsMBean { - String getShardName(); - Long getCommittedTransactionsCount(); - Long getJournalMessagesCount(); - String getLeader(); - String getRaftState(); + String getShardName(); + + Long getCommittedTransactionsCount(); + + String getLeader(); + + String getRaftState(); + + Long getReadOnlyTransactionCount(); + + Long getWriteOnlyTransactionCount(); + + Long getReadWriteTransactionCount(); + + Long getLastLogIndex(); + + Long getLastLogTerm(); + + Long getCurrentTerm(); + + Long getCommitIndex(); + + Long getLastApplied(); + + String getLastCommittedTransactionTime(); + + Long getFailedTransactionsCount(); + } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfo.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfo.java new file mode 100644 index 0000000000..0c609b459e --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfo.java @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager; + +import org.opendaylight.controller.cluster.datastore.jmx.mbeans.AbstractBaseMBean; + +import java.util.List; + +public class ShardManagerInfo extends AbstractBaseMBean implements + ShardManagerInfoMBean { + + private final String name; + private final List localShards; + + public ShardManagerInfo(String name, List localShards) { + this.name = name; + this.localShards = localShards; + } + + + @Override protected String getMBeanName() { + return name; + } + + @Override protected String getMBeanType() { + return JMX_TYPE_DISTRIBUTED_DATASTORE; + } + + @Override protected String getMBeanCategory() { + return JMX_CATEGORY_SHARD_MANAGER; + } + + public static ShardManagerInfo createShardManagerMBean(String name, List localShards){ + ShardManagerInfo shardManagerInfo = new ShardManagerInfo(name, + localShards); + + shardManagerInfo.registerMBean(); + + return shardManagerInfo; + } + + @Override public List getLocalShards() { + return localShards; + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfoMBean.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfoMBean.java new file mode 100644 index 0000000000..28ccc4f0b3 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shardmanager/ShardManagerInfoMBean.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.datastore.jmx.mbeans.shardmanager; + +import java.util.List; + +public interface ShardManagerInfoMBean { + List getLocalShards(); +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransaction.java index 4515bd7042..c639064036 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransaction.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages; public class AbortTransaction implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransaction.class; + public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransaction.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransactionReply.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransactionReply.java index 31a06fe4c5..88e26401f7 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransactionReply.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/AbortTransactionReply.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages; public class AbortTransactionReply implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransactionReply.class; + public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.AbortTransactionReply.class; @Override diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CanCommitTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CanCommitTransaction.java index 2c032aff65..08f81c121f 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CanCommitTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CanCommitTransaction.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages; public class CanCommitTransaction implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = 
ThreePhaseCommitCohortMessages.CanCommitTransaction.class; + public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CanCommitTransaction.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CloseDataChangeListenerRegistration.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CloseDataChangeListenerRegistration.java index 57237bcbe2..a54ee6209c 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CloseDataChangeListenerRegistration.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CloseDataChangeListenerRegistration.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.registration.ListenerRegistrationMessages; public class CloseDataChangeListenerRegistration implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.CloseDataChangeListenerRegistration.class; + public static final Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.CloseDataChangeListenerRegistration.class; @Override public Object toSerializable() { return ListenerRegistrationMessages.CloseDataChangeListenerRegistration.newBuilder().build(); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransaction.java index 14187139aa..92138a769c 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransaction.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages; public class CommitTransaction implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransaction.class; + public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransaction.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransactionReply.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransactionReply.java index afeba29879..5751b71037 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransactionReply.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CommitTransactionReply.java @@ -12,7 +12,7 @@ import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommit public class CommitTransactionReply implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransactionReply.class; + public 
static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.CommitTransactionReply.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransaction.java index b27ad86be9..d5c9e21611 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransaction.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransaction.java @@ -13,7 +13,7 @@ import org.opendaylight.controller.protobuff.messages.transaction.ShardTransacti public class CreateTransaction implements SerializableMessage { - public static Class SERIALIZABLE_CLASS = ShardTransactionMessages.CreateTransaction.class; + public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.CreateTransaction.class; private final String transactionId; private final int transactionType; diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransactionChain.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransactionChain.java index 6339749f7b..8dd04e540e 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransactionChain.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/CreateTransactionChain.java @@ -11,7 +11,7 @@ package org.opendaylight.controller.cluster.datastore.messages; import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages; public class CreateTransactionChain implements SerializableMessage{ - public static Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChain.class; + public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CreateTransactionChain.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExists.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExists.java new file mode 100644 index 0000000000..d52daabd84 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExists.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
+import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+
+public class DataExists implements SerializableMessage{
+
+    public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DataExists.class;
+
+    private final YangInstanceIdentifier path;
+
+    public DataExists(YangInstanceIdentifier path) {
+        this.path = path;
+    }
+
+    public YangInstanceIdentifier getPath() {
+        return path;
+    }
+
+    @Override public Object toSerializable() {
+        return ShardTransactionMessages.DataExists.newBuilder()
+            .setInstanceIdentifierPathArguments(
+                InstanceIdentifierUtils.toSerializable(path)).build();
+    }
+
+    public static DataExists fromSerializable(Object serializable){
+        ShardTransactionMessages.DataExists o = (ShardTransactionMessages.DataExists) serializable;
+        return new DataExists(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
+    }
+
+}
diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExistsReply.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExistsReply.java
new file mode 100644
index 0000000000..04fafa10e2
--- /dev/null
+++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DataExistsReply.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.cluster.datastore.messages;
+
+import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
+
+public class DataExistsReply implements SerializableMessage{
+
+
+    public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DataExistsReply.class;
+
+    private final boolean exists;
+
+    public DataExistsReply(boolean exists) {
+        this.exists = exists;
+    }
+
+    public boolean exists() {
+        return exists;
+    }
+
+    @Override public Object toSerializable() {
+        return ShardTransactionMessages.DataExistsReply.newBuilder()
+            .setExists(exists).build();
+    }
+
+    public static DataExistsReply fromSerializable(Object serializable){
+        ShardTransactionMessages.DataExistsReply o = (ShardTransactionMessages.DataExistsReply) serializable;
+        return new DataExistsReply(o.getExists());
+    }
+
+}
diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PeerAddressResolved.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PeerAddressResolved.java
index 8c2543e486..346519ed5a 100644
--- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PeerAddressResolved.java
+++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PeerAddressResolved.java
@@ -8,16 +8,18 @@ package org.opendaylight.controller.cluster.datastore.messages;
+import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+
 public class PeerAddressResolved {
-    private final String peerId;
+    private final ShardIdentifier peerId;
     private final String peerAddress;
-    public PeerAddressResolved(String peerId, String peerAddress) {
+    public PeerAddressResolved(ShardIdentifier peerId, String peerAddress) {
         this.peerId = peerId;
         this.peerAddress = peerAddress;
     }
-    public String getPeerId() {
+    public ShardIdentifier getPeerId() {
         return peerId;
     }
diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransaction.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransaction.java
index 1e5a05329b..dae4cec3c3 100644
--- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransaction.java
+++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransaction.java
@@ -12,7 +12,7 @@ import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommit
 public class PreCommitTransaction implements SerializableMessage{
-    public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransaction.class;
+    public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransaction.class;
 @Override public Object toSerializable() {
diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransactionReply.java
b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransactionReply.java index 1aedae3ae7..fc07bfcb4b 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransactionReply.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/PreCommitTransactionReply.java @@ -12,7 +12,7 @@ import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommit public class PreCommitTransactionReply implements SerializableMessage{ - public static Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransactionReply.class; + public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransactionReply.class; @Override public Object toSerializable() { diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/InstanceIdentifierUtils.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/InstanceIdentifierUtils.java index 20268a6744..c154b81e35 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/InstanceIdentifierUtils.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/InstanceIdentifierUtils.java @@ -18,19 +18,21 @@ public class InstanceIdentifierUtils { .getLogger(InstanceIdentifierUtils.class); public static String getParentPath(String currentElementPath) { - String parentPath = ""; + + StringBuilder parentPath = new StringBuilder(); if (currentElementPath != null) { String[] parentPaths = currentElementPath.split("/"); if (parentPaths.length > 2) { for (int i = 0; i < parentPaths.length - 1; i++) { if (parentPaths[i].length() > 0) { - parentPath += "/" + parentPaths[i]; + parentPath.append( "/"); + parentPath.append( parentPaths[i]); } } } } - return parentPath; + return parentPath.toString(); } @Deprecated diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/resources/application.conf b/opendaylight/md-sal/sal-distributed-datastore/src/main/resources/application.conf index daac89c4c8..8af9bd07d7 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/resources/application.conf +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/resources/application.conf @@ -1,6 +1,7 @@ odl-cluster-data { akka { + loggers = ["akka.event.slf4j.Slf4jLogger"] cluster { roles = [ "member-1" @@ -23,7 +24,7 @@ odl-cluster-data { netty.tcp { hostname = "127.0.0.1" port = 2550 - maximum-frame-size = 2097152 + maximum-frame-size = 419430400 send-buffer-size = 52428800 receive-buffer-size = 52428800 } @@ -39,6 +40,7 @@ odl-cluster-data { odl-cluster-rpc { akka { + loggers = ["akka.event.slf4j.Slf4jLogger"] actor { provider = "akka.cluster.ClusterActorRefProvider" diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/BasicIntegrationTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/BasicIntegrationTest.java index 6599bd8eeb..319451f8f0 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/BasicIntegrationTest.java +++ 
b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/BasicIntegrationTest.java @@ -14,8 +14,8 @@ import akka.actor.ActorSelection; import akka.actor.Props; import akka.event.Logging; import akka.testkit.JavaTestKit; -import junit.framework.Assert; import org.junit.Test; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier; import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction; import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction; import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionChain; @@ -37,6 +37,8 @@ import scala.concurrent.duration.FiniteDuration; import java.util.Collections; import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertTrue; +import static org.junit.Assert.assertNotNull; public class BasicIntegrationTest extends AbstractActorTest { @@ -52,7 +54,11 @@ public class BasicIntegrationTest extends AbstractActorTest { new JavaTestKit(getSystem()) {{ - final Props props = Shard.props("config", Collections.EMPTY_MAP); + final ShardIdentifier identifier = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + final Props props = Shard.props(identifier, Collections.EMPTY_MAP); final ActorRef shard = getSystem().actorOf(props); new Within(duration("5 seconds")) { @@ -95,7 +101,7 @@ public class BasicIntegrationTest extends AbstractActorTest { } }.get(); // this extracts the received message - Assert.assertNotNull(transactionChain); + assertNotNull(transactionChain); System.out.println("Successfully created transaction chain"); @@ -116,7 +122,7 @@ public class BasicIntegrationTest extends AbstractActorTest { } }.get(); // this extracts the received message - Assert.assertNotNull(transaction); + assertNotNull(transaction); System.out.println("Successfully created transaction"); @@ -135,7 +141,7 @@ public class BasicIntegrationTest extends AbstractActorTest { } }.get(); // this extracts the received message - Assert.assertTrue(writeDone); + assertTrue(writeDone); System.out.println("Successfully wrote data"); @@ -158,7 +164,7 @@ public class BasicIntegrationTest extends AbstractActorTest { } }.get(); // this extracts the received message - Assert.assertNotNull(cohort); + assertNotNull(cohort); System.out.println("Successfully readied the transaction"); @@ -177,7 +183,7 @@ public class BasicIntegrationTest extends AbstractActorTest { } }.get(); // this extracts the received message - Assert.assertTrue(preCommitDone); + assertTrue(preCommitDone); System.out.println("Successfully pre-committed the transaction"); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ConfigurationImplTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ConfigurationImplTest.java index 56fd3c568a..17329611b0 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ConfigurationImplTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ConfigurationImplTest.java @@ -8,6 +8,8 @@ import org.junit.Test; import java.io.File; import java.util.List; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class 
ConfigurationImplTest { @@ -31,6 +33,49 @@ public class ConfigurationImplTest { assertTrue(memberShardNames.contains("people-1")); assertTrue(memberShardNames.contains("cars-1")); + + // Retrieve once again to hit cache + + memberShardNames = + configuration.getMemberShardNames("member-1"); + + assertTrue(memberShardNames.contains("people-1")); + assertTrue(memberShardNames.contains("cars-1")); + + } + + @Test + public void testGetMembersFromShardName(){ + List members = + configuration.getMembersFromShardName("default"); + + assertEquals(3, members.size()); + + assertTrue(members.contains("member-1")); + assertTrue(members.contains("member-2")); + assertTrue(members.contains("member-3")); + + assertFalse(members.contains("member-26")); + + // Retrieve once again to hit cache + members = + configuration.getMembersFromShardName("default"); + + assertEquals(3, members.size()); + + assertTrue(members.contains("member-1")); + assertTrue(members.contains("member-2")); + assertTrue(members.contains("member-3")); + + assertFalse(members.contains("member-26")); + + + // Try to find a shard which is not present + + members = + configuration.getMembersFromShardName("foobar"); + + assertEquals(0, members.size()); } @Test diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerRegistrationTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerRegistrationTest.java index 920248521a..eb2c24292a 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerRegistrationTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DataChangeListenerRegistrationTest.java @@ -21,7 +21,8 @@ import static org.junit.Assert.assertEquals; public class DataChangeListenerRegistrationTest extends AbstractActorTest { private static ListeningExecutorService storeExecutor = MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor()); - private static final InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", storeExecutor); + private static final InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", storeExecutor, + MoreExecutors.sameThreadExecutor()); static { store.onGlobalContextUpdated(TestModel.createTestContext()); @@ -37,12 +38,14 @@ public class DataChangeListenerRegistrationTest extends AbstractActorTest { final ActorRef subject = getSystem().actorOf(props, "testCloseListenerRegistration"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new CloseDataChangeListenerRegistration().toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(CloseDataChangeListenerRegistrationReply.SERIALIZABLE_CLASS)) { return "match"; diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreTest.java index d1beab9049..406f0ffd9e 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreTest.java +++ 
b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreTest.java @@ -1,8 +1,11 @@ package org.opendaylight.controller.cluster.datastore; import akka.actor.ActorRef; +import akka.actor.ActorSystem; import akka.actor.Props; -import junit.framework.Assert; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply; import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory; import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor; @@ -21,13 +24,20 @@ import org.opendaylight.yangtools.concepts.ListenerRegistration; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + public class DistributedDataStoreTest extends AbstractActorTest{ private DistributedDataStore distributedDataStore; private MockActorContext mockActorContext; private ActorRef doNothingActorRef; - @org.junit.Before + @Before public void setUp() throws Exception { ShardStrategyFactory.setConfiguration(new MockConfiguration()); final Props props = Props.create(DoNothingActor.class); @@ -35,7 +45,7 @@ public class DistributedDataStoreTest extends AbstractActorTest{ doNothingActorRef = getSystem().actorOf(props); mockActorContext = new MockActorContext(getSystem(), doNothingActorRef); - distributedDataStore = new DistributedDataStore(mockActorContext, "config"); + distributedDataStore = new DistributedDataStore(mockActorContext); distributedDataStore.onGlobalContextUpdated( TestModel.createTestContext()); @@ -48,12 +58,22 @@ public class DistributedDataStoreTest extends AbstractActorTest{ .build()); } - @org.junit.After + @After public void tearDown() throws Exception { } - @org.junit.Test + @Test + public void testConstructor(){ + ActorSystem actorSystem = mock(ActorSystem.class); + + new DistributedDataStore(actorSystem, "config", + mock(ClusterWrapper.class), mock(Configuration.class)); + + verify(actorSystem).actorOf(any(Props.class), eq("shardmanager-config")); + } + + @Test public void testRegisterChangeListenerWhenShardIsNotLocal() throws Exception { ListenerRegistration registration = @@ -65,12 +85,12 @@ public class DistributedDataStoreTest extends AbstractActorTest{ }, AsyncDataBroker.DataChangeScope.BASE); // Since we do not expect the shard to be local registration will return a NoOpRegistration - Assert.assertTrue(registration instanceof NoOpDataChangeListenerRegistration); + assertTrue(registration instanceof NoOpDataChangeListenerRegistration); - Assert.assertNotNull(registration); + assertNotNull(registration); } - @org.junit.Test + @Test public void testRegisterChangeListenerWhenShardIsLocal() throws Exception { mockActorContext.setExecuteLocalShardOperationResponse(new RegisterChangeListenerReply(doNothingActorRef.path())); @@ -83,33 +103,33 @@ public class DistributedDataStoreTest extends AbstractActorTest{ } }, AsyncDataBroker.DataChangeScope.BASE); - Assert.assertTrue(registration instanceof DataChangeListenerRegistrationProxy); + assertTrue(registration instanceof DataChangeListenerRegistrationProxy); - Assert.assertNotNull(registration); + 
assertNotNull(registration); } - @org.junit.Test + @Test public void testCreateTransactionChain() throws Exception { final DOMStoreTransactionChain transactionChain = distributedDataStore.createTransactionChain(); - Assert.assertNotNull(transactionChain); + assertNotNull(transactionChain); } - @org.junit.Test + @Test public void testNewReadOnlyTransaction() throws Exception { final DOMStoreReadTransaction transaction = distributedDataStore.newReadOnlyTransaction(); - Assert.assertNotNull(transaction); + assertNotNull(transaction); } - @org.junit.Test + @Test public void testNewWriteOnlyTransaction() throws Exception { final DOMStoreWriteTransaction transaction = distributedDataStore.newWriteOnlyTransaction(); - Assert.assertNotNull(transaction); + assertNotNull(transaction); } - @org.junit.Test + @Test public void testNewReadWriteTransaction() throws Exception { final DOMStoreReadWriteTransaction transaction = distributedDataStore.newReadWriteTransaction(); - Assert.assertNotNull(transaction); + assertNotNull(transaction); } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java index 431a266b14..0d86ffb844 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java @@ -6,6 +6,7 @@ import akka.event.Logging; import akka.testkit.JavaTestKit; import junit.framework.Assert; import org.junit.Test; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier; import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction; import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionChain; import org.opendaylight.controller.cluster.datastore.messages.CreateTransactionChainReply; @@ -35,7 +36,11 @@ public class ShardTest extends AbstractActorTest { @Test public void testOnReceiveCreateTransactionChain() throws Exception { new JavaTestKit(getSystem()) {{ - final Props props = Shard.props("config", Collections.EMPTY_MAP); + final ShardIdentifier identifier = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + final Props props = Shard.props(identifier, Collections.EMPTY_MAP); final ActorRef subject = getSystem().actorOf(props, "testCreateTransactionChain"); @@ -87,7 +92,11 @@ public class ShardTest extends AbstractActorTest { @Test public void testOnReceiveRegisterListener() throws Exception { new JavaTestKit(getSystem()) {{ - final Props props = Shard.props("config", Collections.EMPTY_MAP); + final ShardIdentifier identifier = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + final Props props = Shard.props(identifier, Collections.EMPTY_MAP); final ActorRef subject = getSystem().actorOf(props, "testRegisterChangeListener"); @@ -141,7 +150,11 @@ public class ShardTest extends AbstractActorTest { @Test public void testCreateTransaction(){ new JavaTestKit(getSystem()) {{ - final Props props = Shard.props("config", Collections.EMPTY_MAP); + final ShardIdentifier identifier = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + final Props props = Shard.props(identifier, 
Collections.EMPTY_MAP); final ActorRef subject = getSystem().actorOf(props, "testCreateTransaction"); @@ -196,9 +209,14 @@ public class ShardTest extends AbstractActorTest { @Test public void testPeerAddressResolved(){ new JavaTestKit(getSystem()) {{ - Map peerAddresses = new HashMap<>(); - peerAddresses.put("member-2", null); - final Props props = Shard.props("config", peerAddresses); + Map peerAddresses = new HashMap<>(); + + final ShardIdentifier identifier = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + peerAddresses.put(identifier, null); + final Props props = Shard.props(identifier, peerAddresses); final ActorRef subject = getSystem().actorOf(props, "testPeerAddressResolved"); @@ -206,7 +224,7 @@ public class ShardTest extends AbstractActorTest { protected void run() { subject.tell( - new PeerAddressResolved("member-2", "akka://foobar"), + new PeerAddressResolved(identifier, "akka://foobar"), getRef()); expectNoMsg(); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChainTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChainTest.java index b35880a6a5..d468af6664 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChainTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionChainTest.java @@ -19,7 +19,8 @@ public class ShardTransactionChainTest extends AbstractActorTest { private static ListeningExecutorService storeExecutor = MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor()); - private static final InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", storeExecutor); + private static final InMemoryDOMDataStore store = new InMemoryDOMDataStore("OPER", storeExecutor, + MoreExecutors.sameThreadExecutor()); static { store.onGlobalContextUpdated(TestModel.createTestContext()); @@ -31,12 +32,14 @@ public class ShardTransactionChainTest extends AbstractActorTest { final ActorRef subject = getSystem().actorOf(props, "testCreateTransaction"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new CreateTransaction("txn-1", TransactionProxy.TransactionType.READ_ONLY.ordinal() ).toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) { return CreateTransactionReply.fromSerializable(in).getTransactionPath(); @@ -66,12 +69,14 @@ public class ShardTransactionChainTest extends AbstractActorTest { final ActorRef subject = getSystem().actorOf(props, "testCloseTransactionChain"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new CloseTransactionChain().toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(CloseTransactionChainReply.SERIALIZABLE_CLASS)) { return "match"; diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionFailureTest.java 
b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionFailureTest.java new file mode 100644 index 0000000000..02ceee82e0 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionFailureTest.java @@ -0,0 +1,313 @@ +/* + * + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + * + */ + +package org.opendaylight.controller.cluster.datastore; + +import akka.actor.ActorRef; +import akka.actor.Props; +import akka.testkit.TestActorRef; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import org.junit.Test; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier; +import org.opendaylight.controller.md.cluster.datastore.model.TestModel; +import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; +import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore; +import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages; +import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; + +import java.util.Collections; + +import static org.junit.Assert.assertTrue; + +/** + * Covers negative test cases + * @author Basheeruddin Ahmed + */ +public class ShardTransactionFailureTest extends AbstractActorTest { + private static ListeningExecutorService storeExecutor = + MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor()); + + private static final InMemoryDOMDataStore store = + new InMemoryDOMDataStore("OPER", storeExecutor, + MoreExecutors.sameThreadExecutor()); + + private static final SchemaContext testSchemaContext = + TestModel.createTestContext(); + + private static final ShardIdentifier SHARD_IDENTIFIER = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + static { + store.onGlobalContextUpdated(testSchemaContext); + } + + + @Test(expected = ReadFailedException.class) + public void testNegativeReadWithReadOnlyTransactionClosed() + throws Throwable { + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadOnlyTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeReadWithReadOnlyTransactionClosed"); + + ShardTransactionMessages.ReadData readData = + ShardTransactionMessages.ReadData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build() + ).build(); + Future future = + akka.pattern.Patterns.ask(subject, readData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ((ShardReadTransaction) subject.underlyingActor()) + .forUnitTestOnlyExplicitTransactionClose(); + + future = akka.pattern.Patterns.ask(subject, readData, 3000); + Await.result(future, Duration.Zero()); + + + } + + + 
@Test(expected = ReadFailedException.class) + public void testNegativeReadWithReadWriteOnlyTransactionClosed() + throws Throwable { + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadWriteTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeReadWithReadWriteOnlyTransactionClosed"); + + ShardTransactionMessages.ReadData readData = + ShardTransactionMessages.ReadData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build() + ).build(); + Future future = + akka.pattern.Patterns.ask(subject, readData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ((ShardReadWriteTransaction) subject.underlyingActor()) + .forUnitTestOnlyExplicitTransactionClose(); + + future = akka.pattern.Patterns.ask(subject, readData, 3000); + Await.result(future, Duration.Zero()); + + + } + + @Test(expected = ReadFailedException.class) + public void testNegativeExistsWithReadWriteOnlyTransactionClosed() + throws Throwable { + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadWriteTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeExistsWithReadWriteOnlyTransactionClosed"); + + ShardTransactionMessages.DataExists dataExists = + ShardTransactionMessages.DataExists.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build() + ).build(); + + Future future = + akka.pattern.Patterns.ask(subject, dataExists, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ((ShardReadWriteTransaction) subject.underlyingActor()) + .forUnitTestOnlyExplicitTransactionClose(); + + future = akka.pattern.Patterns.ask(subject, dataExists, 3000); + Await.result(future, Duration.Zero()); + + + } + + @Test(expected = IllegalStateException.class) + public void testNegativeWriteWithTransactionReady() throws Exception { + + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newWriteOnlyTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeWriteWithTransactionReady"); + + ShardTransactionMessages.ReadyTransaction readyTransaction = + ShardTransactionMessages.ReadyTransaction.newBuilder().build(); + + Future future = + akka.pattern.Patterns.ask(subject, readyTransaction, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ShardTransactionMessages.WriteData writeData = + ShardTransactionMessages.WriteData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build()).setNormalizedNode( + NormalizedNodeMessages.Node.newBuilder().build() + + ).build(); + + future = akka.pattern.Patterns.ask(subject, writeData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + + } + + + @Test(expected = IllegalStateException.class) + public void testNegativeReadWriteWithTransactionReady() throws Exception { + + + final ActorRef shard = + 
getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadWriteTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeReadWriteWithTransactionReady"); + + ShardTransactionMessages.ReadyTransaction readyTransaction = + ShardTransactionMessages.ReadyTransaction.newBuilder().build(); + + Future future = + akka.pattern.Patterns.ask(subject, readyTransaction, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ShardTransactionMessages.WriteData writeData = + ShardTransactionMessages.WriteData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build()).setNormalizedNode( + NormalizedNodeMessages.Node.newBuilder().build() + + ).build(); + + future = akka.pattern.Patterns.ask(subject, writeData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + + } + + @Test(expected = IllegalStateException.class) + public void testNegativeMergeTransactionReady() throws Exception { + + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadWriteTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, "testNegativeMergeTransactionReady"); + + ShardTransactionMessages.ReadyTransaction readyTransaction = + ShardTransactionMessages.ReadyTransaction.newBuilder().build(); + + Future future = + akka.pattern.Patterns.ask(subject, readyTransaction, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ShardTransactionMessages.MergeData mergeData = + ShardTransactionMessages.MergeData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build()).setNormalizedNode( + NormalizedNodeMessages.Node.newBuilder().build() + + ).build(); + + future = akka.pattern.Patterns.ask(subject, mergeData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + + } + + + @Test(expected = IllegalStateException.class) + public void testNegativeDeleteDataWhenTransactionReady() throws Exception { + + + final ActorRef shard = + getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadWriteTransaction(), shard, + TestModel.createTestContext()); + + final TestActorRef subject = TestActorRef + .create(getSystem(), props, + "testNegativeDeleteDataWhenTransactionReady"); + + ShardTransactionMessages.ReadyTransaction readyTransaction = + ShardTransactionMessages.ReadyTransaction.newBuilder().build(); + + Future future = + akka.pattern.Patterns.ask(subject, readyTransaction, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + ShardTransactionMessages.DeleteData deleteData = + ShardTransactionMessages.DeleteData.newBuilder() + .setInstanceIdentifierPathArguments( + NormalizedNodeMessages.InstanceIdentifier.newBuilder() + .build()).build(); + + future = akka.pattern.Patterns.ask(subject, deleteData, 3000); + assertTrue(future.isCompleted()); + Await.result(future, Duration.Zero()); + + + } + + +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionTest.java 
b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionTest.java index 632ecc29cd..78895b2366 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionTest.java @@ -9,8 +9,12 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import org.junit.Assert; import org.junit.Test; +import org.opendaylight.controller.cluster.datastore.exceptions.UnknownMessageException; +import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply; +import org.opendaylight.controller.cluster.datastore.messages.DataExists; +import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply; import org.opendaylight.controller.cluster.datastore.messages.DeleteData; import org.opendaylight.controller.cluster.datastore.messages.DeleteDataReply; import org.opendaylight.controller.cluster.datastore.messages.MergeData; @@ -42,10 +46,15 @@ public class ShardTransactionTest extends AbstractActorTest { MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor()); private static final InMemoryDOMDataStore store = - new InMemoryDOMDataStore("OPER", storeExecutor); + new InMemoryDOMDataStore("OPER", storeExecutor, MoreExecutors.sameThreadExecutor()); private static final SchemaContext testSchemaContext = TestModel.createTestContext(); + private static final ShardIdentifier SHARD_IDENTIFIER = + ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + static { store.onGlobalContextUpdated(testSchemaContext); } @@ -53,12 +62,13 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveReadData() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext); final ActorRef subject = getSystem().actorOf(props, "testReadData"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell( @@ -67,6 +77,7 @@ public class ShardTransactionTest extends AbstractActorTest { final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) { if (ReadDataReply.fromSerializable(testSchemaContext,YangInstanceIdentifier.builder().build(), in) @@ -93,12 +104,13 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveReadDataWhenDataNotFound() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props( store.newReadOnlyTransaction(), shard, testSchemaContext); final ActorRef 
subject = getSystem().actorOf(props, "testReadDataWhenDataNotFound"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell( @@ -107,6 +119,7 @@ public class ShardTransactionTest extends AbstractActorTest { final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) { if (ReadDataReply.fromSerializable(testSchemaContext,TestModel.TEST_PATH, in) @@ -131,10 +144,95 @@ public class ShardTransactionTest extends AbstractActorTest { }}; } + @Test + public void testOnReceiveDataExistsPositive() throws Exception { + new JavaTestKit(getSystem()) {{ + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext); + final ActorRef subject = getSystem().actorOf(props, "testDataExistsPositive"); + + new Within(duration("1 seconds")) { + @Override + protected void run() { + + subject.tell( + new DataExists(YangInstanceIdentifier.builder().build()).toSerializable(), + getRef()); + + final String out = new ExpectMsg(duration("1 seconds"), "match hint") { + // do not put code outside this method, will run afterwards + @Override + protected String match(Object in) { + if (in.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) { + if (DataExistsReply.fromSerializable(in) + .exists()) { + return "match"; + } + return null; + } else { + throw noMatch(); + } + } + }.get(); // this extracts the received message + + assertEquals("match", out); + + expectNoMsg(); + } + + + }; + }}; + } + + @Test + public void testOnReceiveDataExistsNegative() throws Exception { + new JavaTestKit(getSystem()) {{ + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); + final Props props = + ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext); + final ActorRef subject = getSystem().actorOf(props, "testDataExistsNegative"); + + new Within(duration("1 seconds")) { + @Override + protected void run() { + + subject.tell( + new DataExists(TestModel.TEST_PATH).toSerializable(), + getRef()); + + final String out = new ExpectMsg(duration("1 seconds"), "match hint") { + // do not put code outside this method, will run afterwards + @Override + protected String match(Object in) { + if (in.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) { + if (!DataExistsReply.fromSerializable(in) + .exists()) { + return "match"; + } + return null; + } else { + throw noMatch(); + } + } + }.get(); // this extracts the received message + + assertEquals("match", out); + + expectNoMsg(); + } + + + }; + }}; + } + private void assertModification(final ActorRef subject, final Class modificationType) { new JavaTestKit(getSystem()) {{ new Within(duration("1 seconds")) { + @Override protected void run() { subject .tell(new ShardTransaction.GetCompositedModification(), @@ -143,6 +241,7 @@ public class ShardTransactionTest extends AbstractActorTest { final CompositeModification compositeModification = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected CompositeModification match(Object in) { if (in instanceof ShardTransaction.GetCompositeModificationReply) { return ((ShardTransaction.GetCompositeModificationReply) in) @@ -167,13 +266,14 @@ public class 
ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveWriteData() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard, TestModel.createTestContext()); final ActorRef subject = getSystem().actorOf(props, "testWriteData"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new WriteData(TestModel.TEST_PATH, @@ -182,6 +282,7 @@ public class ShardTransactionTest extends AbstractActorTest { final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(WriteDataReply.SERIALIZABLE_CLASS)) { return "match"; @@ -205,13 +306,14 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveMergeData() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard, testSchemaContext); final ActorRef subject = getSystem().actorOf(props, "testMergeData"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new MergeData(TestModel.TEST_PATH, @@ -220,6 +322,7 @@ public class ShardTransactionTest extends AbstractActorTest { final String out = new ExpectMsg(duration("500 milliseconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(MergeDataReply.SERIALIZABLE_CLASS)) { return "match"; @@ -244,19 +347,21 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveDeleteData() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props( store.newWriteOnlyTransaction(), shard, TestModel.createTestContext()); final ActorRef subject = getSystem().actorOf(props, "testDeleteData"); new Within(duration("1 seconds")) { + @Override protected void run() { subject.tell(new DeleteData(TestModel.TEST_PATH).toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(DeleteDataReply.SERIALIZABLE_CLASS)) { return "match"; @@ -281,19 +386,21 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveReadyTransaction() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props( store.newReadWriteTransaction(), shard, TestModel.createTestContext()); final ActorRef subject = getSystem().actorOf(props, "testReadyTransaction"); new Within(duration("1 
seconds")) { + @Override protected void run() { subject.tell(new ReadyTransaction().toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)) { return "match"; @@ -317,7 +424,7 @@ public class ShardTransactionTest extends AbstractActorTest { @Test public void testOnReceiveCloseTransaction() throws Exception { new JavaTestKit(getSystem()) {{ - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard, TestModel.createTestContext()); final ActorRef subject = @@ -326,12 +433,14 @@ public class ShardTransactionTest extends AbstractActorTest { watch(subject); new Within(duration("2 seconds")) { + @Override protected void run() { subject.tell(new CloseTransaction().toSerializable(), getRef()); final String out = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in.getClass().equals(CloseTransactionReply.SERIALIZABLE_CLASS)) { return "match"; @@ -345,6 +454,7 @@ public class ShardTransactionTest extends AbstractActorTest { final String termination = new ExpectMsg(duration("1 seconds"), "match hint") { // do not put code outside this method, will run afterwards + @Override protected String match(Object in) { if (in instanceof Terminated) { return "match"; @@ -369,7 +479,7 @@ public class ShardTransactionTest extends AbstractActorTest { public void testNegativePerformingWriteOperationOnReadTransaction() throws Exception { try { - final ActorRef shard = getSystem().actorOf(Shard.props("config", Collections.EMPTY_MAP)); + final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP)); final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard, TestModel.createTestContext()); final TestActorRef subject = TestActorRef.apply(props,getSystem()); @@ -379,8 +489,8 @@ public class ShardTransactionTest extends AbstractActorTest { } catch (Exception cs) { - assertEquals(cs.getClass().getSimpleName(), Exception.class.getSimpleName()); - assertTrue(cs.getMessage().startsWith("ShardTransaction:handleRecieve received an unknown message")); + assertEquals(UnknownMessageException.class.getSimpleName(), cs.getClass().getSimpleName()); + assertTrue(cs.getMessage(), cs.getMessage().startsWith("Unknown message received ")); } } } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java index 0cd029c2ff..62052f38ab 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java @@ -2,20 +2,19 @@ package org.opendaylight.controller.cluster.datastore; import akka.actor.ActorRef; import akka.actor.Props; - import com.google.common.base.Optional; +import 
com.google.common.util.concurrent.CheckedFuture; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; - import junit.framework.Assert; - import org.junit.After; import org.junit.Before; import org.junit.Test; import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException; import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException; import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction; +import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply; import org.opendaylight.controller.cluster.datastore.messages.DeleteData; import org.opendaylight.controller.cluster.datastore.messages.MergeData; import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound; @@ -30,6 +29,7 @@ import org.opendaylight.controller.cluster.datastore.utils.MockActorContext; import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper; import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration; import org.opendaylight.controller.md.cluster.datastore.model.TestModel; +import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.CreateTransactionReply; import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; @@ -81,6 +81,10 @@ public class TransactionProxyTest extends AbstractActorTest { TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext()); + actorContext.setExecuteRemoteOperationResponse( + new ReadDataReply(TestModel.createTestContext(), null) + .toSerializable()); + ListenableFuture>> read = transactionProxy.read(TestModel.TEST_PATH); @@ -99,7 +103,7 @@ public class TransactionProxyTest extends AbstractActorTest { } @Test - public void testReadWhenANullIsReturned() throws Exception { + public void testExists() throws Exception { final Props props = Props.create(DoNothingActor.class); final ActorRef actorRef = getSystem().actorOf(props); @@ -108,26 +112,57 @@ public class TransactionProxyTest extends AbstractActorTest { actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef)); actorContext.setExecuteRemoteOperationResponse("message"); + TransactionProxy transactionProxy = new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext()); - ListenableFuture>> read = - transactionProxy.read(TestModel.TEST_PATH); + actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(false).toSerializable()); - Optional> normalizedNodeOptional = read.get(); + CheckedFuture exists = + transactionProxy.exists(TestModel.TEST_PATH); - Assert.assertFalse(normalizedNodeOptional.isPresent()); + Assert.assertFalse(exists.checkedGet()); - actorContext.setExecuteRemoteOperationResponse(new ReadDataReply( - TestModel.createTestContext(), null).toSerializable()); + actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(true).toSerializable()); - read = transactionProxy.read(TestModel.TEST_PATH); + exists = transactionProxy.exists(TestModel.TEST_PATH); - normalizedNodeOptional = read.get(); + Assert.assertTrue(exists.checkedGet()); - Assert.assertFalse(normalizedNodeOptional.isPresent()); + 
actorContext.setExecuteRemoteOperationResponse("bad message"); + + exists = transactionProxy.exists(TestModel.TEST_PATH); + + try { + exists.checkedGet(); + fail(); + } catch(ReadFailedException e){ + } + + } + + @Test(expected = ReadFailedException.class) + public void testReadWhenAnInvalidMessageIsSentInReply() throws Exception { + final Props props = Props.create(DoNothingActor.class); + final ActorRef actorRef = getSystem().actorOf(props); + + final MockActorContext actorContext = new MockActorContext(this.getSystem()); + actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef)); + actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef)); + actorContext.setExecuteRemoteOperationResponse("message"); + + TransactionProxy transactionProxy = + new TransactionProxy(actorContext, + TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext()); + + + + CheckedFuture>, ReadFailedException> + read = transactionProxy.read(TestModel.TEST_PATH); + + read.checkedGet(); } @Test diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifierTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifierTest.java new file mode 100644 index 0000000000..afcd045434 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardIdentifierTest.java @@ -0,0 +1,18 @@ +package org.opendaylight.controller.cluster.datastore.identifiers; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class ShardIdentifierTest { + + @Test + public void testBasic(){ + ShardIdentifier id = ShardIdentifier.builder().memberName("member-1") + .shardName("inventory").type("config").build(); + + assertEquals("member-1-shard-inventory-config", id.toString()); + } + + +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifierTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifierTest.java new file mode 100644 index 0000000000..44bb4b3528 --- /dev/null +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/identifiers/ShardManagerIdentifierTest.java @@ -0,0 +1,14 @@ +package org.opendaylight.controller.cluster.datastore.identifiers; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + + public class ShardManagerIdentifierTest { + + @Test + public void testIdentifier(){ + assertEquals("shardmanager-operational", ShardManagerIdentifier.builder().type("operational").build().toString()); + } + +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsTest.java index f7c467652d..41adcc55b1 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/jmx/mbeans/shard/ShardStatsTest.java @@ 
-8,48 +8,86 @@ import org.opendaylight.controller.cluster.datastore.jmx.mbeans.AbstractBaseMBea import javax.management.MBeanServer; import javax.management.ObjectName; +import java.text.SimpleDateFormat; +import java.util.Date; public class ShardStatsTest { - private MBeanServer mbeanServer; - private ShardStats shardStats; - private ObjectName testMBeanName; + private MBeanServer mbeanServer; + private ShardStats shardStats; + private ObjectName testMBeanName; - @Before - public void setUp() throws Exception { + @Before + public void setUp() throws Exception { - shardStats = new ShardStats("shard-1"); - shardStats.registerMBean(); - mbeanServer= shardStats.getMBeanServer(); - String objectName = AbstractBaseMBean.BASE_JMX_PREFIX + "type="+shardStats.getMBeanType()+",Category="+ - shardStats.getMBeanCategory() + ",name="+ - shardStats.getMBeanName(); - testMBeanName = new ObjectName(objectName); - } + shardStats = new ShardStats("shard-1"); + shardStats.registerMBean(); + mbeanServer = shardStats.getMBeanServer(); + String objectName = + AbstractBaseMBean.BASE_JMX_PREFIX + "type=" + shardStats + .getMBeanType() + ",Category=" + + shardStats.getMBeanCategory() + ",name=" + + shardStats.getMBeanName(); + testMBeanName = new ObjectName(objectName); + } - @After - public void tearDown() throws Exception { - shardStats.unregisterMBean(); - } + @After + public void tearDown() throws Exception { + shardStats.unregisterMBean(); + } - @Test - public void testGetShardName() throws Exception { + @Test + public void testGetShardName() throws Exception { - Object attribute = mbeanServer.getAttribute(testMBeanName,"ShardName"); - Assert.assertEquals((String) attribute, "shard-1"); + Object attribute = mbeanServer.getAttribute(testMBeanName, "ShardName"); + Assert.assertEquals((String) attribute, "shard-1"); - } + } - @Test - public void testGetCommittedTransactionsCount() throws Exception { - //let us increment some transactions count and then check - shardStats.incrementCommittedTransactionCount(); - shardStats.incrementCommittedTransactionCount(); - shardStats.incrementCommittedTransactionCount(); + @Test + public void testGetCommittedTransactionsCount() throws Exception { + //let us increment some transactions count and then check + shardStats.incrementCommittedTransactionCount(); + shardStats.incrementCommittedTransactionCount(); + shardStats.incrementCommittedTransactionCount(); - //now let us get from MBeanServer what is the transaction count. - Object attribute = mbeanServer.getAttribute(testMBeanName,"CommittedTransactionsCount"); - Assert.assertEquals((Long) attribute, (Long)3L); + //now let us get from MBeanServer what is the transaction count. + Object attribute = mbeanServer.getAttribute(testMBeanName, + "CommittedTransactionsCount"); + Assert.assertEquals((Long) attribute, (Long) 3L); - } -} \ No newline at end of file + } + + @Test + public void testGetLastCommittedTransactionTime() throws Exception { + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); + Assert.assertEquals(shardStats.getLastCommittedTransactionTime(), + sdf.format(new Date(0L))); + long millis = System.currentTimeMillis(); + shardStats.setLastCommittedTransactionTime(new Date(millis)); + + //now let us get from MBeanServer what is the transaction count. 
+ Object attribute = mbeanServer.getAttribute(testMBeanName, + "LastCommittedTransactionTime"); + Assert.assertEquals((String) attribute, sdf.format(new Date(millis))); + Assert.assertNotEquals((String) attribute, + sdf.format(new Date(millis - 1))); + + } + + @Test + public void testGetFailedTransactionsCount() throws Exception { + //let us increment some transactions count and then check + shardStats.incrementFailedTransactionsCount(); + shardStats.incrementFailedTransactionsCount(); + + + //now let us get from MBeanServer what is the transaction count. + Object attribute = + mbeanServer.getAttribute(testMBeanName, "FailedTransactionsCount"); + Assert.assertEquals((Long) attribute, (Long) 2L); + + + + } +} diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/modification/AbstractModificationTest.java b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/modification/AbstractModificationTest.java index d9c550a6db..84f3b92f1b 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/modification/AbstractModificationTest.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/modification/AbstractModificationTest.java @@ -26,7 +26,8 @@ public abstract class AbstractModificationTest { @Before public void setUp(){ - store = new InMemoryDOMDataStore("test", MoreExecutors.sameThreadExecutor()); + store = new InMemoryDOMDataStore("test", MoreExecutors.sameThreadExecutor(), + MoreExecutors.sameThreadExecutor()); store.onGlobalContextUpdated(TestModel.createTestContext()); } diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/test/resources/application.conf b/opendaylight/md-sal/sal-distributed-datastore/src/test/resources/application.conf index eda1c304e4..27b0374bac 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/test/resources/application.conf +++ b/opendaylight/md-sal/sal-distributed-datastore/src/test/resources/application.conf @@ -1,5 +1,6 @@ akka { - loggers = [akka.testkit.TestEventListener] + loggers = ["akka.testkit.TestEventListener", "akka.event.slf4j.Slf4jLogger"] + actor { serializers { java = "akka.serialization.JavaSerializer" diff --git a/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/md/sal/dom/api/DOMDataReadTransaction.java b/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/md/sal/dom/api/DOMDataReadTransaction.java index fc251c8445..9b70f0c4d7 100644 --- a/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/md/sal/dom/api/DOMDataReadTransaction.java +++ b/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/md/sal/dom/api/DOMDataReadTransaction.java @@ -47,4 +47,22 @@ public interface DOMDataReadTransaction extends AsyncReadTransaction>, ReadFailedException> read( LogicalDatastoreType store, YangInstanceIdentifier path); + + /** + * Checks if data is available in the logical data store located at provided path + * + * @param path + * Path which uniquely identifies subtree which client want to + * check existence of + * @return a CheckFuture containing the result of the check. + *
+ * <ul>
+ * <li>If the data at the supplied path exists, the Future returns a Boolean
+ * whose value is true, false otherwise</li>
+ * <li>If checking for the data fails, the Future will fail with a
+ * {@link ReadFailedException} or an exception derived from ReadFailedException.</li>
+ * </ul>
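+ * <p>
+ * A possible usage sketch (illustrative only; {@code tx} is assumed to be an open read
+ * transaction and {@code path} a {@link YangInstanceIdentifier} supplied by the caller):
+ * <pre>{@code
+ * CheckedFuture<Boolean, ReadFailedException> existsFuture =
+ *         tx.exists(LogicalDatastoreType.OPERATIONAL, path);
+ * // checkedGet() returns the Boolean result, or throws ReadFailedException if the check failed
+ * if (existsFuture.checkedGet()) {
+ *     // the subtree at 'path' is present in the operational data store
+ * }
+ * }</pre>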
+ */ + CheckedFuture exists( + LogicalDatastoreType store, YangInstanceIdentifier path); + } diff --git a/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/sal/core/api/RpcImplementationUnavailableException.java b/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/sal/core/api/RpcImplementationUnavailableException.java new file mode 100644 index 0000000000..371082223a --- /dev/null +++ b/opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/sal/core/api/RpcImplementationUnavailableException.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.sal.core.api; + +/** + * Exception reported when no RPC implementation is found in the system. + */ +public class RpcImplementationUnavailableException extends RuntimeException { + private static final long serialVersionUID = 1L; + + public RpcImplementationUnavailableException(final String message) { + super(message); + } + + public RpcImplementationUnavailableException(final String message, final Throwable cause) { + super(message, cause); + } +} diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/config/yang/md/sal/dom/impl/DomInmemoryDataBrokerModule.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/config/yang/md/sal/dom/impl/DomInmemoryDataBrokerModule.java index 22dad6af23..948f3c8d8b 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/config/yang/md/sal/dom/impl/DomInmemoryDataBrokerModule.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/config/yang/md/sal/dom/impl/DomInmemoryDataBrokerModule.java @@ -7,18 +7,18 @@ */ package org.opendaylight.controller.config.yang.md.sal.dom.impl; -import java.util.concurrent.Executors; - +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitDeadlockException; import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataBrokerImpl; -import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore; +import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory; import org.opendaylight.controller.sal.core.spi.data.DOMStore; import org.opendaylight.yangtools.util.concurrent.DeadlockDetectingListeningExecutorService; +import org.opendaylight.yangtools.util.concurrent.SpecialExecutors; +import org.opendaylight.yangtools.util.PropertyUtils; import com.google.common.collect.ImmutableMap; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; /** * @@ -26,6 +26,17 @@ import com.google.common.util.concurrent.MoreExecutors; public final class DomInmemoryDataBrokerModule extends org.opendaylight.controller.config.yang.md.sal.dom.impl.AbstractDomInmemoryDataBrokerModule { + private static final String FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE_PROP = + "mdsal.datastore-future-callback-queue.size"; + private static final int DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE = 1000; + + private static final 
String FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE_PROP = + "mdsal.datastore-future-callback-pool.size"; + private static final int DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE = 20; + private static final String COMMIT_EXECUTOR_MAX_QUEUE_SIZE_PROP = + "mdsal.datastore-commit-queue.size"; + private static final int DEFAULT_COMMIT_EXECUTOR_MAX_QUEUE_SIZE = 5000; + public DomInmemoryDataBrokerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) { super(identifier, dependencyResolver); @@ -45,30 +56,55 @@ public final class DomInmemoryDataBrokerModule extends @Override public java.lang.AutoCloseable createInstance() { - ListeningExecutorService storeExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(2)); //Initializing Operational DOM DataStore defaulting to InMemoryDOMDataStore if one is not configured DOMStore operStore = getOperationalDataStoreDependency(); if(operStore == null){ //we will default to InMemoryDOMDataStore creation - operStore = new InMemoryDOMDataStore("DOM-OPER", storeExecutor); - //here we will register the SchemaContext listener - getSchemaServiceDependency().registerSchemaContextListener((InMemoryDOMDataStore)operStore); + operStore = InMemoryDOMDataStoreFactory.create("DOM-OPER", getSchemaServiceDependency()); } DOMStore configStore = getConfigDataStoreDependency(); if(configStore == null){ //we will default to InMemoryDOMDataStore creation - configStore = new InMemoryDOMDataStore("DOM-CFG", storeExecutor); - //here we will register the SchemaContext listener - getSchemaServiceDependency().registerSchemaContextListener((InMemoryDOMDataStore)configStore); + configStore = InMemoryDOMDataStoreFactory.create("DOM-CFG", getSchemaServiceDependency()); } ImmutableMap datastores = ImmutableMap . builder().put(LogicalDatastoreType.OPERATIONAL, operStore) .put(LogicalDatastoreType.CONFIGURATION, configStore).build(); + /* + * We use a single-threaded executor for commits with a bounded queue capacity. If the + * queue capacity is reached, subsequent commit tasks will be rejected and the commits will + * fail. This is done to relieve back pressure. This should be an extreme scenario - either + * there's deadlock(s) somewhere and the controller is unstable or some rogue component is + * continuously hammering commits too fast or the controller is just over-capacity for the + * system it's running on. + */ + ExecutorService commitExecutor = SpecialExecutors.newBoundedSingleThreadExecutor( + PropertyUtils.getIntSystemProperty( + COMMIT_EXECUTOR_MAX_QUEUE_SIZE_PROP, + DEFAULT_COMMIT_EXECUTOR_MAX_QUEUE_SIZE), "WriteTxCommit"); + + /* + * We use an executor for commit ListenableFuture callbacks that favors reusing available + * threads over creating new threads at the expense of execution time. The assumption is + * that most ListenableFuture callbacks won't execute a lot of business logic where we want + * it to run quicker - many callbacks will likely just handle error conditions and do + * nothing on success. The executor queue capacity is bounded and, if the capacity is + * reached, subsequent submitted tasks will block the caller. 
+ */ + Executor listenableFutureExecutor = SpecialExecutors.newBlockingBoundedCachedThreadPool( + PropertyUtils.getIntSystemProperty( + FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE_PROP, + DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE), + PropertyUtils.getIntSystemProperty( + FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE_PROP, + DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE), "CommitFutures"); + DOMDataBrokerImpl newDataBroker = new DOMDataBrokerImpl(datastores, - new DeadlockDetectingListeningExecutorService(Executors.newSingleThreadExecutor(), - TransactionCommitDeadlockException.DEADLOCK_EXECUTOR_FUNCTION)); + new DeadlockDetectingListeningExecutorService(commitExecutor, + TransactionCommitDeadlockException.DEADLOCK_EXECUTOR_FUNCTION, + listenableFutureExecutor)); return newDataBroker; } diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMDataCommitCoordinatorImpl.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMDataCommitCoordinatorImpl.java index 9a6d12fb18..521e2d0e73 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMDataCommitCoordinatorImpl.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMDataCommitCoordinatorImpl.java @@ -9,6 +9,7 @@ package org.opendaylight.controller.md.sal.dom.broker.impl; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; +import java.util.concurrent.RejectedExecutionException; import javax.annotation.concurrent.GuardedBy; @@ -86,8 +87,18 @@ public class DOMDataCommitCoordinatorImpl implements DOMDataCommitExecutor { Preconditions.checkArgument(cohorts != null, "Cohorts must not be null."); Preconditions.checkArgument(listener != null, "Listener must not be null"); LOG.debug("Tx: {} is submitted for execution.", transaction.getIdentifier()); - ListenableFuture commitFuture = executor.submit(new CommitCoordinationTask( - transaction, cohorts, listener)); + + ListenableFuture commitFuture = null; + try { + commitFuture = executor.submit(new CommitCoordinationTask(transaction, cohorts, listener)); + } catch(RejectedExecutionException e) { + LOG.error("The commit executor's queue is full - submit task was rejected. 
\n" + + executor, e); + return Futures.immediateFailedCheckedFuture( + new TransactionCommitFailedException( + "Could not submit the commit task - the commit queue capacity has been exceeded.", e)); + } + if (listener.isPresent()) { Futures.addCallback(commitFuture, new DOMDataCommitErrorInvoker(transaction, listener.get())); } diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadOnlyTransaction.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadOnlyTransaction.java index b4562cf2ec..5e2a417d28 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadOnlyTransaction.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadOnlyTransaction.java @@ -40,6 +40,12 @@ class DOMForwardedReadOnlyTransaction extends return getSubtransaction(store).read(path); } + @Override public CheckedFuture exists( + LogicalDatastoreType store, + YangInstanceIdentifier path) { + return getSubtransaction(store).exists(path); + } + @Override public void close() { closeSubtransactions(); diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadWriteTransaction.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadWriteTransaction.java index 74a4c52e36..67351ec945 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadWriteTransaction.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMForwardedReadWriteTransaction.java @@ -50,4 +50,10 @@ class DOMForwardedReadWriteTransaction extends DOMForwardedWriteTransaction exists( + LogicalDatastoreType store, + YangInstanceIdentifier path) { + return getSubtransaction(store).exists(path); + } +} diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/BackwardsCompatibleMountPoint.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/BackwardsCompatibleMountPoint.java index 5bd8a7bc02..fb72b5a99a 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/BackwardsCompatibleMountPoint.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/BackwardsCompatibleMountPoint.java @@ -15,13 +15,6 @@ import com.google.common.util.concurrent.CheckedFuture; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.JdkFutureAdapters; import com.google.common.util.concurrent.ListenableFuture; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.annotation.Nullable; - import org.opendaylight.controller.md.sal.common.api.RegistrationListener; import org.opendaylight.controller.md.sal.common.api.TransactionStatus; import org.opendaylight.controller.md.sal.common.api.data.DataCommitHandler; @@ -78,6 +71,12 @@ import org.opendaylight.yangtools.yang.model.api.Module; import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.opendaylight.yangtools.yang.model.api.SchemaContextListener; +import javax.annotation.Nullable; +import java.util.List; +import java.util.Map; +import 
java.util.Set; +import java.util.concurrent.ExecutionException; + public class BackwardsCompatibleMountPoint implements MountProvisionInstance, SchemaContextProvider, SchemaService { private final DataProviderService dataReader; @@ -405,6 +404,16 @@ public class BackwardsCompatibleMountPoint implements MountProvisionInstance, Sc final Optional> normalizedNodeOptional = Optional.>fromNullable(normalized.getValue()); return Futures.immediateCheckedFuture(normalizedNodeOptional); } + + @Override public CheckedFuture exists(LogicalDatastoreType store, + YangInstanceIdentifier path) { + + try { + return Futures.immediateCheckedFuture(read(store, path).get().isPresent()); + } catch (InterruptedException | ExecutionException e) { + return Futures.immediateFailedCheckedFuture(new ReadFailedException("Exists failed",e)); + } + } } @VisibleForTesting @@ -518,6 +527,16 @@ public class BackwardsCompatibleMountPoint implements MountProvisionInstance, Sc return new BackwardsCompatibleReadTransaction(dataReader, dataNormalizer).read(store, path); } + @Override public CheckedFuture exists(LogicalDatastoreType store, + YangInstanceIdentifier path) { + + try { + return Futures.immediateCheckedFuture(read(store, path).get().isPresent()); + } catch (InterruptedException | ExecutionException e) { + return Futures.immediateFailedCheckedFuture(new ReadFailedException("Exists failed",e)); + } + } + @Override public boolean cancel() { return delegateWriteTx.cancel(); diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/RoutedRpcSelector.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/RoutedRpcSelector.java index 19ff03b7d2..c8e3c0b6e0 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/RoutedRpcSelector.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/RoutedRpcSelector.java @@ -8,7 +8,9 @@ package org.opendaylight.controller.sal.dom.broker.impl; import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkState; + +import com.google.common.collect.ImmutableSet; +import com.google.common.util.concurrent.ListenableFuture; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -22,11 +24,8 @@ import org.opendaylight.yangtools.concepts.Identifiable; import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.common.RpcResult; import org.opendaylight.yangtools.yang.data.api.CompositeNode; -import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.SimpleNode; - -import com.google.common.collect.ImmutableSet; -import com.google.common.util.concurrent.ListenableFuture; +import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; class RoutedRpcSelector implements RpcImplementation, AutoCloseable, Identifiable { @@ -81,9 +80,9 @@ class RoutedRpcSelector implements RpcImplementation, AutoCloseable, Identifiabl } if (potential == null) { return router.invokeRpc(rpc, (YangInstanceIdentifier) route, input); + } else { + return potential.invokeRpc(rpc, input); } - checkState(potential != null, "No implementation is available for rpc:%s path:%s", rpc, route); - return potential.invokeRpc(rpc, input); } public void addPath(final QName context, final YangInstanceIdentifier path, final RoutedRpcRegImpl routedRpcRegImpl) { 
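Note on the RPC broker changes (the RoutedRpcSelector hunk above and the SchemaAwareRpcBroker hunk below): a missing implementation no longer trips a checkState() inside invokeRpc; the call either falls back to the router's default delegate or completes with a failed future. A minimal caller-side sketch of how that failure now surfaces asynchronously - the 'broker' variable and the callback bodies are placeholders assumed for illustration, and the Guava ListenableFuture/Futures/FutureCallback types are the same ones already imported in these files:

    ListenableFuture<RpcResult<CompositeNode>> result = broker.invokeRpc(rpc, route, input);
    Futures.addCallback(result, new FutureCallback<RpcResult<CompositeNode>>() {
        @Override
        public void onSuccess(final RpcResult<CompositeNode> rpcResult) {
            // normal result handling
        }
        @Override
        public void onFailure(final Throwable t) {
            // an absent default implementation now arrives here as an
            // RpcImplementationUnavailableException rather than an IllegalStateException
            // thrown synchronously from invokeRpc
        }
    });
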
diff --git a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/SchemaAwareRpcBroker.java b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/SchemaAwareRpcBroker.java index 1ba6594563..b4d7d2d001 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/SchemaAwareRpcBroker.java +++ b/opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/sal/dom/broker/impl/SchemaAwareRpcBroker.java @@ -10,6 +10,13 @@ package org.opendaylight.controller.sal.dom.broker.impl; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.Preconditions; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; + import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -22,6 +29,7 @@ import org.opendaylight.controller.sal.core.api.Broker.RoutedRpcRegistration; import org.opendaylight.controller.sal.core.api.Broker.RpcRegistration; import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation; import org.opendaylight.controller.sal.core.api.RpcImplementation; +import org.opendaylight.controller.sal.core.api.RpcImplementationUnavailableException; import org.opendaylight.controller.sal.core.api.RpcRegistrationListener; import org.opendaylight.controller.sal.core.api.RpcRoutingContext; import org.opendaylight.controller.sal.dom.broker.spi.RpcRouter; @@ -38,12 +46,9 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Preconditions; -import com.google.common.collect.FluentIterable; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.util.concurrent.ListenableFuture; - +/** + * RPC broker responsible for routing requests to remote systems. 
+ */ public class SchemaAwareRpcBroker implements RpcRouter, Identifiable, RoutedRpcDefaultImplementation { private static final Logger LOG = LoggerFactory.getLogger(SchemaAwareRpcBroker.class); @@ -217,8 +222,12 @@ public class SchemaAwareRpcBroker implements RpcRouter, Identifiable, Ro @Override public ListenableFuture> invokeRpc(final QName rpc, final YangInstanceIdentifier route, final CompositeNode input) { - checkState(defaultDelegate != null, "No implementation is available for rpc:%s path:%s", rpc, route); - return defaultDelegate.invokeRpc(rpc, route, input); + if (defaultDelegate == null) { + return Futures.immediateFailedCheckedFuture(new RpcImplementationUnavailableException("No RPC implementation found")); + } + + LOG.debug("Forwarding RPC {} path {} to delegate {}", rpc, route); + return defaultDelegate.invokeRpc(rpc, route, input); } void remove(final GlobalRpcRegistration registration) { diff --git a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerPerformanceTest.java b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerPerformanceTest.java index 181396fc88..e9ed5b1b30 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerPerformanceTest.java +++ b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerPerformanceTest.java @@ -63,8 +63,10 @@ public class DOMBrokerPerformanceTest { @Before public void setupStore() { - InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor()); - InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); schemaContext = TestModel.createTestContext(); operStore.onGlobalContextUpdated(schemaContext); diff --git a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerTest.java b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerTest.java index 0bb16a39b9..e57d08f173 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerTest.java +++ b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMBrokerTest.java @@ -7,19 +7,24 @@ import static org.junit.Assert.assertEquals; import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION; import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.OPERATIONAL; +import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.mockito.Mockito; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import 
org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitDeadlockException; +import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException; import org.opendaylight.controller.md.sal.dom.api.DOMDataChangeListener; import org.opendaylight.controller.md.sal.dom.api.DOMDataReadTransaction; import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction; @@ -28,6 +33,7 @@ import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore; import org.opendaylight.controller.md.sal.dom.store.impl.TestModel; import org.opendaylight.controller.sal.core.spi.data.DOMStore; import org.opendaylight.yangtools.util.concurrent.DeadlockDetectingListeningExecutorService; +import org.opendaylight.yangtools.util.concurrent.SpecialExecutors; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes; @@ -35,6 +41,7 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; +import com.google.common.util.concurrent.ForwardingExecutorService; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; @@ -46,11 +53,16 @@ public class DOMBrokerTest { private SchemaContext schemaContext; private DOMDataBrokerImpl domBroker; private ListeningExecutorService executor; + private ExecutorService futureExecutor; + private CommitExecutorService commitExecutor; @Before public void setupStore() { - InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor()); - InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor()); + + InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); schemaContext = TestModel.createTestContext(); operStore.onGlobalContextUpdated(schemaContext); @@ -61,8 +73,10 @@ public class DOMBrokerTest { .put(OPERATIONAL, operStore) // .build(); - executor = new DeadlockDetectingListeningExecutorService(Executors.newSingleThreadExecutor(), - TransactionCommitDeadlockException.DEADLOCK_EXECUTOR_FUNCTION); + commitExecutor = new CommitExecutorService(Executors.newSingleThreadExecutor()); + futureExecutor = SpecialExecutors.newBlockingBoundedCachedThreadPool(1, 5, "FCB"); + executor = new DeadlockDetectingListeningExecutorService(commitExecutor, + TransactionCommitDeadlockException.DEADLOCK_EXECUTOR_FUNCTION, futureExecutor); domBroker = new DOMDataBrokerImpl(stores, executor); } @@ -71,6 +85,10 @@ public class DOMBrokerTest { if( executor != null ) { executor.shutdownNow(); } + + if(futureExecutor != null) { + futureExecutor.shutdownNow(); + } } @Test(timeout=10000) @@ -137,6 +155,24 @@ public class DOMBrokerTest { assertTrue(afterCommitRead.isPresent()); } + @Test(expected=TransactionCommitFailedException.class) + public void testRejectedCommit() throws Exception { + + commitExecutor.delegate = Mockito.mock( ExecutorService.class 
); + Mockito.doThrow( new RejectedExecutionException( "mock" ) ) + .when( commitExecutor.delegate ).execute( Mockito.any( Runnable.class ) ); + Mockito.doNothing().when( commitExecutor.delegate ).shutdown(); + Mockito.doReturn( Collections.emptyList() ).when( commitExecutor.delegate ).shutdownNow(); + Mockito.doReturn( "" ).when( commitExecutor.delegate ).toString(); + Mockito.doReturn( true ).when( commitExecutor.delegate ) + .awaitTermination( Mockito.anyLong(), Mockito.any( TimeUnit.class ) ); + + DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction(); + writeTx.put( OPERATIONAL, TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME) ); + + writeTx.submit().checkedGet( 5, TimeUnit.SECONDS ); + } + /** * Tests a simple DataChangeListener notification after a write. */ @@ -306,4 +342,18 @@ public class DOMBrokerTest { assertTrue( "onDataChanged was not called", latch.await( 5, TimeUnit.SECONDS ) ); } } + + static class CommitExecutorService extends ForwardingExecutorService { + + ExecutorService delegate; + + public CommitExecutorService( ExecutorService delegate ) { + this.delegate = delegate; + } + + @Override + protected ExecutorService delegate() { + return delegate; + } + } } diff --git a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMTransactionChainTest.java b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMTransactionChainTest.java index 3ea0bcefa5..18b11c8300 100644 --- a/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMTransactionChainTest.java +++ b/opendaylight/md-sal/sal-dom-broker/src/test/java/org/opendaylight/controller/md/sal/dom/broker/impl/DOMTransactionChainTest.java @@ -44,8 +44,10 @@ public class DOMTransactionChainTest { @Before public void setupStore() { - InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", MoreExecutors.sameThreadExecutor()); - InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); + InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", + MoreExecutors.sameThreadExecutor(), MoreExecutors.sameThreadExecutor()); schemaContext = TestModel.createTestContext(); operStore.onGlobalContextUpdated(schemaContext); diff --git a/opendaylight/md-sal/sal-dom-spi/src/main/java/org/opendaylight/controller/sal/core/spi/data/DOMStoreReadTransaction.java b/opendaylight/md-sal/sal-dom-spi/src/main/java/org/opendaylight/controller/sal/core/spi/data/DOMStoreReadTransaction.java index 84d09c7cb0..719a6f0499 100644 --- a/opendaylight/md-sal/sal-dom-spi/src/main/java/org/opendaylight/controller/sal/core/spi/data/DOMStoreReadTransaction.java +++ b/opendaylight/md-sal/sal-dom-spi/src/main/java/org/opendaylight/controller/sal/core/spi/data/DOMStoreReadTransaction.java @@ -34,4 +34,20 @@ public interface DOMStoreReadTransaction extends DOMStoreTransaction { * */ CheckedFuture>, ReadFailedException> read(YangInstanceIdentifier path); + + /** + * Checks if data is available in the logical data store located at provided path + * + * @param path + * Path which uniquely identifies subtree which client want to + * check existence of + * @return a CheckFuture containing the result of the check. + *
+ * <ul>
+ * <li>If the data at the supplied path exists, the Future returns a Boolean
+ * whose value is true, false otherwise</li>
+ * <li>If checking for the data fails, the Future will fail with a
+ * {@link ReadFailedException} or an exception derived from ReadFailedException.</li>
+ * </ul>
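+ * <p>
+ * Illustrative usage (assuming {@code tx} is a read transaction obtained from the store's
+ * {@code newReadOnlyTransaction()} and {@code path} identifies the subtree of interest):
+ * <pre>{@code
+ * Boolean present = tx.exists(path).checkedGet(); // throws ReadFailedException on failure
+ * }</pre>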
+ */ + CheckedFuture exists(YangInstanceIdentifier path); } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryConfigDataStoreProviderModule.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryConfigDataStoreProviderModule.java index 805608d479..39a448ff6c 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryConfigDataStoreProviderModule.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryConfigDataStoreProviderModule.java @@ -1,12 +1,9 @@ package org.opendaylight.controller.config.yang.inmemory_datastore_provider; -import java.util.concurrent.Executors; - -import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore; - -import com.google.common.util.concurrent.MoreExecutors; +import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory; public class InMemoryConfigDataStoreProviderModule extends org.opendaylight.controller.config.yang.inmemory_datastore_provider.AbstractInMemoryConfigDataStoreProviderModule { + public InMemoryConfigDataStoreProviderModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) { super(identifier, dependencyResolver); } @@ -22,9 +19,7 @@ public class InMemoryConfigDataStoreProviderModule extends org.opendaylight.cont @Override public java.lang.AutoCloseable createInstance() { - InMemoryDOMDataStore ids = new InMemoryDOMDataStore("DOM-CFG", MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor())); - getSchemaServiceDependency().registerSchemaContextListener(ids); - return ids; + return InMemoryDOMDataStoreFactory.create("DOM-CFG", getSchemaServiceDependency()); } } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryOperationalDataStoreProviderModule.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryOperationalDataStoreProviderModule.java index f4795588ab..615fe0211c 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryOperationalDataStoreProviderModule.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryOperationalDataStoreProviderModule.java @@ -1,12 +1,9 @@ package org.opendaylight.controller.config.yang.inmemory_datastore_provider; -import java.util.concurrent.Executors; - -import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore; - -import com.google.common.util.concurrent.MoreExecutors; +import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory; public class InMemoryOperationalDataStoreProviderModule extends org.opendaylight.controller.config.yang.inmemory_datastore_provider.AbstractInMemoryOperationalDataStoreProviderModule { + public InMemoryOperationalDataStoreProviderModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver 
dependencyResolver) { super(identifier, dependencyResolver); } @@ -22,9 +19,7 @@ public class InMemoryOperationalDataStoreProviderModule extends org.opendaylight @Override public java.lang.AutoCloseable createInstance() { - InMemoryDOMDataStore ids = new InMemoryDOMDataStore("DOM-OPER", MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor())); - getOperationalSchemaServiceDependency().registerSchemaContextListener(ids); - return ids; + return InMemoryDOMDataStoreFactory.create("DOM-OPER", getOperationalSchemaServiceDependency()); } } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ChangeListenerNotifyTask.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ChangeListenerNotifyTask.java index 27325d84a9..ac1f2e32d5 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ChangeListenerNotifyTask.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ChangeListenerNotifyTask.java @@ -8,6 +8,8 @@ package org.opendaylight.controller.md.sal.dom.store.impl; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; +import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener; +import org.opendaylight.yangtools.util.concurrent.NotificationManager; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.slf4j.Logger; @@ -16,31 +18,33 @@ import org.slf4j.LoggerFactory; class ChangeListenerNotifyTask implements Runnable { private static final Logger LOG = LoggerFactory.getLogger(ChangeListenerNotifyTask.class); + private final Iterable> listeners; private final AsyncDataChangeEvent> event; + @SuppressWarnings("rawtypes") + private final NotificationManager + notificationMgr; + + @SuppressWarnings("rawtypes") public ChangeListenerNotifyTask(final Iterable> listeners, - final AsyncDataChangeEvent> event) { + final AsyncDataChangeEvent> event, + final NotificationManager notificationMgr) { this.listeners = listeners; this.event = event; + this.notificationMgr = notificationMgr; } @Override public void run() { for (DataChangeListenerRegistration listener : listeners) { - try { - listener.getInstance().onDataChanged(event); - } catch (Exception e) { - LOG.error("Unhandled exception during invoking listener {} with event {}", listener, event, e); - } + notificationMgr.submitNotification(listener.getInstance(), event); } - } @Override public String toString() { return "ChangeListenerNotifyTask [listeners=" + listeners + ", event=" + event + "]"; } - } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStore.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStore.java index c44d0909d6..b61b367103 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStore.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStore.java @@ -13,11 +13,17 @@ import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import 
com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; + import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; +import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener; import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException; import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException; import org.opendaylight.controller.md.sal.dom.store.impl.SnapshotBackedWriteTransaction.TransactionReadyPrototype; +import org.opendaylight.yangtools.util.ExecutorServiceUtil; +import org.opendaylight.yangtools.util.PropertyUtils; +import org.opendaylight.yangtools.util.concurrent.NotificationManager; +import org.opendaylight.yangtools.util.concurrent.QueuedNotificationManager; import org.opendaylight.yangtools.yang.data.api.schema.tree.ConflictingModificationAppliedException; import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTree; import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeCandidate; @@ -43,8 +49,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.concurrent.GuardedBy; + import java.util.Collections; import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import static com.google.common.base.Preconditions.checkState; @@ -61,16 +70,51 @@ import static com.google.common.base.Preconditions.checkState; public class InMemoryDOMDataStore implements DOMStore, Identifiable, SchemaContextListener, TransactionReadyPrototype,AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(InMemoryDOMDataStore.class); + + @SuppressWarnings("rawtypes") + private static final QueuedNotificationManager.Invoker DCL_NOTIFICATION_MGR_INVOKER = + new QueuedNotificationManager.Invoker() { + + @SuppressWarnings("unchecked") + @Override + public void invokeListener( AsyncDataChangeListener listener, + AsyncDataChangeEvent notification ) { + listener.onDataChanged(notification); + } + }; + + private static final String DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE_PROP = + "mdsal.datastore-dcl-notification-queue.size"; + + private static final int DEFAULT_DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE = 1000; + private final DataTree dataTree = InMemoryDataTreeFactory.getInstance().create(); private final ListenerTree listenerTree = ListenerTree.create(); private final AtomicLong txCounter = new AtomicLong(0); - private final ListeningExecutorService executor; + private final ListeningExecutorService listeningExecutor; + + @SuppressWarnings("rawtypes") + private final NotificationManager + dataChangeListenerNotificationManager; + private final ExecutorService dataChangeListenerExecutor; private final String name; - public InMemoryDOMDataStore(final String name, final ListeningExecutorService executor) { + public InMemoryDOMDataStore(final String name, final ListeningExecutorService listeningExecutor, + final ExecutorService dataChangeListenerExecutor) { this.name = Preconditions.checkNotNull(name); - this.executor = Preconditions.checkNotNull(executor); + this.listeningExecutor = Preconditions.checkNotNull(listeningExecutor); + + this.dataChangeListenerExecutor = Preconditions.checkNotNull(dataChangeListenerExecutor); + + int maxDCLQueueSize = PropertyUtils.getIntSystemProperty( + 
DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE_PROP, DEFAULT_DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE ); + + dataChangeListenerNotificationManager = + new QueuedNotificationManager<>(this.dataChangeListenerExecutor, + DCL_NOTIFICATION_MGR_INVOKER, maxDCLQueueSize, "DataChangeListenerQueueMgr"); } @Override @@ -104,8 +148,9 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch } @Override - public void close(){ - executor.shutdownNow(); + public void close() { + ExecutorServiceUtil.tryGracefulShutdown(listeningExecutor, 30, TimeUnit.SECONDS); + ExecutorServiceUtil.tryGracefulShutdown(dataChangeListenerExecutor, 30, TimeUnit.SECONDS); } @Override public >> ListenerRegistration registerChangeListener( @@ -132,7 +177,9 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch .setAfter(data) // .addCreated(path, data) // .build(); - executor.submit(new ChangeListenerNotifyTask(Collections.singletonList(reg), event)); + + new ChangeListenerNotifyTask(Collections.singletonList(reg), event, + dataChangeListenerNotificationManager).run(); } } @@ -221,8 +268,9 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch @Override public void close() { - executor.shutdownNow(); - + // FIXME: this call doesn't look right here - listeningExecutor is shared and owned + // by the outer class. + //listeningExecutor.shutdownNow(); } protected synchronized void onTransactionFailed(final SnapshotBackedWriteTransaction transaction, @@ -308,7 +356,7 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch @Override public ListenableFuture canCommit() { - return executor.submit(new Callable() { + return listeningExecutor.submit(new Callable() { @Override public Boolean call() throws TransactionCommitFailedException { try { @@ -330,11 +378,12 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch @Override public ListenableFuture preCommit() { - return executor.submit(new Callable() { + return listeningExecutor.submit(new Callable() { @Override public Void call() { candidate = dataTree.prepare(modification); - listenerResolver = ResolveDataChangeEventsTask.create(candidate, listenerTree); + listenerResolver = ResolveDataChangeEventsTask.create(candidate, listenerTree, + dataChangeListenerNotificationManager); return null; } }); @@ -359,7 +408,7 @@ public class InMemoryDOMDataStore implements DOMStore, Identifiable, Sch for (ChangeListenerNotifyTask task : listenerResolver.call()) { LOG.trace("Scheduling invocation of listeners: {}", task); - executor.submit(task); + task.run(); } } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStoreFactory.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStoreFactory.java new file mode 100644 index 0000000000..c853a132de --- /dev/null +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStoreFactory.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.md.sal.dom.store.impl; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import javax.annotation.Nullable; + +import org.opendaylight.controller.sal.core.api.model.SchemaService; +import org.opendaylight.yangtools.util.concurrent.SpecialExecutors; +import org.opendaylight.yangtools.util.PropertyUtils; +import com.google.common.util.concurrent.MoreExecutors; + +/** + * A factory for creating InMemoryDOMDataStore instances. + * + * @author Thomas Pantelis + */ +public final class InMemoryDOMDataStoreFactory { + + private static final String DCL_EXECUTOR_MAX_QUEUE_SIZE_PROP = + "mdsal.datastore-dcl-notification-queue.size"; + private static final int DEFAULT_DCL_EXECUTOR_MAX_QUEUE_SIZE = 1000; + + private static final String DCL_EXECUTOR_MAX_POOL_SIZE_PROP = + "mdsal.datastore-dcl-notification-pool.size"; + private static final int DEFAULT_DCL_EXECUTOR_MAX_POOL_SIZE = 20; + + private InMemoryDOMDataStoreFactory() { + } + + /** + * Creates an InMemoryDOMDataStore instance. + * + * @param name the name of the data store + * @param schemaService the SchemaService to which to register the data store. + * @return an InMemoryDOMDataStore instance + */ + public static InMemoryDOMDataStore create(final String name, + @Nullable final SchemaService schemaService) { + + // For DataChangeListener notifications we use an executor that provides the fastest + // task execution time to get higher throughput as DataChangeListeners typically provide + // much of the business logic for a data model. If the executor queue size limit is reached, + // subsequent submitted notifications will block the calling thread. 
+ + int dclExecutorMaxQueueSize = PropertyUtils.getIntSystemProperty( + DCL_EXECUTOR_MAX_QUEUE_SIZE_PROP, DEFAULT_DCL_EXECUTOR_MAX_QUEUE_SIZE); + int dclExecutorMaxPoolSize = PropertyUtils.getIntSystemProperty( + DCL_EXECUTOR_MAX_POOL_SIZE_PROP, DEFAULT_DCL_EXECUTOR_MAX_POOL_SIZE); + + ExecutorService dataChangeListenerExecutor = SpecialExecutors.newBlockingBoundedFastThreadPool( + dclExecutorMaxPoolSize, dclExecutorMaxQueueSize, name + "-DCL" ); + + InMemoryDOMDataStore dataStore = new InMemoryDOMDataStore(name, + MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()), + dataChangeListenerExecutor); + + if(schemaService != null) { + schemaService.registerSchemaContextListener(dataStore); + } + + return dataStore; + } +} diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeEventsTask.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeEventsTask.java index 3ddf0b60fa..d8feaa71f6 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeEventsTask.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeEventsTask.java @@ -24,12 +24,15 @@ import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.Callable; +import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; +import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.Builder; import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.SimpleEventFactory; import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree; import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Node; import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Walker; +import org.opendaylight.yangtools.util.concurrent.NotificationManager; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates; @@ -57,9 +60,15 @@ final class ResolveDataChangeEventsTask implements Callable notificationMgr; + + @SuppressWarnings("rawtypes") + public ResolveDataChangeEventsTask(final DataTreeCandidate candidate, final ListenerTree listenerTree, + final NotificationManager notificationMgr) { this.candidate = Preconditions.checkNotNull(candidate); this.listenerRoot = Preconditions.checkNotNull(listenerTree); + this.notificationMgr = Preconditions.checkNotNull(notificationMgr); } /** @@ -120,7 +129,7 @@ final class ResolveDataChangeEventsTask implements Callable taskListBuilder, + private void addNotificationTask(final ImmutableList.Builder taskListBuilder, final ListenerTree.Node listeners, final Collection entries) { if (!entries.isEmpty()) { @@ -141,7 +150,7 @@ final class ResolveDataChangeEventsTask implements Callable taskListBuilder, final ListenerTree.Node listeners, final DOMImmutableDataChangeEvent event) { DataChangeScope eventScope = event.getScope(); @@ -150,11 +159,11 @@ final class ResolveDataChangeEventsTask implements Callable> 
listenerSet = Collections .> singletonList(listenerReg); if (eventScope == DataChangeScope.BASE) { - taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event)); + taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr)); } else if (eventScope == DataChangeScope.ONE && listenerScope != DataChangeScope.BASE) { - taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event)); + taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr)); } else if (eventScope == DataChangeScope.SUBTREE && listenerScope == DataChangeScope.SUBTREE) { - taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event)); + taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr)); } } } @@ -172,7 +181,7 @@ final class ResolveDataChangeEventsTask implements Callable taskListBuilder, final ListenerTree.Node listeners, final Collection entries) { @@ -210,14 +219,14 @@ final class ResolveDataChangeEventsTask implements Callable taskListBuilder, final Node listeners, final DOMImmutableDataChangeEvent event) { for (DataChangeListenerRegistration listener : listeners.getListeners()) { if (listener.getScope() == event.getScope()) { Set> listenerSet = Collections .> singleton(listener); - taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event)); + taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr)); } } } @@ -519,7 +528,10 @@ final class ResolveDataChangeEventsTask implements Callable notificationMgr) { + return new ResolveDataChangeEventsTask(candidate, listenerTree, notificationMgr); } } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadTransaction.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadTransaction.java index 2a98406343..44ee61c116 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadTransaction.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadTransaction.java @@ -7,19 +7,19 @@ */ package org.opendaylight.controller.md.sal.dom.store.impl; -import static com.google.common.base.Preconditions.checkNotNull; -import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeSnapshot; +import com.google.common.base.Optional; +import com.google.common.base.Preconditions; +import com.google.common.util.concurrent.CheckedFuture; +import com.google.common.util.concurrent.Futures; import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; +import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeSnapshot; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Optional; -import com.google.common.base.Preconditions; -import com.google.common.util.concurrent.CheckedFuture; -import com.google.common.util.concurrent.Futures; +import static com.google.common.base.Preconditions.checkNotNull; /** * @@ -63,4 +63,16 @@ final class SnapshotBackedReadTransaction extends AbstractDOMStoreTransaction return Futures.immediateFailedCheckedFuture(new 
ReadFailedException("Read failed",e)); } } -} \ No newline at end of file + + @Override public CheckedFuture exists(YangInstanceIdentifier path) { + LOG.debug("Tx: {} Exists: {}", getIdentifier(), path); + checkNotNull(path, "Path must not be null."); + + try { + return Futures.immediateCheckedFuture( + read(path).checkedGet().isPresent()); + } catch (ReadFailedException e) { + return Futures.immediateFailedCheckedFuture(e); + } + } +} diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadWriteTransaction.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadWriteTransaction.java index 5c5e9c6b6d..ce7043fd47 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadWriteTransaction.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/SnapshotBackedReadWriteTransaction.java @@ -61,4 +61,14 @@ class SnapshotBackedReadWriteTransaction extends SnapshotBackedWriteTransaction return Futures.immediateFailedCheckedFuture(new ReadFailedException("Read failed",e)); } } -} \ No newline at end of file + + @Override public CheckedFuture exists( + YangInstanceIdentifier path) { + try { + return Futures.immediateCheckedFuture( + read(path).checkedGet().isPresent()); + } catch (ReadFailedException e) { + return Futures.immediateFailedCheckedFuture(e); + } + } +} diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/tree/ListenerTree.java b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/tree/ListenerTree.java index 39152767dd..ac7a318187 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/tree/ListenerTree.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/tree/ListenerTree.java @@ -144,7 +144,10 @@ public final class ListenerTree { /** * A walking context, pretty much equivalent to an iterator, but it - * exposes the undelying tree structure. + * exposes the underlying tree structure. + */ + /* + * FIXME: BUG-1511: split this class out as ListenerWalker. */ public static final class Walker implements AutoCloseable { private final Lock lock; @@ -177,6 +180,9 @@ public final class ListenerTree { * only as long as the {@link org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Walker} instance through which it is reached remains * unclosed. */ + /* + * FIXME: BUG-1511: split this class out as ListenerNode. 
+ */ public static final class Node implements StoreTreeNode, Identifiable { private final Collection> listeners = new ArrayList<>(); private final Map children = new HashMap<>(); diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/AbstractDataChangeListenerTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/AbstractDataChangeListenerTest.java index 3176ca764d..76a9354d1a 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/AbstractDataChangeListenerTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/AbstractDataChangeListenerTest.java @@ -9,7 +9,7 @@ package org.opendaylight.controller.md.sal.dom.store.impl; import java.util.Collection; import java.util.Map; - +import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.opendaylight.controller.md.sal.dom.store.impl.DatastoreTestTask.WriteTransactionCustomizer; @@ -18,6 +18,7 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controll import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.top.level.list.NestedList; import org.opendaylight.yangtools.sal.binding.generator.impl.ModuleInfoBackedContext; +import org.opendaylight.yangtools.util.concurrent.SpecialExecutors; import org.opendaylight.yangtools.yang.binding.YangModuleInfo; import org.opendaylight.yangtools.yang.binding.util.BindingReflections; import org.opendaylight.yangtools.yang.common.QName; @@ -48,6 +49,7 @@ public abstract class AbstractDataChangeListenerTest { private InMemoryDOMDataStore datastore; private SchemaContext schemaContext; + private TestDCLExecutorService dclExecutorService; @Before public final void setup() throws Exception { @@ -56,13 +58,24 @@ public abstract class AbstractDataChangeListenerTest { ModuleInfoBackedContext context = ModuleInfoBackedContext.create(); context.registerModuleInfo(moduleInfo); schemaContext = context.tryToCreateSchemaContext().get(); + + dclExecutorService = new TestDCLExecutorService( + SpecialExecutors.newBlockingBoundedFastThreadPool(1, 10, "DCL" )); + datastore = new InMemoryDOMDataStore("TEST", - MoreExecutors.sameThreadExecutor()); + MoreExecutors.sameThreadExecutor(), dclExecutorService ); datastore.onGlobalContextUpdated(schemaContext); } + @After + public void tearDown() { + if( dclExecutorService != null ) { + dclExecutorService.shutdownNow(); + } + } + public final DatastoreTestTask newTestTask() { - return new DatastoreTestTask(datastore).cleanup(DatastoreTestTask + return new DatastoreTestTask(datastore, dclExecutorService).cleanup(DatastoreTestTask .simpleDelete(TOP_LEVEL)); } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DatastoreTestTask.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DatastoreTestTask.java index 26987a6fba..98d79bee8b 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DatastoreTestTask.java +++ 
b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DatastoreTestTask.java @@ -8,9 +8,11 @@ package org.opendaylight.controller.md.sal.dom.store.impl; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; @@ -37,11 +39,13 @@ public class DatastoreTestTask { private WriteTransactionCustomizer cleanup; private YangInstanceIdentifier changePath; private DataChangeScope changeScope; - private boolean postSetup = false; + private volatile boolean postSetup = false; private final ChangeEventListener internalListener; + private final TestDCLExecutorService dclExecutorService; - public DatastoreTestTask(final DOMStore datastore) { + public DatastoreTestTask(final DOMStore datastore, final TestDCLExecutorService dclExecutorService) { this.store = datastore; + this.dclExecutorService = dclExecutorService; internalListener = new ChangeEventListener(); } @@ -79,7 +83,7 @@ public class DatastoreTestTask { return this; } - public void run() throws InterruptedException, ExecutionException { + public void run() throws InterruptedException, ExecutionException, TimeoutException { if (setup != null) { execute(setup); } @@ -89,13 +93,17 @@ public class DatastoreTestTask { } Preconditions.checkState(write != null, "Write Transaction must be set."); + postSetup = true; + dclExecutorService.afterTestSetup(); + execute(write); if (registration != null) { registration.close(); } + if (changeListener != null) { - changeListener.onDataChanged(internalListener.receivedChange.get()); + changeListener.onDataChanged(getChangeEvent()); } if (read != null) { read.verify(store.newReadOnlyTransaction()); @@ -105,8 +113,26 @@ public class DatastoreTestTask { } } - public Future>> getChangeEvent() { - return internalListener.receivedChange; + public AsyncDataChangeEvent> getChangeEvent() { + try { + return internalListener.receivedChange.get(10, TimeUnit.SECONDS); + } catch( Exception e ) { + fail( "Error getting the AsyncDataChangeEvent from the Future: " + e ); + } + + // won't get here + return null; + } + + public void verifyNoChangeEvent() { + try { + Object unexpected = internalListener.receivedChange.get(500, TimeUnit.MILLISECONDS); + fail( "Got unexpected AsyncDataChangeEvent from the Future: " + unexpected ); + } catch( TimeoutException e ) { + // Expected + } catch( Exception e ) { + fail( "Error getting the AsyncDataChangeEvent from the Future: " + e ); + } } private void execute(final WriteTransactionCustomizer writeCustomizer) throws InterruptedException, diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DefaultDataChangeListenerTestSuite.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DefaultDataChangeListenerTestSuite.java index 54d2043dc7..84337de419 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DefaultDataChangeListenerTestSuite.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DefaultDataChangeListenerTestSuite.java 
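Note: DatastoreTestTask now waits up to ten seconds for an expected change event, and verifyNoChangeEvent() treats a 500 ms timeout as success. The same wait/verify pattern is sketched below in isolation with a plain CompletableFuture; the class and method names are illustrative only.

    import static org.junit.Assert.fail;

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    final class ChangeEventAwaitSketch {
        // Block for a generous timeout when an event is expected.
        static <T> T awaitEvent(final CompletableFuture<T> future) {
            try {
                return future.get(10, TimeUnit.SECONDS);
            } catch (Exception e) {
                fail("Error getting the change event from the Future: " + e);
                return null; // unreachable - fail() throws AssertionError
            }
        }

        // Use a short timeout when asserting that no event was delivered.
        static void verifyNoEvent(final CompletableFuture<?> future) {
            try {
                final Object unexpected = future.get(500, TimeUnit.MILLISECONDS);
                fail("Got unexpected change event: " + unexpected);
            } catch (TimeoutException expected) {
                // No event arrived within the window - this is the success path.
            } catch (Exception e) {
                fail("Error getting the change event from the Future: " + e);
            }
        }
    }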
@@ -20,7 +20,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha abstract protected void customizeTask(DatastoreTestTask task); @Test - public final void putTopLevelOneNested() throws InterruptedException, ExecutionException { + public final void putTopLevelOneNested() throws Exception { DatastoreTestTask task = newTestTask().test(writeOneTopMultipleNested(FOO, BAR)); customizeTask(task); @@ -29,7 +29,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha } @Test - public final void existingTopWriteSibling() throws InterruptedException, ExecutionException { + public final void existingTopWriteSibling() throws Exception { DatastoreTestTask task = newTestTask().setup(writeOneTopMultipleNested(FOO)).test( new WriteTransactionCustomizer() { @Override @@ -46,7 +46,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha @Test - public final void existingTopWriteTwoNested() throws InterruptedException, ExecutionException { + public final void existingTopWriteTwoNested() throws Exception { DatastoreTestTask task = newTestTask().setup(writeOneTopMultipleNested(FOO)).test( new WriteTransactionCustomizer() { @Override @@ -64,7 +64,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha @Test - public final void existingOneNestedWriteAdditionalNested() throws InterruptedException, ExecutionException { + public final void existingOneNestedWriteAdditionalNested() throws Exception { DatastoreTestTask task = newTestTask().setup(writeOneTopMultipleNested(FOO, BAR)).test( new WriteTransactionCustomizer() { @Override @@ -79,11 +79,10 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha protected abstract void existingOneNestedWriteAdditionalNested(DatastoreTestTask task) throws InterruptedException, ExecutionException; - protected abstract void putTopLevelOneNested(DatastoreTestTask task) throws InterruptedException, - ExecutionException; + protected abstract void putTopLevelOneNested(DatastoreTestTask task) throws Exception; @Test - public final void replaceTopLevelNestedChanged() throws InterruptedException, ExecutionException { + public final void replaceTopLevelNestedChanged() throws Exception { DatastoreTestTask task = newTestTask().setup(writeOneTopMultipleNested(FOO, BAR)).test( writeOneTopMultipleNested(FOO, BAZ)); customizeTask(task); @@ -95,7 +94,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha ExecutionException; @Test - public final void putTopLevelWithTwoNested() throws InterruptedException, ExecutionException { + public final void putTopLevelWithTwoNested() throws Exception { DatastoreTestTask task = newTestTask().test(writeOneTopMultipleNested(FOO, BAR, BAZ)); customizeTask(task); @@ -107,7 +106,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha ExecutionException; @Test - public final void twoNestedExistsOneIsDeleted() throws InterruptedException, ExecutionException { + public final void twoNestedExistsOneIsDeleted() throws Exception { DatastoreTestTask task = newTestTask().setup(writeOneTopMultipleNested(FOO, BAR, BAZ)).test( deleteNested(FOO, BAZ)); @@ -120,7 +119,7 @@ public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataCha ExecutionException; @Test - public final void nestedListExistsRootDeleted() throws InterruptedException, ExecutionException { + public final void nestedListExistsRootDeleted() throws Exception { DatastoreTestTask task = 
newTestTask().cleanup(null).setup(writeOneTopMultipleNested(FOO, BAR, BAZ)) .test(DatastoreTestTask.simpleDelete(TOP_LEVEL)); diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDataStoreTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDataStoreTest.java index 9b105aa306..c609e13e79 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDataStoreTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDataStoreTest.java @@ -7,13 +7,10 @@ */ package org.opendaylight.controller.md.sal.dom.store.impl; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.concurrent.ExecutionException; - +import com.google.common.base.Optional; +import com.google.common.util.concurrent.CheckedFuture; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.MoreExecutors; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -35,9 +32,12 @@ import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes; import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder; import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import com.google.common.base.Optional; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.MoreExecutors; +import java.util.concurrent.ExecutionException; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; public class InMemoryDataStoreTest { @@ -47,7 +47,8 @@ public class InMemoryDataStoreTest { @Before public void setupStore() { - domStore = new InMemoryDOMDataStore("TEST", MoreExecutors.sameThreadExecutor()); + domStore = new InMemoryDOMDataStore("TEST", MoreExecutors.sameThreadExecutor(), + MoreExecutors.sameThreadExecutor()); schemaContext = TestModel.createTestContext(); domStore.onGlobalContextUpdated(schemaContext); } @@ -184,6 +185,74 @@ public class InMemoryDataStoreTest { assertEquals( "After commit read: data", containerNode, afterCommitRead.get() ); } + + @Test + public void testExistsForExistingData() throws Exception { + + DOMStoreReadWriteTransaction writeTx = domStore.newReadWriteTransaction(); + assertNotNull( writeTx ); + + ContainerNode containerNode = ImmutableContainerNodeBuilder.create() + .withNodeIdentifier( new NodeIdentifier( TestModel.TEST_QNAME ) ) + .addChild( ImmutableNodes.mapNodeBuilder( TestModel.OUTER_LIST_QNAME ) + .addChild( ImmutableNodes.mapEntry( TestModel.OUTER_LIST_QNAME, + TestModel.ID_QNAME, 1 ) ).build() ).build(); + + writeTx.merge( TestModel.TEST_PATH, containerNode ); + + CheckedFuture exists = + writeTx.exists(TestModel.TEST_PATH); + + assertEquals(true, exists.checkedGet()); + + DOMStoreThreePhaseCommitCohort ready = writeTx.ready(); + + ready.preCommit().get(); + + ready.commit().get(); + + DOMStoreReadTransaction readTx = domStore.newReadOnlyTransaction(); + assertNotNull( readTx ); + + exists = + readTx.exists(TestModel.TEST_PATH); + + assertEquals(true, exists.checkedGet()); + } + + @Test + public void 
testExistsForNonExistingData() throws Exception { + + DOMStoreReadWriteTransaction writeTx = domStore.newReadWriteTransaction(); + assertNotNull( writeTx ); + + CheckedFuture exists = + writeTx.exists(TestModel.TEST_PATH); + + assertEquals(false, exists.checkedGet()); + + DOMStoreReadTransaction readTx = domStore.newReadOnlyTransaction(); + assertNotNull( readTx ); + + exists = + readTx.exists(TestModel.TEST_PATH); + + assertEquals(false, exists.checkedGet()); + } + + @Test(expected=ReadFailedException.class) + public void testExistsThrowsReadFailedException() throws Exception { + + DOMStoreReadTransaction readTx = domStore.newReadOnlyTransaction(); + assertNotNull( readTx ); + + readTx.close(); + + readTx.exists(TestModel.TEST_PATH).checkedGet(); + } + + + @Test(expected=ReadFailedException.class) public void testReadWithReadOnlyTransactionClosed() throws Throwable { diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/RootScopeSubtreeTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/RootScopeSubtreeTest.java index 905dc0d19b..43b339e506 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/RootScopeSubtreeTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/RootScopeSubtreeTest.java @@ -23,7 +23,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { @Override public void putTopLevelOneNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), TOP_LEVEL, path(FOO), path(FOO, BAR)); assertEmpty(change.getUpdatedData()); @@ -34,7 +34,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { public void replaceTopLevelNestedChanged(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO, BAZ)); assertContains(change.getUpdatedData(), TOP_LEVEL, path(FOO)); @@ -45,7 +45,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { protected void putTopLevelWithTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), TOP_LEVEL, path(FOO), path(FOO, BAR), path(FOO, BAZ)); assertEmpty(change.getUpdatedData()); @@ -56,7 +56,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { protected void twoNestedExistsOneIsDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertEmpty(change.getCreatedData()); assertContains(change.getUpdatedData(), TOP_LEVEL, path(FOO)); @@ -67,7 +67,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { protected void nestedListExistsRootDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = 
task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertEmpty(change.getCreatedData()); assertEmpty(change.getUpdatedData()); @@ -76,7 +76,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { @Override protected void existingOneNestedWriteAdditionalNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO,BAZ)); assertNotContains(change.getCreatedData(), path(FOO,BAR)); @@ -86,7 +86,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { @Override protected void existingTopWriteTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO,BAR),path(FOO,BAZ)); assertContains(change.getUpdatedData(), TOP_LEVEL, path(FOO)); @@ -96,7 +96,7 @@ public class RootScopeSubtreeTest extends DefaultDataChangeListenerTestSuite { @Override protected void existingTopWriteSibling(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO_SIBLING)); assertContains(change.getUpdatedData(), TOP_LEVEL); diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/SchemaUpdateForTransactionTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/SchemaUpdateForTransactionTest.java index 5cba93a712..364712c7b3 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/SchemaUpdateForTransactionTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/SchemaUpdateForTransactionTest.java @@ -34,7 +34,8 @@ public class SchemaUpdateForTransactionTest { @Before public void setupStore() { - domStore = new InMemoryDOMDataStore("TEST", MoreExecutors.sameThreadExecutor()); + domStore = new InMemoryDOMDataStore("TEST", MoreExecutors.sameThreadExecutor(), + MoreExecutors.sameThreadExecutor()); loadSchemas(RockTheHouseInput.class); } diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/TestDCLExecutorService.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/TestDCLExecutorService.java new file mode 100644 index 0000000000..f6e6461bf5 --- /dev/null +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/TestDCLExecutorService.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.md.sal.dom.store.impl; + +import java.util.concurrent.ExecutorService; + +import com.google.common.util.concurrent.ForwardingExecutorService; +import com.google.common.util.concurrent.MoreExecutors; + +/** + * A forwarding Executor used by unit tests for DataChangeListener notifications + * + * @author Thomas Pantelis + */ +public class TestDCLExecutorService extends ForwardingExecutorService { + + // Start with a same thread executor to avoid timing issues during test setup. + private volatile ExecutorService currentExecutor = MoreExecutors.sameThreadExecutor(); + + // The real executor to use when test setup is complete. + private final ExecutorService postSetupExecutor; + + + public TestDCLExecutorService( ExecutorService postSetupExecutor ) { + this.postSetupExecutor = postSetupExecutor; + } + + @Override + protected ExecutorService delegate() { + return currentExecutor; + } + + public void afterTestSetup() { + // Test setup complete - switch to the real executor. + currentExecutor = postSetupExecutor; + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeBaseTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeBaseTest.java index 7c8676eff5..cdf465aace 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeBaseTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeBaseTest.java @@ -11,8 +11,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; - import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList; @@ -32,7 +30,7 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite @Override public void putTopLevelOneNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); @@ -48,7 +46,7 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite public void replaceTopLevelNestedChanged(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertContains(change.getCreatedData(), path(FOO, BAZ)); @@ -62,7 +60,7 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite protected void putTopLevelWithTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = 
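Note: a minimal usage sketch of the new TestDCLExecutorService, assembled from the AbstractDataChangeListenerTest and DatastoreTestTask hunks earlier in this patch (every call shown appears there; nothing new is introduced):

    // Start on the implicit same-thread delegate so fixture writes are processed
    // deterministically, then switch to the real bounded pool for the test proper.
    TestDCLExecutorService dclExecutor = new TestDCLExecutorService(
            SpecialExecutors.newBlockingBoundedFastThreadPool(1, 10, "DCL"));
    InMemoryDOMDataStore store = new InMemoryDOMDataStore("TEST",
            MoreExecutors.sameThreadExecutor(), dclExecutor);
    // ... register change listeners and perform setup writes ...
    dclExecutor.afterTestSetup();   // notifications now run asynchronously
    // ... execute the write under test and await or verify change events ...
    dclExecutor.shutdownNow();      // typically in an @After method

Starting on a same-thread executor matches the class comment above about avoiding timing issues during test setup: setup-phase writes complete before any asynchronous notification can race with them.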
task.getChangeEvent(); assertNotNull(change); assertFalse(change.getCreatedData().isEmpty()); @@ -77,7 +75,6 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite protected void twoNestedExistsOneIsDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - Future future = task.getChangeEvent(); /* * Base listener should be notified only and only if actual node changed its state, * since deletion of child, did not result in change of node we are listening @@ -85,14 +82,14 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite * and this means settable future containing receivedDataChangeEvent is not done. * */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override public void nestedListExistsRootDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertEmpty(change.getCreatedData()); assertEmpty(change.getUpdatedData()); @@ -103,7 +100,6 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite @Override protected void existingOneNestedWriteAdditionalNested(final DatastoreTestTask task) { - Future future = task.getChangeEvent(); /* * One listener should be notified only and only if actual node changed its state, * since deletion of nested child (in this case /nested-list/nested-list[foo], @@ -112,12 +108,11 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite * and this means settable future containing receivedDataChangeEvent is not done. * */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override protected void existingTopWriteTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - Future future = task.getChangeEvent(); /* * One listener should be notified only and only if actual node changed its state, * since deletion of nested child (in this case /nested-list/nested-list[foo], @@ -126,12 +121,12 @@ public class WildcardedScopeBaseTest extends DefaultDataChangeListenerTestSuite * and this means settable future containing receivedDataChangeEvent is not done. 
* */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override protected void existingTopWriteSibling(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO_SIBLING)); assertNotContains(change.getUpdatedData(), path(FOO), TOP_LEVEL); diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeOneTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeOneTest.java index ac18d5c976..3407e0ffa4 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeOneTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeOneTest.java @@ -11,8 +11,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; - import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList; @@ -32,7 +30,7 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { @Override public void putTopLevelOneNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); @@ -48,7 +46,7 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { public void replaceTopLevelNestedChanged(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertContains(change.getCreatedData(), path(FOO, BAZ)); @@ -62,7 +60,7 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { protected void putTopLevelWithTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertFalse(change.getCreatedData().isEmpty()); @@ -77,7 +75,6 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { protected void twoNestedExistsOneIsDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - Future future = task.getChangeEvent(); /* * One listener should be notified only and only if actual node changed its state, * since deletion of nested child (in this case /nested-list/nested-list[foo], @@ -86,14 +83,14 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { * and this means settable future containing receivedDataChangeEvent is not done. 
* */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override public void nestedListExistsRootDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertEmpty(change.getCreatedData()); assertEmpty(change.getUpdatedData()); @@ -104,7 +101,6 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { @Override protected void existingOneNestedWriteAdditionalNested(final DatastoreTestTask task) { - Future future = task.getChangeEvent(); /* * One listener should be notified only and only if actual node changed its state, * since deletion of nested child (in this case /nested-list/nested-list[foo], @@ -113,12 +109,11 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { * and this means settable future containing receivedDataChangeEvent is not done. * */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override protected void existingTopWriteTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - Future future = task.getChangeEvent(); /* * One listener should be notified only and only if actual node changed its state, * since deletion of nested child (in this case /nested-list/nested-list[foo], @@ -127,12 +122,12 @@ public class WildcardedScopeOneTest extends DefaultDataChangeListenerTestSuite { * and this means settable future containing receivedDataChangeEvent is not done. * */ - assertFalse(future.isDone()); + task.verifyNoChangeEvent(); } @Override protected void existingTopWriteSibling(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO_SIBLING)); assertNotContains(change.getUpdatedData(),path(FOO), TOP_LEVEL); diff --git a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeSubtreeTest.java b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeSubtreeTest.java index 7e67242dd3..a7fa24f293 100644 --- a/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeSubtreeTest.java +++ b/opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeSubtreeTest.java @@ -32,7 +32,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui @Override public void putTopLevelOneNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotContains(change.getCreatedData(), TOP_LEVEL); assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR)); @@ -45,7 +45,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui public void replaceTopLevelNestedChanged(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertContains(change.getCreatedData(), path(FOO, BAZ)); @@ -59,7 +59,7 @@ public class 
WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui protected void putTopLevelWithTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertFalse(change.getCreatedData().isEmpty()); @@ -74,7 +74,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui protected void twoNestedExistsOneIsDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertNotNull(change); assertTrue(change.getCreatedData().isEmpty()); assertContains(change.getUpdatedData(), path(FOO)); @@ -86,7 +86,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui public void nestedListExistsRootDeleted(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertEmpty(change.getCreatedData()); assertEmpty(change.getUpdatedData()); @@ -97,7 +97,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui @Override protected void existingOneNestedWriteAdditionalNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO,BAZ)); assertNotContains(change.getCreatedData(), path(FOO,BAR)); @@ -108,7 +108,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui @Override protected void existingTopWriteTwoNested(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO,BAR),path(FOO,BAZ)); assertContains(change.getUpdatedData(), path(FOO)); @@ -118,7 +118,7 @@ public class WildcardedScopeSubtreeTest extends DefaultDataChangeListenerTestSui @Override protected void existingTopWriteSibling(final DatastoreTestTask task) throws InterruptedException, ExecutionException { - AsyncDataChangeEvent> change = task.getChangeEvent().get(); + AsyncDataChangeEvent> change = task.getChangeEvent(); assertContains(change.getCreatedData(), path(FOO_SIBLING)); assertNotContains(change.getUpdatedData(), path(FOO), TOP_LEVEL); diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java index b75df80f4e..bca47af5c0 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java @@ -10,12 +10,10 @@ package org.opendaylight.controller.config.yang.md.sal.connector.netconf; import static org.opendaylight.controller.config.api.JmxAttributeValidationException.checkCondition; import static 
org.opendaylight.controller.config.api.JmxAttributeValidationException.checkNotNull; -import java.io.File; -import java.io.InputStream; +import com.google.common.base.Optional; import java.net.InetSocketAddress; import java.util.List; import java.util.concurrent.ExecutorService; - import org.opendaylight.controller.config.api.JmxAttributeValidationException; import org.opendaylight.controller.netconf.client.NetconfClientDispatcher; import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration; @@ -25,9 +23,11 @@ import org.opendaylight.controller.netconf.nettyutil.handler.ssh.authentication. import org.opendaylight.controller.sal.binding.api.BindingAwareBroker; import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; import org.opendaylight.controller.sal.connect.netconf.NetconfDevice; +import org.opendaylight.controller.sal.connect.netconf.NetconfStateSchemas; import org.opendaylight.controller.sal.connect.netconf.listener.NetconfDeviceCommunicator; import org.opendaylight.controller.sal.connect.netconf.listener.NetconfSessionCapabilities; import org.opendaylight.controller.sal.connect.netconf.sal.NetconfDeviceSalFacade; +import org.opendaylight.controller.sal.connect.netconf.schema.mapping.NetconfMessageTransformer; import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; import org.opendaylight.controller.sal.core.api.Broker; import org.opendaylight.protocol.framework.ReconnectStrategy; @@ -35,16 +35,12 @@ import org.opendaylight.protocol.framework.ReconnectStrategyFactory; import org.opendaylight.protocol.framework.TimedReconnectStrategy; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Host; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.IpAddress; -import org.opendaylight.yangtools.yang.model.util.repo.AbstractCachingSchemaSourceProvider; -import org.opendaylight.yangtools.yang.model.util.repo.FilesystemSchemaCachingProvider; -import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider; -import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProviders; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory; +import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceRegistry; import org.osgi.framework.BundleContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Optional; - /** * */ @@ -52,9 +48,10 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co { private static final Logger logger = LoggerFactory.getLogger(NetconfConnectorModule.class); - private static AbstractCachingSchemaSourceProvider GLOBAL_NETCONF_SOURCE_PROVIDER = null; private BundleContext bundleContext; private Optional userCapabilities; + private SchemaSourceRegistry schemaRegistry; + private SchemaContextFactory schemaContextFactory; public NetconfConnectorModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier, final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) { super(identifier, dependencyResolver); @@ -108,8 +105,12 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co final RemoteDeviceHandler salFacade = new NetconfDeviceSalFacade(id, domBroker, bindingBroker, bundleContext, globalProcessingExecutor); + + final NetconfDevice.SchemaResourcesDTO schemaResourcesDTO = + new NetconfDevice.SchemaResourcesDTO(schemaRegistry, schemaContextFactory, new 
NetconfStateSchemas.NetconfStateSchemasResolverImpl()); + final NetconfDevice device = - NetconfDevice.createNetconfDevice(id, getGlobalNetconfSchemaProvider(), globalProcessingExecutor, salFacade); + new NetconfDevice(schemaResourcesDTO, id, salFacade, globalProcessingExecutor, new NetconfMessageTransformer()); final NetconfDeviceCommunicator listener = userCapabilities.isPresent() ? new NetconfDeviceCommunicator(id, device, userCapabilities.get()) : new NetconfDeviceCommunicator(id, device); @@ -148,17 +149,6 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co return Optional.of(parsedOverrideCapabilities); } - private synchronized AbstractCachingSchemaSourceProvider getGlobalNetconfSchemaProvider() { - if(GLOBAL_NETCONF_SOURCE_PROVIDER == null) { - final String storageFile = "cache/schema"; - // File directory = bundleContext.getDataFile(storageFile); - final File directory = new File(storageFile); - final SchemaSourceProvider defaultProvider = SchemaSourceProviders.noopProvider(); - GLOBAL_NETCONF_SOURCE_PROVIDER = FilesystemSchemaCachingProvider.createFromStringSourceProvider(defaultProvider, directory); - } - return GLOBAL_NETCONF_SOURCE_PROVIDER; - } - public void setBundleContext(final BundleContext bundleContext) { this.bundleContext = bundleContext; } @@ -212,4 +202,12 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co return new InetSocketAddress(ip, getPort().getValue()); } } + + public void setSchemaRegistry(final SchemaSourceRegistry schemaRegistry) { + this.schemaRegistry = schemaRegistry; + } + + public void setSchemaContextFactory(final SchemaContextFactory schemaContextFactory) { + this.schemaContextFactory = schemaContextFactory; + } } diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModuleFactory.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModuleFactory.java index 9842139dab..b6299697cc 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModuleFactory.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModuleFactory.java @@ -7,9 +7,17 @@ */ package org.opendaylight.controller.config.yang.md.sal.connector.netconf; +import java.io.File; + import org.opendaylight.controller.config.api.DependencyResolver; import org.opendaylight.controller.config.api.DynamicMBeanWithInstance; import org.opendaylight.controller.config.spi.Module; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceFilter; +import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; +import org.opendaylight.yangtools.yang.model.repo.util.FilesystemSchemaSourceCache; +import org.opendaylight.yangtools.yang.parser.repo.SharedSchemaRepository; +import org.opendaylight.yangtools.yang.parser.util.TextToASTTransformer; import org.osgi.framework.BundleContext; /** @@ -18,20 +26,38 @@ import org.osgi.framework.BundleContext; public class NetconfConnectorModuleFactory extends org.opendaylight.controller.config.yang.md.sal.connector.netconf.AbstractNetconfConnectorModuleFactory { + // TODO this should be injected + // Netconf devices have separated schema 
registry + factory from controller + private final SharedSchemaRepository repository = new SharedSchemaRepository(NAME); + private final SchemaContextFactory schemaContextFactory + = repository.createSchemaContextFactory(SchemaSourceFilter.ALWAYS_ACCEPT); + + public NetconfConnectorModuleFactory() { + // Start cache and Text to AST transformer + final FilesystemSchemaSourceCache cache = new FilesystemSchemaSourceCache<>(repository, YangTextSchemaSource.class, new File("cache/schema")); + repository.registerSchemaSourceListener(cache); + repository.registerSchemaSourceListener(TextToASTTransformer.create(repository, repository)); + } + @Override - public Module createModule(String instanceName, DependencyResolver dependencyResolver, - DynamicMBeanWithInstance old, BundleContext bundleContext) throws Exception { - NetconfConnectorModule module = (NetconfConnectorModule) super.createModule(instanceName, dependencyResolver, + public Module createModule(final String instanceName, final DependencyResolver dependencyResolver, + final DynamicMBeanWithInstance old, final BundleContext bundleContext) throws Exception { + final NetconfConnectorModule module = (NetconfConnectorModule) super.createModule(instanceName, dependencyResolver, old, bundleContext); + module.setBundleContext(bundleContext); + module.setSchemaRegistry(repository); + module.setSchemaContextFactory(schemaContextFactory); return module; } @Override - public Module createModule(String instanceName, DependencyResolver dependencyResolver, BundleContext bundleContext) { - NetconfConnectorModule module = (NetconfConnectorModule) super.createModule(instanceName, dependencyResolver, + public Module createModule(final String instanceName, final DependencyResolver dependencyResolver, final BundleContext bundleContext) { + final NetconfConnectorModule module = (NetconfConnectorModule) super.createModule(instanceName, dependencyResolver, bundleContext); module.setBundleContext(bundleContext); + module.setSchemaRegistry(repository); + module.setSchemaContextFactory(schemaContextFactory); return module; } } diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/api/RemoteDeviceHandler.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/api/RemoteDeviceHandler.java index b2845d5533..269c4af82f 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/api/RemoteDeviceHandler.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/api/RemoteDeviceHandler.java @@ -9,11 +9,11 @@ package org.opendaylight.controller.sal.connect.api; import org.opendaylight.controller.sal.core.api.RpcImplementation; import org.opendaylight.yangtools.yang.data.api.CompositeNode; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; public interface RemoteDeviceHandler extends AutoCloseable { - void onDeviceConnected(SchemaContextProvider remoteSchemaContextProvider, + void onDeviceConnected(SchemaContext remoteSchemaContext, PREF netconfSessionPreferences, RpcImplementation deviceRpc); void onDeviceDisconnected(); diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfDevice.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfDevice.java index 
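Note: for reference, the schema-resolution wiring that NetconfConnectorModuleFactory now performs (shown in the hunk above) reduces to the following sequence. This is a restatement assembled from that hunk, not additional API; the classes come from the imports added there (SharedSchemaRepository, FilesystemSchemaSourceCache, YangTextSchemaSource, TextToASTTransformer, SchemaSourceFilter, SchemaContextFactory, java.io.File), and NAME is the factory's module-name constant.

    // One shared repository per factory; connected devices feed their YANG sources into it.
    final SharedSchemaRepository repository = new SharedSchemaRepository(NAME);

    // Cache retrieved YANG text under cache/schema so it survives restarts.
    final FilesystemSchemaSourceCache<YangTextSchemaSource> cache =
            new FilesystemSchemaSourceCache<>(repository, YangTextSchemaSource.class,
                    new File("cache/schema"));
    repository.registerSchemaSourceListener(cache);

    // Convert YANG text sources into AST form as they are registered.
    repository.registerSchemaSourceListener(TextToASTTransformer.create(repository, repository));

    // Each created NetconfConnectorModule receives the registry and this context factory.
    final SchemaContextFactory schemaContextFactory =
            repository.createSchemaContextFactory(SchemaSourceFilter.ALWAYS_ACCEPT);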
350132cf99..cc9eb5a851 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfDevice.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfDevice.java @@ -7,41 +7,47 @@ */ package org.opendaylight.controller.sal.connect.netconf; -import java.io.InputStream; +import com.google.common.base.Function; +import com.google.common.base.Optional; +import com.google.common.base.Preconditions; +import com.google.common.collect.Collections2; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.common.util.concurrent.CheckedFuture; +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import java.util.Collection; import java.util.LinkedList; import java.util.List; +import java.util.Set; +import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; - import org.opendaylight.controller.netconf.api.NetconfMessage; -import org.opendaylight.controller.netconf.util.xml.XmlUtil; import org.opendaylight.controller.sal.connect.api.MessageTransformer; import org.opendaylight.controller.sal.connect.api.RemoteDevice; import org.opendaylight.controller.sal.connect.api.RemoteDeviceCommunicator; import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; -import org.opendaylight.controller.sal.connect.api.SchemaContextProviderFactory; -import org.opendaylight.controller.sal.connect.api.SchemaSourceProviderFactory; import org.opendaylight.controller.sal.connect.netconf.listener.NetconfSessionCapabilities; import org.opendaylight.controller.sal.connect.netconf.sal.NetconfDeviceRpc; -import org.opendaylight.controller.sal.connect.netconf.schema.NetconfDeviceSchemaProviderFactory; -import org.opendaylight.controller.sal.connect.netconf.schema.NetconfRemoteSchemaSourceProvider; -import org.opendaylight.controller.sal.connect.netconf.schema.mapping.NetconfMessageTransformer; +import org.opendaylight.controller.sal.connect.netconf.schema.NetconfRemoteSchemaYangSourceProvider; import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; -import org.opendaylight.controller.sal.core.api.RpcImplementation; -import org.opendaylight.yangtools.yang.data.api.CompositeNode; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; -import org.opendaylight.yangtools.yang.model.util.repo.AbstractCachingSchemaSourceProvider; -import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider; +import org.opendaylight.yangtools.yang.common.QName; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import org.opendaylight.yangtools.yang.model.repo.api.MissingSchemaSourceException; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaResolutionException; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceRepresentation; +import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier; +import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; +import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource; +import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceRegistration; +import 
org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceRegistry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.util.concurrent.FutureCallback; -import com.google.common.util.concurrent.Futures; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; - /** * This is a mediator between NetconfDeviceCommunicator and NetconfDeviceSalFacade */ @@ -49,51 +55,33 @@ public final class NetconfDevice implements RemoteDevice QNAME_TO_SOURCE_ID_FUNCTION = new Function() { + @Override + public SourceIdentifier apply(final QName input) { + return new SourceIdentifier(input.getLocalName(), Optional.fromNullable(input.getFormattedRevision())); + } + }; + private final RemoteDeviceId id; + private final SchemaContextFactory schemaContextFactory; private final RemoteDeviceHandler salFacade; private final ListeningExecutorService processingExecutor; + private final SchemaSourceRegistry schemaRegistry; private final MessageTransformer messageTransformer; - private final SchemaContextProviderFactory schemaContextProviderFactory; - private final SchemaSourceProviderFactory sourceProviderFactory; private final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver; private final NotificationHandler notificationHandler; + private final List> sourceRegistrations = Lists.newArrayList(); - public static NetconfDevice createNetconfDevice(final RemoteDeviceId id, - final AbstractCachingSchemaSourceProvider schemaSourceProvider, - final ExecutorService executor, final RemoteDeviceHandler salFacade) { - return createNetconfDevice(id, schemaSourceProvider, executor, salFacade, new NetconfStateSchemas.NetconfStateSchemasResolverImpl()); - } - - @VisibleForTesting - protected static NetconfDevice createNetconfDevice(final RemoteDeviceId id, - final AbstractCachingSchemaSourceProvider schemaSourceProvider, - final ExecutorService executor, final RemoteDeviceHandler salFacade, - final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver) { - - return new NetconfDevice(id, salFacade, executor, new NetconfMessageTransformer(), - new NetconfDeviceSchemaProviderFactory(id), new SchemaSourceProviderFactory() { - @Override - public SchemaSourceProvider createSourceProvider(final RpcImplementation deviceRpc) { - return schemaSourceProvider.createInstanceFor(new NetconfRemoteSchemaSourceProvider(id, - deviceRpc)); - } - }, stateSchemasResolver); - } - - @VisibleForTesting - protected NetconfDevice(final RemoteDeviceId id, final RemoteDeviceHandler salFacade, - final ExecutorService processingExecutor, final MessageTransformer messageTransformer, - final SchemaContextProviderFactory schemaContextProviderFactory, - final SchemaSourceProviderFactory sourceProviderFactory, - final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver) { + public NetconfDevice(final SchemaResourcesDTO schemaResourcesDTO, final RemoteDeviceId id, final RemoteDeviceHandler salFacade, + final ExecutorService globalProcessingExecutor, final MessageTransformer messageTransformer) { this.id = id; + this.schemaRegistry = schemaResourcesDTO.getSchemaRegistry(); this.messageTransformer = messageTransformer; + this.schemaContextFactory = schemaResourcesDTO.getSchemaContextFactory(); this.salFacade = salFacade; - this.sourceProviderFactory = 
sourceProviderFactory; - this.stateSchemasResolver = stateSchemasResolver; - this.processingExecutor = MoreExecutors.listeningDecorator(processingExecutor); - this.schemaContextProviderFactory = schemaContextProviderFactory; + this.stateSchemasResolver = schemaResourcesDTO.getStateSchemasResolver(); + this.processingExecutor = MoreExecutors.listeningDecorator(globalProcessingExecutor); this.notificationHandler = new NotificationHandler(salFacade, messageTransformer, id); } @@ -107,60 +95,73 @@ public final class NetconfDevice implements RemoteDevice salInitializationFuture = processingExecutor.submit(new Runnable() { + final NetconfDeviceRpc deviceRpc = setUpDeviceRpc(listener); + + final DeviceSourcesResolver task = new DeviceSourcesResolver(deviceRpc, remoteSessionCapabilities, id, stateSchemasResolver); + final ListenableFuture sourceResolverFuture = processingExecutor.submit(task); + + final FutureCallback resolvedSourceCallback = new FutureCallback() { @Override - public void run() { - final NetconfDeviceRpc deviceRpc = setUpDeviceRpc(remoteSessionCapabilities, listener); - - final NetconfStateSchemas availableSchemas = stateSchemasResolver.resolve(deviceRpc, remoteSessionCapabilities, id); - logger.warn("{}: Schemas exposed by ietf-netconf-monitoring: {}", id, availableSchemas.getAvailableYangSchemasQNames()); - // TODO use this for shared schema context - - final SchemaSourceProvider delegate = sourceProviderFactory.createSourceProvider(deviceRpc); - final SchemaContextProvider schemaContextProvider = setUpSchemaContext(delegate, remoteSessionCapabilities); - updateMessageTransformer(schemaContextProvider); - salFacade.onDeviceConnected(schemaContextProvider, remoteSessionCapabilities, deviceRpc); - notificationHandler.onRemoteSchemaUp(); + public void onSuccess(final DeviceSources result) { + addProvidedSourcesToSchemaRegistry(deviceRpc, result); + setUpSchema(result); } - }); - Futures.addCallback(salInitializationFuture, new FutureCallback() { - @Override - public void onSuccess(final Object result) { - logger.debug("{}: Initialization in sal successful", id); - logger.info("{}: Netconf connector initialized successfully", id); + private void setUpSchema(final DeviceSources result) { + processingExecutor.submit(new RecursiveSchemaSetup(result, remoteSessionCapabilities, deviceRpc, listener)); } @Override public void onFailure(final Throwable t) { - // Unable to initialize device, set as disconnected - logger.error("{}: Initialization failed", id, t); - salFacade.onDeviceDisconnected(); - // TODO ssh connection is still open if sal initialization fails + logger.warn("{}: Unexpected error resolving device sources: {}", id, t); + handleSalInitializationFailure(t, listener); } - }); + }; + + Futures.addCallback(sourceResolverFuture, resolvedSourceCallback); + } + + private void handleSalInitializationSuccess(final SchemaContext result, final NetconfSessionCapabilities remoteSessionCapabilities, final NetconfDeviceRpc deviceRpc) { + updateMessageTransformer(result); + salFacade.onDeviceConnected(result, remoteSessionCapabilities, deviceRpc); + notificationHandler.onRemoteSchemaUp(); + + logger.debug("{}: Initialization in sal successful", id); + logger.info("{}: Netconf connector initialized successfully", id); + } + + private void handleSalInitializationFailure(final Throwable t, final RemoteDeviceCommunicator listener) { + logger.error("{}: Initialization in sal failed, disconnecting from device", id, t); + listener.close(); + onRemoteSessionDown(); } /** * Update initial message 
transformer to use retrieved schema + * @param currentSchemaContext */ - private void updateMessageTransformer(final SchemaContextProvider schemaContextProvider) { - messageTransformer.onGlobalContextUpdated(schemaContextProvider.getSchemaContext()); + private void updateMessageTransformer(final SchemaContext currentSchemaContext) { + messageTransformer.onGlobalContextUpdated(currentSchemaContext); } - private SchemaContextProvider setUpSchemaContext(final SchemaSourceProvider sourceProvider, final NetconfSessionCapabilities capabilities) { - return schemaContextProviderFactory.createContextProvider(capabilities.getModuleBasedCaps(), sourceProvider); + private void addProvidedSourcesToSchemaRegistry(final NetconfDeviceRpc deviceRpc, final DeviceSources deviceSources) { + final NetconfRemoteSchemaYangSourceProvider yangProvider = new NetconfRemoteSchemaYangSourceProvider(id, deviceRpc); + for (final SourceIdentifier sourceId : deviceSources.getProvidedSources()) { + sourceRegistrations.add(schemaRegistry.registerSchemaSource(yangProvider, + PotentialSchemaSource.create(sourceId, YangTextSchemaSource.class, PotentialSchemaSource.Costs.REMOTE_IO.getValue()))); + } } - private NetconfDeviceRpc setUpDeviceRpc(final NetconfSessionCapabilities capHolder, final RemoteDeviceCommunicator listener) { - Preconditions.checkArgument(capHolder.isMonitoringSupported(), - "%s: Netconf device does not support netconf monitoring, yang schemas cannot be acquired. Netconf device capabilities", capHolder); - return new NetconfDeviceRpc(listener, messageTransformer); + private NetconfDeviceRpc setUpDeviceRpc(final RemoteDeviceCommunicator listener) { + return new NetconfDeviceRpc(listener, messageTransformer); } @Override public void onRemoteSessionDown() { salFacade.onDeviceDisconnected(); + for (final SchemaSourceRegistration sourceRegistration : sourceRegistrations) { + sourceRegistration.close(); + } } @Override @@ -169,59 +170,181 @@ public final class NetconfDevice implements RemoteDevice salFacade; - private final List cache = new LinkedList<>(); - private final MessageTransformer messageTransformer; - private boolean passNotifications = false; + public NetconfStateSchemas.NetconfStateSchemasResolver getStateSchemasResolver() { + return stateSchemasResolver; + } + } + + /** + * Schema building callable. 
+ */ + private static class DeviceSourcesResolver implements Callable { + private final NetconfDeviceRpc deviceRpc; + private final NetconfSessionCapabilities remoteSessionCapabilities; private final RemoteDeviceId id; + private final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver; - NotificationHandler(final RemoteDeviceHandler salFacade, final MessageTransformer messageTransformer, final RemoteDeviceId id) { - this.salFacade = salFacade; - this.messageTransformer = messageTransformer; + public DeviceSourcesResolver(final NetconfDeviceRpc deviceRpc, final NetconfSessionCapabilities remoteSessionCapabilities, final RemoteDeviceId id, final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver) { + this.deviceRpc = deviceRpc; + this.remoteSessionCapabilities = remoteSessionCapabilities; this.id = id; + this.stateSchemasResolver = stateSchemasResolver; } - synchronized void handleNotification(final NetconfMessage notification) { - if(passNotifications) { - passNotification(messageTransformer.toNotification(notification)); - } else { - cacheNotification(notification); + @Override + public DeviceSources call() throws Exception { + + final Set requiredSources = Sets.newHashSet(Collections2.transform( + remoteSessionCapabilities.getModuleBasedCaps(), QNAME_TO_SOURCE_ID_FUNCTION)); + + // If monitoring is not supported, we will still attempt to create schema, sources might be already provided + final NetconfStateSchemas availableSchemas = stateSchemasResolver.resolve(deviceRpc, remoteSessionCapabilities, id); + logger.debug("{}: Schemas exposed by ietf-netconf-monitoring: {}", id, availableSchemas.getAvailableYangSchemasQNames()); + + final Set providedSources = Sets.newHashSet(Collections2.transform( + availableSchemas.getAvailableYangSchemasQNames(), QNAME_TO_SOURCE_ID_FUNCTION)); + + final Set requiredSourcesNotProvided = Sets.difference(requiredSources, providedSources); + + if (!requiredSourcesNotProvided.isEmpty()) { + logger.warn("{}: Netconf device does not provide all yang models reported in hello message capabilities, required but not provided: {}", + id, requiredSourcesNotProvided); + logger.warn("{}: Attempting to build schema context from required sources", id); } - } - /** - * Forward all cached notifications and pass all notifications from this point directly to sal facade. - */ - synchronized void onRemoteSchemaUp() { - passNotifications = true; - for (final NetconfMessage cachedNotification : cache) { - passNotification(messageTransformer.toNotification(cachedNotification)); + // TODO should we perform this ? 
We have a mechanism to fix initialization of devices not reporting or required modules in hello + // That is overriding capabilities in configuration using attribute yang-module-capabilities + // This is more user friendly even though it clashes with attribute yang-module-capabilities + // Some devices do not report all required models in hello message, but provide them + final Set providedSourcesNotRequired = Sets.difference(providedSources, requiredSources); + if (!providedSourcesNotRequired.isEmpty()) { + logger.warn("{}: Netconf device provides additional yang models not reported in hello message capabilities: {}", + id, providedSourcesNotRequired); + logger.warn("{}: Adding provided but not required sources as required to prevent failures", id); + requiredSources.addAll(providedSourcesNotRequired); } - cache.clear(); + return new DeviceSources(requiredSources, providedSources); } + } - private void cacheNotification(final NetconfMessage notification) { - Preconditions.checkState(passNotifications == false); + /** + * Contains RequiredSources - sources from capabilities. + * + */ + private static final class DeviceSources { + private final Collection requiredSources; + private final Collection providedSources; - logger.debug("{}: Caching notification {}, remote schema not yet fully built", id, notification); - if(logger.isTraceEnabled()) { - logger.trace("{}: Caching notification {}", id, XmlUtil.toString(notification.getDocument())); - } + public DeviceSources(final Collection requiredSources, final Collection providedSources) { + this.requiredSources = requiredSources; + this.providedSources = providedSources; + } - cache.add(notification); + public Collection getRequiredSources() { + return requiredSources; } - private void passNotification(final CompositeNode parsedNotification) { - logger.debug("{}: Forwarding notification {}", id, parsedNotification); - Preconditions.checkNotNull(parsedNotification); - salFacade.onNotification(parsedNotification); + public Collection getProvidedSources() { + return providedSources; } + } + /** + * Schema builder that tries to build schema context from provided sources or biggest subset of it. 
+ */ + private final class RecursiveSchemaSetup implements Runnable { + private final DeviceSources deviceSources; + private final NetconfSessionCapabilities remoteSessionCapabilities; + private final NetconfDeviceRpc deviceRpc; + private final RemoteDeviceCommunicator listener; + + public RecursiveSchemaSetup(final DeviceSources deviceSources, final NetconfSessionCapabilities remoteSessionCapabilities, final NetconfDeviceRpc deviceRpc, final RemoteDeviceCommunicator listener) { + this.deviceSources = deviceSources; + this.remoteSessionCapabilities = remoteSessionCapabilities; + this.deviceRpc = deviceRpc; + this.listener = listener; + } + + @Override + public void run() { + setUpSchema(deviceSources.getRequiredSources()); + } + + /** + * Recursively build schema context, in case of success or final failure notify device + */ + private void setUpSchema(final Collection requiredSources) { + logger.trace("{}: Trying to build schema context from {}", id, requiredSources); + + // If no more sources, fail + if(requiredSources.isEmpty()) { + handleSalInitializationFailure(new IllegalStateException(id + ": No more sources for schema context"), listener); + return; + } + + final CheckedFuture schemaBuilderFuture = schemaContextFactory.createSchemaContext(requiredSources); + + final FutureCallback RecursiveSchemaBuilderCallback = new FutureCallback() { + + @Override + public void onSuccess(final SchemaContext result) { + logger.debug("{}: Schema context built successfully from {}", id, requiredSources); + handleSalInitializationSuccess(result, remoteSessionCapabilities, deviceRpc); + } + + @Override + public void onFailure(final Throwable t) { + // In case source missing, try without it + if (t instanceof MissingSchemaSourceException) { + final SourceIdentifier missingSource = ((MissingSchemaSourceException) t).getSourceId(); + logger.warn("{}: Unable to build schema context, missing source {}, will reattempt without it", id, missingSource); + setUpSchema(stripMissingSource(requiredSources, missingSource)); + + // In case resolution error, try only with resolved sources + } else if (t instanceof SchemaResolutionException) { + // TODO check for infinite loop + final SchemaResolutionException resolutionException = (SchemaResolutionException) t; + logger.warn("{}: Unable to build schema context, unsatisfied imports {}, will reattempt with resolved only", id, resolutionException.getUnsatisfiedImports()); + setUpSchema(resolutionException.getResolvedSources()); + // unknown error, fail + } else { + handleSalInitializationFailure(t, listener); + } + } + }; + + Futures.addCallback(schemaBuilderFuture, RecursiveSchemaBuilderCallback); + } + + private Collection stripMissingSource(final Collection requiredSources, final SourceIdentifier sIdToRemove) { + final LinkedList sourceIdentifiers = Lists.newLinkedList(requiredSources); + final boolean removed = sourceIdentifiers.remove(sIdToRemove); + Preconditions.checkState(removed, "{}: Trying to remove {} from {} failed", id, sIdToRemove, requiredSources); + return sourceIdentifiers; + } + } } diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemas.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemas.java index b5400347e7..77e342641e 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemas.java +++ 
b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemas.java @@ -93,7 +93,7 @@ public final class NetconfStateSchemas { */ private static NetconfStateSchemas create(final NetconfDeviceRpc deviceRpc, final NetconfSessionCapabilities remoteSessionCapabilities, final RemoteDeviceId id) { if(remoteSessionCapabilities.isMonitoringSupported() == false) { - logger.warn("{}: Netconf monitoring not supported on device, cannot detect available schemas"); + logger.warn("{}: Netconf monitoring not supported on device, cannot detect provided schemas"); return EMPTY; } diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NotificationHandler.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NotificationHandler.java new file mode 100644 index 0000000000..cc8960fb4f --- /dev/null +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NotificationHandler.java @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.sal.connect.netconf; + +import com.google.common.base.Preconditions; +import java.util.LinkedList; +import java.util.List; +import org.opendaylight.controller.netconf.api.NetconfMessage; +import org.opendaylight.controller.netconf.util.xml.XmlUtil; +import org.opendaylight.controller.sal.connect.api.MessageTransformer; +import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; +import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; +import org.opendaylight.yangtools.yang.data.api.CompositeNode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Handles incoming notifications. Either caches them(until onRemoteSchemaUp is called) or passes to sal Facade. + */ +final class NotificationHandler { + + private static final Logger logger = LoggerFactory.getLogger(NotificationHandler.class); + + private final RemoteDeviceHandler salFacade; + private final List queue = new LinkedList<>(); + private final MessageTransformer messageTransformer; + private final RemoteDeviceId id; + private boolean passNotifications = false; + + NotificationHandler(final RemoteDeviceHandler salFacade, final MessageTransformer messageTransformer, final RemoteDeviceId id) { + this.salFacade = Preconditions.checkNotNull(salFacade); + this.messageTransformer = Preconditions.checkNotNull(messageTransformer); + this.id = Preconditions.checkNotNull(id); + } + + synchronized void handleNotification(final NetconfMessage notification) { + if(passNotifications) { + passNotification(messageTransformer.toNotification(notification)); + } else { + queueNotification(notification); + } + } + + /** + * Forward all cached notifications and pass all notifications from this point directly to sal facade. 
+ */ + synchronized void onRemoteSchemaUp() { + passNotifications = true; + + for (final NetconfMessage cachedNotification : queue) { + passNotification(messageTransformer.toNotification(cachedNotification)); + } + + queue.clear(); + } + + private void queueNotification(final NetconfMessage notification) { + Preconditions.checkState(passNotifications == false); + + logger.debug("{}: Caching notification {}, remote schema not yet fully built", id, notification); + if(logger.isTraceEnabled()) { + logger.trace("{}: Caching notification {}", id, XmlUtil.toString(notification.getDocument())); + } + + queue.add(notification); + } + + private void passNotification(final CompositeNode parsedNotification) { + logger.debug("{}: Forwarding notification {}", id, parsedNotification); + Preconditions.checkNotNull(parsedNotification); + salFacade.onNotification(parsedNotification); + } +} diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/listener/NetconfDeviceCommunicator.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/listener/NetconfDeviceCommunicator.java index 2f24adcdbe..aadb911f45 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/listener/NetconfDeviceCommunicator.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/listener/NetconfDeviceCommunicator.java @@ -51,8 +51,10 @@ public class NetconfDeviceCommunicator implements NetconfClientSessionListener, private final RemoteDeviceId id; private final Lock sessionLock = new ReentrantLock(); + // TODO implement concurrent message limit private final Queue requests = new ArrayDeque<>(); private NetconfClientSession session; + private Future initFuture; public NetconfDeviceCommunicator(final RemoteDeviceId id, final RemoteDevice remoteDevice, final NetconfSessionCapabilities netconfSessionCapabilities) { @@ -97,9 +99,9 @@ public class NetconfDeviceCommunicator implements NetconfClientSessionListener, public void initializeRemoteConnection(final NetconfClientDispatcher dispatch, final NetconfClientConfiguration config) { if(config instanceof NetconfReconnectingClientConfiguration) { - dispatch.createReconnectingClient((NetconfReconnectingClientConfiguration) config); + initFuture = dispatch.createReconnectingClient((NetconfReconnectingClientConfiguration) config); } else { - dispatch.createClient(config); + initFuture = dispatch.createClient(config); } } @@ -172,7 +174,15 @@ public class NetconfDeviceCommunicator implements NetconfClientSessionListener, @Override public void close() { - tearDown( String.format( "The netconf session to %1$s has been closed", id.getName() ) ); + // Cancel reconnect if in progress + if(initFuture != null) { + initFuture.cancel(false); + } + // Disconnect from device + if(session != null) { + session.close(); + } + tearDown(id + ": Netconf session closed"); } @Override @@ -191,12 +201,12 @@ public class NetconfDeviceCommunicator implements NetconfClientSessionListener, private void processMessage(final NetconfMessage message) { Request request = null; sessionLock.lock(); + try { request = requests.peek(); - if (request.future.isUncancellable()) { + if (request != null && request.future.isUncancellable()) { requests.poll(); - } - else { + } else { request = null; logger.warn("{}: Ignoring unsolicited message {}", id, msgToS(message)); } diff --git 
a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/NetconfDeviceSalFacade.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/NetconfDeviceSalFacade.java index dbef290197..3cc513600d 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/NetconfDeviceSalFacade.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/NetconfDeviceSalFacade.java @@ -7,11 +7,12 @@ */ package org.opendaylight.controller.sal.connect.netconf.sal; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; - import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker; import org.opendaylight.controller.sal.binding.api.BindingAwareBroker; import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; @@ -30,14 +31,10 @@ import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.data.api.CompositeNode; import org.opendaylight.yangtools.yang.model.api.RpcDefinition; import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; import org.osgi.framework.BundleContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - public final class NetconfDeviceSalFacade implements AutoCloseable, RemoteDeviceHandler { private static final Logger logger= LoggerFactory.getLogger(NetconfDeviceSalFacade.class); @@ -64,11 +61,9 @@ public final class NetconfDeviceSalFacade implements AutoCloseable, RemoteDevice } @Override - public synchronized void onDeviceConnected(final SchemaContextProvider remoteSchemaContextProvider, + public synchronized void onDeviceConnected(final SchemaContext schemaContext, final NetconfSessionCapabilities netconfSessionPreferences, final RpcImplementation deviceRpc) { - final SchemaContext schemaContext = remoteSchemaContextProvider.getSchemaContext(); - // TODO remove deprecated SchemaContextProvider from SchemaAwareRpcBroker // TODO move SchemaAwareRpcBroker from sal-broker-impl, now we have depend on the whole sal-broker-impl final RpcProvisionRegistry rpcRegistry = new SchemaAwareRpcBroker(id.getPath().toString(), new org.opendaylight.controller.sal.dom.broker.impl.SchemaContextProvider() { @Override diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadOnlyTx.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadOnlyTx.java index 04a99511a1..6c46bed762 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadOnlyTx.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadOnlyTx.java @@ -7,12 +7,6 @@ */ package org.opendaylight.controller.sal.connect.netconf.sal.tx; -import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.CONFIG_SOURCE_RUNNING; -import static 
org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_DATA_QNAME; -import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_GET_CONFIG_QNAME; -import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_GET_QNAME; -import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.toFilterStructure; - import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Preconditions; @@ -35,6 +29,14 @@ import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.concurrent.ExecutionException; + +import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.CONFIG_SOURCE_RUNNING; +import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_DATA_QNAME; +import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_GET_CONFIG_QNAME; +import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.NETCONF_GET_QNAME; +import static org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil.toFilterStructure; + public final class NetconfDeviceReadOnlyTx implements DOMDataReadOnlyTransaction { @@ -55,7 +57,7 @@ public final class NetconfDeviceReadOnlyTx implements DOMDataReadOnlyTransaction final ListenableFuture> future = rpc.invokeRpc(NETCONF_GET_CONFIG_QNAME, NetconfMessageTransformUtil.wrap(NETCONF_GET_CONFIG_QNAME, CONFIG_SOURCE_RUNNING, toFilterStructure(path))); - ListenableFuture>> transformedFuture = Futures.transform(future, new Function, Optional>>() { + final ListenableFuture>> transformedFuture = Futures.transform(future, new Function, Optional>>() { @Override public Optional> apply(final RpcResult result) { checkReadSuccess(result, path); @@ -97,7 +99,7 @@ public final class NetconfDeviceReadOnlyTx implements DOMDataReadOnlyTransaction final YangInstanceIdentifier path) { final ListenableFuture> future = rpc.invokeRpc(NETCONF_GET_QNAME, NetconfMessageTransformUtil.wrap(NETCONF_GET_QNAME, toFilterStructure(path))); - ListenableFuture>> transformedFuture = Futures.transform(future, new Function, Optional>>() { + final ListenableFuture>> transformedFuture = Futures.transform(future, new Function, Optional>>() { @Override public Optional> apply(final RpcResult result) { checkReadSuccess(result, path); @@ -136,6 +138,19 @@ public final class NetconfDeviceReadOnlyTx implements DOMDataReadOnlyTransaction throw new IllegalArgumentException(String.format("%s, Cannot read data %s for %s datastore, unknown datastore type", id, path, store)); } + @Override public CheckedFuture exists( + LogicalDatastoreType store, + YangInstanceIdentifier path) { + CheckedFuture>, ReadFailedException> + data = read(store, path); + + try { + return Futures.immediateCheckedFuture(data.get().isPresent()); + } catch (InterruptedException | ExecutionException e) { + return Futures.immediateFailedCheckedFuture(new ReadFailedException("Exists failed",e)); + } + } + static YangInstanceIdentifier toLegacyPath(final DataNormalizer normalizer, final YangInstanceIdentifier path, final RemoteDeviceId id) { try { return normalizer.toLegacy(path); diff --git 
a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadWriteTx.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadWriteTx.java index 3d2c3b9d44..11362a2f9b 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadWriteTx.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/sal/tx/NetconfDeviceReadWriteTx.java @@ -10,6 +10,7 @@ package org.opendaylight.controller.sal.connect.netconf.sal.tx; import com.google.common.base.Optional; import com.google.common.util.concurrent.CheckedFuture; +import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import org.opendaylight.controller.md.sal.common.api.TransactionStatus; @@ -23,6 +24,8 @@ import org.opendaylight.yangtools.yang.common.RpcResult; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; +import java.util.concurrent.ExecutionException; + public class NetconfDeviceReadWriteTx implements DOMDataReadWriteTransaction { private final DOMDataReadTransaction delegateReadTx; @@ -69,6 +72,19 @@ public class NetconfDeviceReadWriteTx implements DOMDataReadWriteTransaction { return delegateReadTx.read(store, path); } + @Override public CheckedFuture exists( + LogicalDatastoreType store, + YangInstanceIdentifier path) { + CheckedFuture>, ReadFailedException> + data = read(store, path); + + try { + return Futures.immediateCheckedFuture(data.get().isPresent()); + } catch (InterruptedException | ExecutionException e) { + return Futures.immediateFailedCheckedFuture(new ReadFailedException("Exists failed",e)); + } + } + @Override public Object getIdentifier() { return this; diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfDeviceSchemaProviderFactory.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfDeviceSchemaProviderFactory.java deleted file mode 100644 index e7d64646ea..0000000000 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfDeviceSchemaProviderFactory.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
- * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Public License v1.0 which accompanies this distribution, - * and is available at http://www.eclipse.org/legal/epl-v10.html - */ -package org.opendaylight.controller.sal.connect.netconf.schema; - -import java.io.InputStream; -import java.util.Collection; -import java.util.List; -import java.util.Set; - -import org.opendaylight.controller.sal.connect.api.SchemaContextProviderFactory; -import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; -import org.opendaylight.yangtools.yang.common.QName; -import org.opendaylight.yangtools.yang.model.api.Module; -import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; -import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider; -import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl; -import org.opendaylight.yangtools.yang.parser.impl.util.YangSourceContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Preconditions; - -public final class NetconfDeviceSchemaProviderFactory implements SchemaContextProviderFactory { - - private static final Logger logger = LoggerFactory.getLogger(NetconfDeviceSchemaProviderFactory.class); - - private final RemoteDeviceId id; - - public NetconfDeviceSchemaProviderFactory(final RemoteDeviceId id) { - this.id = id; - } - - @Override - public SchemaContextProvider createContextProvider(final Collection capabilities, final SchemaSourceProvider sourceProvider) { - - final YangSourceContext sourceContext = YangSourceContext.createFrom(capabilities, sourceProvider); - - if (sourceContext.getMissingSources().isEmpty() == false) { - logger.warn("{}: Sources for following models are missing {}", id, sourceContext.getMissingSources()); - } - - logger.debug("{}: Trying to create schema context from {}", id, sourceContext.getValidSources()); - final List modelsToParse = YangSourceContext.getValidInputStreams(sourceContext); - - Preconditions.checkState(sourceContext.getValidSources().isEmpty() == false, - "%s: Unable to create schema context, no sources provided by device", id); - try { - final SchemaContext schemaContext = tryToParseContext(modelsToParse); - logger.debug("{}: Schema context successfully created.", id); - return new NetconfSchemaContextProvider(schemaContext); - } catch (final RuntimeException e) { - logger.error("{}: Unable to create schema context, unexpected error", id, e); - throw new IllegalStateException(id + ": Unable to create schema context", e); - } - } - - private static SchemaContext tryToParseContext(final List modelsToParse) { - final YangParserImpl parser = new YangParserImpl(); - final Set models = parser.parseYangModelsFromStreams(modelsToParse); - return parser.resolveSchemaContext(models); - } - - private static final class NetconfSchemaContextProvider implements SchemaContextProvider { - private final SchemaContext schemaContext; - - public NetconfSchemaContextProvider(final SchemaContext schemaContext) { - this.schemaContext = schemaContext; - } - - @Override - public SchemaContext getSchemaContext() { - return schemaContext; - } - } -} diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaSourceProvider.java 
b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaSourceProvider.java deleted file mode 100644 index 44ff2ef985..0000000000 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaSourceProvider.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. - * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Public License v1.0 which accompanies this distribution, - * and is available at http://www.eclipse.org/legal/epl-v10.html - */ -package org.opendaylight.controller.sal.connect.netconf.schema; - -import org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil; -import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; -import org.opendaylight.controller.sal.core.api.RpcImplementation; -import org.opendaylight.yangtools.yang.common.QName; -import org.opendaylight.yangtools.yang.common.RpcResult; -import org.opendaylight.yangtools.yang.data.api.CompositeNode; -import org.opendaylight.yangtools.yang.data.api.SimpleNode; -import org.opendaylight.yangtools.yang.data.impl.ImmutableCompositeNode; -import org.opendaylight.yangtools.yang.data.impl.util.CompositeNodeBuilder; -import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Optional; -import com.google.common.base.Preconditions; - -public final class NetconfRemoteSchemaSourceProvider implements SchemaSourceProvider { - - public static final QName GET_SCHEMA_QNAME = QName.create(NetconfMessageTransformUtil.IETF_NETCONF_MONITORING, - "get-schema"); - public static final QName GET_DATA_QNAME = QName - .create(NetconfMessageTransformUtil.IETF_NETCONF_MONITORING, "data"); - - private static final Logger logger = LoggerFactory.getLogger(NetconfRemoteSchemaSourceProvider.class); - - private final RpcImplementation rpc; - private final RemoteDeviceId id; - - public NetconfRemoteSchemaSourceProvider(final RemoteDeviceId id, final RpcImplementation rpc) { - this.id = id; - this.rpc = Preconditions.checkNotNull(rpc); - } - - @Override - public Optional getSchemaSource(final String moduleName, final Optional revision) { - final ImmutableCompositeNode getSchemaRequest = createGetSchemaRequest(moduleName, revision); - - logger.trace("{}: Loading YANG schema source for {}:{}", id, moduleName, revision); - try { - final RpcResult schemaReply = rpc.invokeRpc(GET_SCHEMA_QNAME, getSchemaRequest).get(); - if (schemaReply.isSuccessful()) { - final Optional schemaBody = getSchemaFromRpc(id, schemaReply.getResult()); - if (schemaBody.isPresent()) { - logger.debug("{}: YANG Schema successfully retrieved for {}:{}", id, moduleName, revision); - return schemaBody; - } - } else { - logger.warn("{}: YANG schema was not successfully retrieved for {}:{}. 
Errors: {}", id, moduleName, - revision, schemaReply.getErrors()); - } - return Optional.absent(); - } catch (final InterruptedException e){ - Thread.currentThread().interrupt(); - throw new IllegalStateException(e); - } catch (final Exception e) { - logger.error("{}: YANG schema was not successfully retrieved for {}:{}", id, moduleName, revision, e); - throw new IllegalStateException(e); - } - } - - private ImmutableCompositeNode createGetSchemaRequest(final String moduleName, final Optional revision) { - final CompositeNodeBuilder request = ImmutableCompositeNode.builder(); - request.setQName(GET_SCHEMA_QNAME).addLeaf("identifier", moduleName); - if (revision.isPresent()) { - request.addLeaf("version", revision.get()); - } - request.addLeaf("format", "yang"); - return request.toInstance(); - } - - private static Optional getSchemaFromRpc(final RemoteDeviceId id, final CompositeNode result) { - if (result == null) { - return Optional.absent(); - } - final SimpleNode simpleNode = result.getFirstSimpleByName(GET_DATA_QNAME.withoutRevision()); - - Preconditions.checkNotNull(simpleNode, - "%s Unexpected response to get-schema, expected response with one child %s, but was %s", - id, GET_DATA_QNAME.withoutRevision(), result); - - final Object potential = simpleNode.getValue(); - return potential instanceof String ? Optional.of((String) potential) : Optional.absent(); - } -} diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaYangSourceProvider.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaYangSourceProvider.java new file mode 100644 index 0000000000..dc90fd3826 --- /dev/null +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/NetconfRemoteSchemaYangSourceProvider.java @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.sal.connect.netconf.schema; + +import com.google.common.base.Function; +import com.google.common.base.Objects; +import com.google.common.base.Optional; +import com.google.common.base.Preconditions; +import com.google.common.util.concurrent.CheckedFuture; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.ExecutionException; +import org.apache.commons.io.IOUtils; +import org.opendaylight.controller.sal.connect.netconf.util.NetconfMessageTransformUtil; +import org.opendaylight.controller.sal.connect.util.RemoteDeviceId; +import org.opendaylight.controller.sal.core.api.RpcImplementation; +import org.opendaylight.yangtools.util.concurrent.ExceptionMapper; +import org.opendaylight.yangtools.yang.common.QName; +import org.opendaylight.yangtools.yang.common.RpcResult; +import org.opendaylight.yangtools.yang.data.api.CompositeNode; +import org.opendaylight.yangtools.yang.data.api.SimpleNode; +import org.opendaylight.yangtools.yang.data.impl.ImmutableCompositeNode; +import org.opendaylight.yangtools.yang.data.impl.util.CompositeNodeBuilder; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceException; +import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier; +import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource; +import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public final class NetconfRemoteSchemaYangSourceProvider implements SchemaSourceProvider { + + public static final QName GET_SCHEMA_QNAME = QName.create(NetconfMessageTransformUtil.IETF_NETCONF_MONITORING,"get-schema"); + public static final QName GET_DATA_QNAME = QName.create(NetconfMessageTransformUtil.IETF_NETCONF_MONITORING, "data"); + + private static final Logger logger = LoggerFactory.getLogger(NetconfRemoteSchemaYangSourceProvider.class); + + private static final ExceptionMapper MAPPER = new ExceptionMapper( + "schemaDownload", SchemaSourceException.class) { + @Override + protected SchemaSourceException newWithCause(final String s, final Throwable throwable) { + return new SchemaSourceException(s, throwable); + } + }; + + private final RpcImplementation rpc; + private final RemoteDeviceId id; + + public NetconfRemoteSchemaYangSourceProvider(final RemoteDeviceId id, final RpcImplementation rpc) { + this.id = id; + this.rpc = Preconditions.checkNotNull(rpc); + } + + private ImmutableCompositeNode createGetSchemaRequest(final String moduleName, final Optional revision) { + final CompositeNodeBuilder request = ImmutableCompositeNode.builder(); + request.setQName(GET_SCHEMA_QNAME).addLeaf("identifier", moduleName); + if (revision.isPresent()) { + request.addLeaf("version", revision.get()); + } + request.addLeaf("format", "yang"); + return request.toInstance(); + } + + private static Optional getSchemaFromRpc(final RemoteDeviceId id, final CompositeNode result) { + if (result == null) { + return Optional.absent(); + } + final SimpleNode simpleNode = result.getFirstSimpleByName(GET_DATA_QNAME.withoutRevision()); + + Preconditions.checkNotNull(simpleNode, + "%s Unexpected response to 
get-schema, expected response with one child %s, but was %s", id, + GET_DATA_QNAME.withoutRevision(), result); + + final Object potential = simpleNode.getValue(); + return potential instanceof String ? Optional.of((String) potential) : Optional. absent(); + } + + @Override + public CheckedFuture getSource(final SourceIdentifier sourceIdentifier) { + final String moduleName = sourceIdentifier.getName(); + + // If formatted revision is SourceIdentifier.NOT_PRESENT_FORMATTED_REVISION, we have to omit it from request + final String formattedRevision = sourceIdentifier.getRevision().equals(SourceIdentifier.NOT_PRESENT_FORMATTED_REVISION) ? null : sourceIdentifier.getRevision(); + final Optional revision = Optional.fromNullable(formattedRevision); + final ImmutableCompositeNode getSchemaRequest = createGetSchemaRequest(moduleName, revision); + + logger.trace("{}: Loading YANG schema source for {}:{}", id, moduleName, revision); + + final ListenableFuture transformed = Futures.transform( + rpc.invokeRpc(GET_SCHEMA_QNAME, getSchemaRequest), + new ResultToYangSourceTransformer(id, sourceIdentifier, moduleName, revision)); + + // FIXME remove this get, it is only present to wait until source is retrieved + // (goal is to limit concurrent schema download, since NetconfDevice listener does not handle concurrent messages properly) + try { + logger.trace("{}: Blocking for {}", id, sourceIdentifier); + transformed.get(); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } catch (final ExecutionException e) { + throw new IllegalStateException(id + ": Failed while getting source: " + sourceIdentifier, e); + } + + return Futures.makeChecked(transformed, MAPPER); + } + + /** + * Transform composite node to string schema representation and then to ASTSchemaSource + */ + private static final class ResultToYangSourceTransformer implements + Function, YangTextSchemaSource> { + + private final RemoteDeviceId id; + private final SourceIdentifier sourceIdentifier; + private final String moduleName; + private final Optional revision; + + public ResultToYangSourceTransformer(final RemoteDeviceId id, final SourceIdentifier sourceIdentifier, + final String moduleName, final Optional revision) { + this.id = id; + this.sourceIdentifier = sourceIdentifier; + this.moduleName = moduleName; + this.revision = revision; + } + + @Override + public YangTextSchemaSource apply(final RpcResult input) { + + if (input.isSuccessful()) { + + final Optional schemaString = getSchemaFromRpc(id, input.getResult()); + + Preconditions.checkState(schemaString.isPresent(), + "%s: Unexpected response to get-schema, schema not present in message for: %s", id, sourceIdentifier); + + logger.debug("{}: YANG Schema successfully retrieved for {}:{}", id, moduleName, revision); + + return new NetconfYangTextSchemaSource(id, sourceIdentifier, schemaString); + } + + logger.warn("{}: YANG schema was not successfully retrieved for {}. Errors: {}", id, sourceIdentifier, + input.getErrors()); + + throw new IllegalStateException(String.format( + "%s: YANG schema was not successfully retrieved for %s. 
Errors: %s", id, sourceIdentifier, + input.getErrors())); + + } + + } + + private static class NetconfYangTextSchemaSource extends YangTextSchemaSource { + private final RemoteDeviceId id; + private final Optional schemaString; + + public NetconfYangTextSchemaSource(final RemoteDeviceId id, final SourceIdentifier sId, final Optional schemaString) { + super(sId); + this.id = id; + this.schemaString = schemaString; + } + + @Override + protected Objects.ToStringHelper addToStringAttributes(final Objects.ToStringHelper toStringHelper) { + return toStringHelper.add("device", id); + } + + @Override + public InputStream openStream() throws IOException { + return IOUtils.toInputStream(schemaString.get()); + } + } +} diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java index 1e3cf4b6fc..893a45aaa2 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java @@ -49,6 +49,8 @@ import org.w3c.dom.Element; public class NetconfMessageTransformUtil { + public static final String MESSAGE_ID_ATTR = "message-id"; + private NetconfMessageTransformUtil() {} public static final QName IETF_NETCONF_MONITORING = QName.create(NetconfState.QNAME, "ietf-netconf-monitoring"); @@ -125,8 +127,8 @@ public class NetconfMessageTransformUtil { public static void checkValidReply(final NetconfMessage input, final NetconfMessage output) throws NetconfDocumentedException { - final String inputMsgId = input.getDocument().getDocumentElement().getAttribute("message-id"); - final String outputMsgId = output.getDocument().getDocumentElement().getAttribute("message-id"); + final String inputMsgId = input.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR); + final String outputMsgId = output.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR); if(inputMsgId.equals(outputMsgId) == false) { Map errorInfo = ImmutableMap.builder() diff --git a/opendaylight/md-sal/sal-netconf-connector/src/test/java/org/opendaylight/controller/sal/connect/netconf/NetconfDeviceTest.java b/opendaylight/md-sal/sal-netconf-connector/src/test/java/org/opendaylight/controller/sal/connect/netconf/NetconfDeviceTest.java index fa488dadd3..218ec0be8d 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/test/java/org/opendaylight/controller/sal/connect/netconf/NetconfDeviceTest.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/test/java/org/opendaylight/controller/sal/connect/netconf/NetconfDeviceTest.java @@ -8,14 +8,18 @@ package org.opendaylight.controller.sal.connect.netconf; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyCollectionOf; import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import com.google.common.base.Optional; +import com.google.common.collect.HashMultimap; import com.google.common.collect.Lists; import 
com.google.common.util.concurrent.Futures; import java.io.InputStream; @@ -28,12 +32,13 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.junit.Test; import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.opendaylight.controller.netconf.api.NetconfMessage; import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants; import org.opendaylight.controller.sal.connect.api.MessageTransformer; import org.opendaylight.controller.sal.connect.api.RemoteDeviceCommunicator; import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; -import org.opendaylight.controller.sal.connect.api.SchemaContextProviderFactory; import org.opendaylight.controller.sal.connect.api.SchemaSourceProviderFactory; import org.opendaylight.controller.sal.connect.netconf.listener.NetconfSessionCapabilities; import org.opendaylight.controller.sal.connect.netconf.sal.NetconfDeviceRpc; @@ -45,8 +50,15 @@ import org.opendaylight.yangtools.yang.common.RpcResult; import org.opendaylight.yangtools.yang.common.RpcResultBuilder; import org.opendaylight.yangtools.yang.data.api.CompositeNode; import org.opendaylight.yangtools.yang.model.api.Module; +import org.opendaylight.yangtools.yang.model.api.ModuleImport; import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; +import org.opendaylight.yangtools.yang.model.repo.api.MissingSchemaSourceException; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory; +import org.opendaylight.yangtools.yang.model.repo.api.SchemaResolutionException; +import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier; +import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource; +import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceRegistration; +import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceRegistry; import org.opendaylight.yangtools.yang.model.util.repo.SchemaSourceProvider; import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl; @@ -70,7 +82,13 @@ public class NetconfDeviceTest { public static final String TEST_NAMESPACE = "test:namespace"; public static final String TEST_MODULE = "test-module"; public static final String TEST_REVISION = "2013-07-22"; - private NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver = new NetconfStateSchemas.NetconfStateSchemasResolver() { + public static final SourceIdentifier TEST_SID = new SourceIdentifier(TEST_MODULE, Optional.of(TEST_REVISION)); + public static final String TEST_CAPABILITY = TEST_NAMESPACE + "?module=" + TEST_MODULE + "&revision=" + TEST_REVISION; + + public static final SourceIdentifier TEST_SID2 = new SourceIdentifier(TEST_MODULE + "2", Optional.of(TEST_REVISION)); + public static final String TEST_CAPABILITY2 = TEST_NAMESPACE + "?module=" + TEST_MODULE + "2" + "&revision=" + TEST_REVISION; + + private static final NetconfStateSchemas.NetconfStateSchemasResolver stateSchemasResolver = new NetconfStateSchemas.NetconfStateSchemasResolver() { @Override public NetconfStateSchemas resolve(final NetconfDeviceRpc deviceRpc, final NetconfSessionCapabilities remoteSessionCapabilities, final RemoteDeviceId id) { @@ -79,14 +97,71 @@ public class NetconfDeviceTest { }; @Test - public void testNetconfDeviceWithoutMonitoring() throws Exception { + public void testNetconfDeviceFailFirstSchemaFailSecondEmpty() throws Exception { + final ArrayList 
capList = Lists.newArrayList(TEST_CAPABILITY); + final RemoteDeviceHandler facade = getFacade(); final RemoteDeviceCommunicator listener = getListener(); - final NetconfDevice device = new NetconfDevice(getId(), facade, getExecutor(), getMessageTransformer(), getSchemaContextProviderFactory(), getSourceProviderFactory(), stateSchemasResolver); - device.onRemoteSessionUp(getSessionCaps(false, Collections.emptyList()), listener); + final SchemaContextFactory schemaFactory = getSchemaFactory(); + + // Make fallback attempt to fail due to empty resolved sources + final SchemaResolutionException schemaResolutionException + = new SchemaResolutionException("fail first", + Collections.emptyList(), HashMultimap.create()); + doReturn(Futures.immediateFailedCheckedFuture( + schemaResolutionException)) + .when(schemaFactory).createSchemaContext(anyCollectionOf(SourceIdentifier.class)); + + final NetconfDevice.SchemaResourcesDTO schemaResourcesDTO + = new NetconfDevice.SchemaResourcesDTO(getSchemaRegistry(), schemaFactory, stateSchemasResolver); + final NetconfDevice device = new NetconfDevice(schemaResourcesDTO, getId(), facade, getExecutor(), getMessageTransformer()); + // Monitoring not supported + final NetconfSessionCapabilities sessionCaps = getSessionCaps(false, capList); + device.onRemoteSessionUp(sessionCaps, listener); Mockito.verify(facade, Mockito.timeout(5000)).onDeviceDisconnected(); + Mockito.verify(listener, Mockito.timeout(5000)).close(); + Mockito.verify(schemaFactory, times(1)).createSchemaContext(anyCollectionOf(SourceIdentifier.class)); + } + + @Test + public void testNetconfDeviceMissingSource() throws Exception { + final RemoteDeviceHandler facade = getFacade(); + final RemoteDeviceCommunicator listener = getListener(); + + final SchemaContextFactory schemaFactory = getSchemaFactory(); + + // Make fallback attempt to fail due to empty resolved sources + final MissingSchemaSourceException schemaResolutionException = new MissingSchemaSourceException("fail first", TEST_SID); + doAnswer(new Answer() { + @Override + public Object answer(final InvocationOnMock invocation) throws Throwable { + if(((Collection) invocation.getArguments()[0]).size() == 2) { + return Futures.immediateFailedCheckedFuture(schemaResolutionException); + } else { + return Futures.immediateCheckedFuture(getSchema()); + } + } + }).when(schemaFactory).createSchemaContext(anyCollectionOf(SourceIdentifier.class)); + + final NetconfDevice.SchemaResourcesDTO schemaResourcesDTO + = new NetconfDevice.SchemaResourcesDTO(getSchemaRegistry(), schemaFactory, stateSchemasResolver); + final NetconfDevice device = new NetconfDevice(schemaResourcesDTO, getId(), facade, getExecutor(), getMessageTransformer()); + // Monitoring supported + final NetconfSessionCapabilities sessionCaps = getSessionCaps(true, Lists.newArrayList(TEST_CAPABILITY, TEST_CAPABILITY2)); + device.onRemoteSessionUp(sessionCaps, listener); + + Mockito.verify(facade, Mockito.timeout(5000)).onDeviceConnected(any(SchemaContext.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); + Mockito.verify(schemaFactory, times(2)).createSchemaContext(anyCollectionOf(SourceIdentifier.class)); + } + + private SchemaSourceRegistry getSchemaRegistry() { + final SchemaSourceRegistry mock = mock(SchemaSourceRegistry.class); + final SchemaSourceRegistration mockReg = mock(SchemaSourceRegistration.class); + doNothing().when(mockReg).close(); + 
doReturn(mockReg).when(mock).registerSchemaSource(any(org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceProvider.class), any(PotentialSchemaSource.class)); + return mock; } @Test @@ -95,7 +170,10 @@ public class NetconfDeviceTest { final RemoteDeviceCommunicator listener = getListener(); final MessageTransformer messageTransformer = getMessageTransformer(); - final NetconfDevice device = new NetconfDevice(getId(), facade, getExecutor(), messageTransformer, getSchemaContextProviderFactory(), getSourceProviderFactory(), stateSchemasResolver); + + final NetconfDevice.SchemaResourcesDTO schemaResourcesDTO + = new NetconfDevice.SchemaResourcesDTO(getSchemaRegistry(), getSchemaFactory(), stateSchemasResolver); + final NetconfDevice device = new NetconfDevice(schemaResourcesDTO, getId(), facade, getExecutor(), messageTransformer); device.onNotification(netconfMessage); device.onNotification(netconfMessage); @@ -103,7 +181,7 @@ public class NetconfDeviceTest { verify(facade, times(0)).onNotification(any(CompositeNode.class)); final NetconfSessionCapabilities sessionCaps = getSessionCaps(true, - Lists.newArrayList(TEST_NAMESPACE + "?module=" + TEST_MODULE + "&revision=" + TEST_REVISION)); + Lists.newArrayList(TEST_CAPABILITY)); device.onRemoteSessionUp(sessionCaps, listener); @@ -120,40 +198,34 @@ public class NetconfDeviceTest { final RemoteDeviceHandler facade = getFacade(); final RemoteDeviceCommunicator listener = getListener(); - final SchemaContextProviderFactory schemaContextProviderFactory = getSchemaContextProviderFactory(); - final SchemaSourceProviderFactory sourceProviderFactory = getSourceProviderFactory(); + final SchemaContextFactory schemaContextProviderFactory = getSchemaFactory(); final MessageTransformer messageTransformer = getMessageTransformer(); - final NetconfDevice device = new NetconfDevice(getId(), facade, getExecutor(), messageTransformer, schemaContextProviderFactory, sourceProviderFactory, stateSchemasResolver); + final NetconfDevice.SchemaResourcesDTO schemaResourcesDTO + = new NetconfDevice.SchemaResourcesDTO(getSchemaRegistry(), schemaContextProviderFactory, stateSchemasResolver); + final NetconfDevice device = new NetconfDevice(schemaResourcesDTO, getId(), facade, getExecutor(), messageTransformer); final NetconfSessionCapabilities sessionCaps = getSessionCaps(true, Lists.newArrayList(TEST_NAMESPACE + "?module=" + TEST_MODULE + "&revision=" + TEST_REVISION)); device.onRemoteSessionUp(sessionCaps, listener); - verify(sourceProviderFactory, timeout(5000)).createSourceProvider(any(RpcImplementation.class)); - verify(schemaContextProviderFactory, timeout(5000)).createContextProvider(any(Collection.class), any(SchemaSourceProvider.class)); + verify(schemaContextProviderFactory, timeout(5000)).createSchemaContext(any(Collection.class)); verify(messageTransformer, timeout(5000)).onGlobalContextUpdated(any(SchemaContext.class)); - verify(facade, timeout(5000)).onDeviceConnected(any(SchemaContextProvider.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); + verify(facade, timeout(5000)).onDeviceConnected(any(SchemaContext.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); device.onRemoteSessionDown(); verify(facade, timeout(5000)).onDeviceDisconnected(); device.onRemoteSessionUp(sessionCaps, listener); - verify(sourceProviderFactory, timeout(5000).times(2)).createSourceProvider(any(RpcImplementation.class)); - verify(schemaContextProviderFactory, 
timeout(5000).times(2)).createContextProvider(any(Collection.class), any(SchemaSourceProvider.class)); + verify(schemaContextProviderFactory, timeout(5000).times(2)).createSchemaContext(any(Collection.class)); verify(messageTransformer, timeout(5000).times(2)).onGlobalContextUpdated(any(SchemaContext.class)); - verify(facade, timeout(5000).times(2)).onDeviceConnected(any(SchemaContextProvider.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); + verify(facade, timeout(5000).times(2)).onDeviceConnected(any(SchemaContext.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); } - private SchemaContextProviderFactory getSchemaContextProviderFactory() { - final SchemaContextProviderFactory schemaContextProviderFactory = mockClass(SchemaContextProviderFactory.class); - doReturn(new SchemaContextProvider() { - @Override - public SchemaContext getSchemaContext() { - return getSchema(); - } - }).when(schemaContextProviderFactory).createContextProvider(any(Collection.class), any(SchemaSourceProvider.class)); - return schemaContextProviderFactory; + private SchemaContextFactory getSchemaFactory() { + final SchemaContextFactory schemaFactory = mockClass(SchemaContextFactory.class); + doReturn(Futures.immediateCheckedFuture(getSchema())).when(schemaFactory).createSchemaContext(any(Collection.class)); + return schemaFactory; } public static SchemaContext getSchema() { @@ -167,7 +239,7 @@ public class NetconfDeviceTest { private RemoteDeviceHandler getFacade() throws Exception { final RemoteDeviceHandler remoteDeviceHandler = mockCloseableClass(RemoteDeviceHandler.class); - doNothing().when(remoteDeviceHandler).onDeviceConnected(any(SchemaContextProvider.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); + doNothing().when(remoteDeviceHandler).onDeviceConnected(any(SchemaContext.class), any(NetconfSessionCapabilities.class), any(RpcImplementation.class)); doNothing().when(remoteDeviceHandler).onDeviceDisconnected(); doNothing().when(remoteDeviceHandler).onNotification(any(CompositeNode.class)); return remoteDeviceHandler; @@ -190,7 +262,7 @@ public class NetconfDeviceTest { } private static T mockClass(final Class remoteDeviceHandlerClass) { - final T mock = Mockito.mock(remoteDeviceHandlerClass); + final T mock = mock(remoteDeviceHandlerClass); Mockito.doReturn(remoteDeviceHandlerClass.getSimpleName()).when(mock).toString(); return mock; } diff --git a/opendaylight/md-sal/sal-remoterpc-connector/pom.xml b/opendaylight/md-sal/sal-remoterpc-connector/pom.xml index a2bee8ffee..674c5bf5a5 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/pom.xml +++ b/opendaylight/md-sal/sal-remoterpc-connector/pom.xml @@ -38,6 +38,11 @@ com.typesafe.akka akka-osgi_${scala.version} + + + com.typesafe.akka + akka-slf4j_${scala.version} + @@ -119,7 +124,7 @@ test - + org.slf4j slf4j-simple ${slf4j.version} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcManager.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcManager.java index 5c56455bd0..514a2f141d 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcManager.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcManager.java @@ -17,7 +17,7 @@ import akka.japi.Creator; import akka.japi.Function; import 
org.opendaylight.controller.remote.rpc.messages.UpdateSchemaContext; import org.opendaylight.controller.remote.rpc.registry.ClusterWrapper; -import org.opendaylight.controller.remote.rpc.registry.RpcRegistry; +import org.opendaylight.controller.remote.rpc.registry.RpcRegistryOld; import org.opendaylight.controller.sal.core.api.Broker; import org.opendaylight.controller.sal.core.api.RpcProvisionRegistry; import org.opendaylight.yangtools.yang.common.QName; @@ -72,7 +72,7 @@ public class RpcManager extends AbstractUntypedActor { private void createRpcActors() { LOG.debug("Create rpc registry and broker actors"); - rpcRegistry = getContext().actorOf(RpcRegistry.props(clusterWrapper), ActorConstants.RPC_REGISTRY); + rpcRegistry = getContext().actorOf(RpcRegistryOld.props(clusterWrapper), ActorConstants.RPC_REGISTRY); rpcBroker = getContext().actorOf(RpcBroker.props(brokerSession, rpcRegistry, schemaContext), ActorConstants.RPC_BROKER); } diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTable.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTable.java index 5e19653a22..d21d05d7fe 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTable.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTable.java @@ -5,167 +5,81 @@ * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ - package org.opendaylight.controller.remote.rpc.registry; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableSet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -public class RoutingTable { - - private final Logger LOG = LoggerFactory.getLogger(RoutingTable.class); +import akka.actor.ActorRef; +import akka.japi.Option; +import akka.japi.Pair; +import org.opendaylight.controller.remote.rpc.registry.gossip.Copier; +import org.opendaylight.controller.sal.connector.api.RpcRouter; - private ConcurrentMap globalRpcMap = new ConcurrentHashMap<>(); - private ConcurrentMap> routedRpcMap = new ConcurrentHashMap<>(); +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; - public ConcurrentMap getGlobalRpcMap() { - return globalRpcMap; - } +public class RoutingTable implements Copier, Serializable { - public ConcurrentMap> getRoutedRpcMap() { - return routedRpcMap; - } + private Map, Long> table = new HashMap<>(); + private ActorRef router; - public R getGlobalRoute(final I routeId) { - Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!"); - return globalRpcMap.get(routeId); - } + @Override + public RoutingTable copy() { + RoutingTable copy = new RoutingTable(); + copy.setTable(new HashMap<>(table)); + copy.setRouter(this.getRouter()); - public void addGlobalRoute(final I routeId, final R route) { - Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!"); - Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!"); - LOG.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route); - 
if(globalRpcMap.putIfAbsent(routeId, route) != null) { - LOG.debug("A route already exist for route id [{}] ", routeId); + return copy; } - } - public void removeGlobalRoute(final I routeId) { - Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!"); - LOG.debug("removeGlobalRoute: removing a new route with id [{}]", routeId); - globalRpcMap.remove(routeId); - } + public Option> getRouterFor(RpcRouter.RouteIdentifier routeId){ + Long updatedTime = table.get(routeId); - public Set getRoutedRpc(final I routeId) { - Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!"); - Set routes = routedRpcMap.get(routeId); - - if (routes == null) { - return Collections.emptySet(); + if (updatedTime == null || router == null) + return Option.none(); + else + return Option.option(new Pair<>(router, updatedTime)); } - return ImmutableSet.copyOf(routes); - } - - public R getLastAddedRoutedRpc(final I routeId) { - - Set routes = getRoutedRpc(routeId); - - if (routes.isEmpty()) { - return null; + public void addRoute(RpcRouter.RouteIdentifier routeId){ + table.put(routeId, System.currentTimeMillis()); } - R route = null; - Iterator iter = routes.iterator(); - while (iter.hasNext()) { - route = iter.next(); + public void removeRoute(RpcRouter.RouteIdentifier routeId){ + table.remove(routeId); } - return route; - } - - public void addRoutedRpc(final I routeId, final R route) { - Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null"); - Preconditions.checkNotNull(route, "addRoute: route cannot be null"); - LOG.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route); - threadSafeAdd(routeId, route); - } - - public void addRoutedRpcs(final Set routeIds, final R route) { - Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null"); - for (I routeId : routeIds){ - addRoutedRpc(routeId, route); + public Boolean contains(RpcRouter.RouteIdentifier routeId){ + return table.containsKey(routeId); } - } - public void removeRoute(final I routeId, final R route) { - Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!"); - Preconditions.checkNotNull(route, "removeRoute: route cannot be null!"); - - LinkedHashSet routes = routedRpcMap.get(routeId); - if (routes == null) { - return; + public Boolean isEmpty(){ + return table.isEmpty(); } - LOG.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route); - threadSafeRemove(routeId, route); - } - - public void removeRoutes(final Set routeIds, final R route) { - Preconditions.checkNotNull(routeIds, "removeRoutes: routeIds must not be null"); - for (I routeId : routeIds){ - removeRoute(routeId, route); + /// + /// Getter, Setters + /// + //TODO: Remove public + public Map, Long> getTable() { + return table; } - } - - /** - * This method guarantees that no 2 thread over write each other's changes. 
- * Just so that we dont end up in infinite loop, it tries for 100 times then throw - */ - private void threadSafeAdd(final I routeId, final R route) { - for (int i=0;i<100;i++){ - - LinkedHashSet updatedRoutes = new LinkedHashSet<>(); - updatedRoutes.add(route); - LinkedHashSet oldRoutes = routedRpcMap.putIfAbsent(routeId, updatedRoutes); - if (oldRoutes == null) { - return; - } + void setTable(Map, Long> table) { + this.table = table; + } - updatedRoutes = new LinkedHashSet<>(oldRoutes); - updatedRoutes.add(route); + public ActorRef getRouter() { + return router; + } - if (routedRpcMap.replace(routeId, oldRoutes, updatedRoutes)) { - return; - } + public void setRouter(ActorRef router) { + this.router = router; } - //the method did not already return means it failed to add route in 100 attempts - throw new IllegalStateException("Failed to add route [" + routeId + "]"); - } - - /** - * This method guarantees that no 2 thread over write each other's changes. - * Just so that we dont end up in infinite loop, it tries for 100 times then throw - */ - private void threadSafeRemove(final I routeId, final R route) { - LinkedHashSet updatedRoutes = null; - for (int i=0;i<100;i++){ - LinkedHashSet oldRoutes = routedRpcMap.get(routeId); - - // if route to be deleted is the only entry in the set then remove routeId from the cache - if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){ - routedRpcMap.remove(routeId); - return; - } - - // if there are multiple routes for this routeId, remove the route to be deleted only from the set. - updatedRoutes = new LinkedHashSet<>(oldRoutes); - updatedRoutes.remove(route); - if (routedRpcMap.replace(routeId, oldRoutes, updatedRoutes)) { - return; - } + @Override + public String toString() { + return "RoutingTable{" + + "table=" + table + + ", router=" + router + + '}'; } - //the method did not already return means it failed to remove route in 100 attempts - throw new IllegalStateException("Failed to remove route [" + routeId + "]"); - } } diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOld.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOld.java new file mode 100644 index 0000000000..5951776f2c --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOld.java @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.remote.rpc.registry; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableSet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +public class RoutingTableOld { + + private final Logger LOG = LoggerFactory.getLogger(RoutingTableOld.class); + + private ConcurrentMap globalRpcMap = new ConcurrentHashMap<>(); + private ConcurrentMap> routedRpcMap = new ConcurrentHashMap<>(); + + public ConcurrentMap getGlobalRpcMap() { + return globalRpcMap; + } + + public ConcurrentMap> getRoutedRpcMap() { + return routedRpcMap; + } + + public R getGlobalRoute(final I routeId) { + Preconditions.checkNotNull(routeId, "getGlobalRoute: routeId cannot be null!"); + return globalRpcMap.get(routeId); + } + + public void addGlobalRoute(final I routeId, final R route) { + Preconditions.checkNotNull(routeId, "addGlobalRoute: routeId cannot be null!"); + Preconditions.checkNotNull(route, "addGlobalRoute: route cannot be null!"); + LOG.debug("addGlobalRoute: adding a new route with id[{}] and value [{}]", routeId, route); + if(globalRpcMap.putIfAbsent(routeId, route) != null) { + LOG.debug("A route already exist for route id [{}] ", routeId); + } + } + + public void removeGlobalRoute(final I routeId) { + Preconditions.checkNotNull(routeId, "removeGlobalRoute: routeId cannot be null!"); + LOG.debug("removeGlobalRoute: removing a new route with id [{}]", routeId); + globalRpcMap.remove(routeId); + } + + public Set getRoutedRpc(final I routeId) { + Preconditions.checkNotNull(routeId, "getRoutes: routeId cannot be null!"); + Set routes = routedRpcMap.get(routeId); + + if (routes == null) { + return Collections.emptySet(); + } + + return ImmutableSet.copyOf(routes); + } + + public R getLastAddedRoutedRpc(final I routeId) { + + Set routes = getRoutedRpc(routeId); + + if (routes.isEmpty()) { + return null; + } + + R route = null; + Iterator iter = routes.iterator(); + while (iter.hasNext()) { + route = iter.next(); + } + + return route; + } + + public void addRoutedRpc(final I routeId, final R route) { + Preconditions.checkNotNull(routeId, "addRoute: routeId cannot be null"); + Preconditions.checkNotNull(route, "addRoute: route cannot be null"); + LOG.debug("addRoute: adding a route with k/v [{}/{}]", routeId, route); + threadSafeAdd(routeId, route); + } + + public void addRoutedRpcs(final Set routeIds, final R route) { + Preconditions.checkNotNull(routeIds, "addRoutes: routeIds must not be null"); + for (I routeId : routeIds){ + addRoutedRpc(routeId, route); + } + } + + public void removeRoute(final I routeId, final R route) { + Preconditions.checkNotNull(routeId, "removeRoute: routeId cannot be null!"); + Preconditions.checkNotNull(route, "removeRoute: route cannot be null!"); + + LinkedHashSet routes = routedRpcMap.get(routeId); + if (routes == null) { + return; + } + LOG.debug("removeRoute: removing a new route with k/v [{}/{}]", routeId, route); + threadSafeRemove(routeId, route); + } + + public void removeRoutes(final Set routeIds, final R route) { + Preconditions.checkNotNull(routeIds, 
"removeRoutes: routeIds must not be null"); + for (I routeId : routeIds){ + removeRoute(routeId, route); + } + } + + /** + * This method guarantees that no 2 thread over write each other's changes. + * Just so that we dont end up in infinite loop, it tries for 100 times then throw + */ + private void threadSafeAdd(final I routeId, final R route) { + + for (int i=0;i<100;i++){ + + LinkedHashSet updatedRoutes = new LinkedHashSet<>(); + updatedRoutes.add(route); + LinkedHashSet oldRoutes = routedRpcMap.putIfAbsent(routeId, updatedRoutes); + if (oldRoutes == null) { + return; + } + + updatedRoutes = new LinkedHashSet<>(oldRoutes); + updatedRoutes.add(route); + + if (routedRpcMap.replace(routeId, oldRoutes, updatedRoutes)) { + return; + } + } + //the method did not already return means it failed to add route in 100 attempts + throw new IllegalStateException("Failed to add route [" + routeId + "]"); + } + + /** + * This method guarantees that no 2 thread over write each other's changes. + * Just so that we dont end up in infinite loop, it tries for 100 times then throw + */ + private void threadSafeRemove(final I routeId, final R route) { + LinkedHashSet updatedRoutes = null; + for (int i=0;i<100;i++){ + LinkedHashSet oldRoutes = routedRpcMap.get(routeId); + + // if route to be deleted is the only entry in the set then remove routeId from the cache + if ((oldRoutes.size() == 1) && oldRoutes.contains(route)){ + routedRpcMap.remove(routeId); + return; + } + + // if there are multiple routes for this routeId, remove the route to be deleted only from the set. + updatedRoutes = new LinkedHashSet<>(oldRoutes); + updatedRoutes.remove(route); + if (routedRpcMap.replace(routeId, oldRoutes, updatedRoutes)) { + return; + } + + } + //the method did not already return means it failed to remove route in 100 attempts + throw new IllegalStateException("Failed to remove route [" + routeId + "]"); + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistry.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistry.java index e36060cc13..e2ebcb2b25 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistry.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistry.java @@ -7,197 +7,386 @@ */ package org.opendaylight.controller.remote.rpc.registry; -import akka.actor.ActorSelection; +import akka.actor.ActorRef; import akka.actor.Address; import akka.actor.Props; -import akka.cluster.ClusterEvent; -import akka.cluster.Member; -import akka.japi.Creator; -import org.opendaylight.controller.remote.rpc.AbstractUntypedActor; -import org.opendaylight.controller.remote.rpc.ActorConstants; -import org.opendaylight.controller.remote.rpc.messages.AddRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.AddRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpcReply; -import org.opendaylight.controller.remote.rpc.messages.GetRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRpcReply; -import org.opendaylight.controller.remote.rpc.messages.RemoveRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.RemoveRpc; -import org.opendaylight.controller.remote.rpc.messages.RoutingTableData; +import akka.actor.UntypedActor; +import 
akka.dispatch.Mapper; +import akka.event.Logging; +import akka.event.LoggingAdapter; +import akka.japi.Option; +import akka.japi.Pair; +import akka.pattern.Patterns; +import com.google.common.base.Preconditions; +import org.opendaylight.controller.remote.rpc.registry.gossip.Bucket; +import org.opendaylight.controller.remote.rpc.registry.gossip.BucketStore; import org.opendaylight.controller.sal.connector.api.RpcRouter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import scala.collection.JavaConversions; +import scala.concurrent.Future; -import java.util.LinkedHashSet; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; + +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.AddOrUpdateRoutes; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.RemoveRoutes; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRouters; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetAllBuckets; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetAllBucketsReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetLocalBucket; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetLocalBucketReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.UpdateBucket; /** - * This Actor maintains the routing table state and sync it with other nodes in the cluster. - * - * A scheduler runs after an interval of time, which pick a random member from the cluster - * and send the current state of routing table to the member. - * - * when a message of routing table data is received, it gets merged with the local routing table - * to keep the latest data. + * Registry to look up cluster nodes that have registered for a given rpc. + *

+ * It uses {@link org.opendaylight.controller.remote.rpc.registry.gossip.BucketStore} to maintain this + * cluster wide information. */ +public class RpcRegistry extends UntypedActor { + + final LoggingAdapter log = Logging.getLogger(getContext().system(), this); + + /** + * Store to keep the registry. Bucket store sync's it across nodes in the cluster + */ + private ActorRef bucketStore; -public class RpcRegistry extends AbstractUntypedActor { - - private static final Logger LOG = LoggerFactory.getLogger(RpcRegistry.class); - private RoutingTable, String> routingTable; - private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); - private final ClusterWrapper clusterWrapper; - private final ScheduledFuture syncScheduler; - - private RpcRegistry(ClusterWrapper clusterWrapper){ - this.routingTable = new RoutingTable<>(); - this.clusterWrapper = clusterWrapper; - this.syncScheduler = scheduler.scheduleAtFixedRate(new SendRoutingTable(), 10, 10, TimeUnit.SECONDS); - } - - public static Props props(final ClusterWrapper clusterWrapper){ - return Props.create(new Creator(){ - - @Override - public RpcRegistry create() throws Exception { - return new RpcRegistry(clusterWrapper); - } - }); - } - - @Override - protected void handleReceive(Object message) throws Exception { - LOG.debug("Received message {}", message); - if(message instanceof RoutingTableData) { - syncRoutingTable((RoutingTableData) message); - } else if(message instanceof GetRoutedRpc) { - getRoutedRpc((GetRoutedRpc) message); - } else if(message instanceof GetRpc) { - getRpc((GetRpc) message); - } else if(message instanceof AddRpc) { - addRpc((AddRpc) message); - } else if(message instanceof RemoveRpc) { - removeRpc((RemoveRpc) message); - } else if(message instanceof AddRoutedRpc) { - addRoutedRpc((AddRoutedRpc) message); - } else if(message instanceof RemoveRoutedRpc) { - removeRoutedRpc((RemoveRoutedRpc) message); + /** + * Rpc broker that would use the registry to route requests. 
+ */ + private ActorRef localRouter; + + public RpcRegistry() { + bucketStore = getContext().actorOf(Props.create(BucketStore.class), "store"); + } + + public RpcRegistry(ActorRef bucketStore) { + this.bucketStore = bucketStore; } - } - private void getRoutedRpc(GetRoutedRpc rpcMsg){ - LOG.debug("Get latest routed Rpc location from routing table {}", rpcMsg); - String remoteActorPath = routingTable.getLastAddedRoutedRpc(rpcMsg.getRouteId()); - GetRoutedRpcReply routedRpcReply = new GetRoutedRpcReply(remoteActorPath); + @Override + public void onReceive(Object message) throws Exception { + + log.debug("Received message: message [{}]", message); - getSender().tell(routedRpcReply, self()); - } + //TODO: if sender is remote, reject message - private void getRpc(GetRpc rpcMsg) { - LOG.debug("Get global Rpc location from routing table {}", rpcMsg); - String remoteActorPath = routingTable.getGlobalRoute(rpcMsg.getRouteId()); - GetRpcReply rpcReply = new GetRpcReply(remoteActorPath); + if (message instanceof SetLocalRouter) + receiveSetLocalRouter((SetLocalRouter) message); - getSender().tell(rpcReply, self()); - } + if (message instanceof AddOrUpdateRoutes) + receiveAddRoutes((AddOrUpdateRoutes) message); - private void addRpc(AddRpc rpcMsg) { - LOG.debug("Add Rpc to routing table {}", rpcMsg); - routingTable.addGlobalRoute(rpcMsg.getRouteId(), rpcMsg.getActorPath()); + else if (message instanceof RemoveRoutes) + receiveRemoveRoutes((RemoveRoutes) message); - getSender().tell("Success", self()); - } + else if (message instanceof Messages.FindRouters) + receiveGetRouter((FindRouters) message); - private void removeRpc(RemoveRpc rpcMsg) { - LOG.debug("Removing Rpc to routing table {}", rpcMsg); - routingTable.removeGlobalRoute(rpcMsg.getRouteId()); + else + unhandled(message); + } + + /** + * Register's rpc broker + * + * @param message contains {@link akka.actor.ActorRef} for rpc broker + */ + private void receiveSetLocalRouter(SetLocalRouter message) { + localRouter = message.getRouter(); + } - getSender().tell("Success", self()); - } + /** + * @param msg + */ + private void receiveAddRoutes(AddOrUpdateRoutes msg) { - private void addRoutedRpc(AddRoutedRpc rpcMsg) { - routingTable.addRoutedRpcs(rpcMsg.getAnnouncements(), rpcMsg.getActorPath()); - getSender().tell("Success", self()); - } + Preconditions.checkState(localRouter != null, "Router must be set first"); + + Future futureReply = Patterns.ask(bucketStore, new GetLocalBucket(), 1000); + futureReply.map(getMapperToAddRoutes(msg.getRouteIdentifiers()), getContext().dispatcher()); + } - private void removeRoutedRpc(RemoveRoutedRpc rpcMsg) { - routingTable.removeRoutes(rpcMsg.getAnnouncements(), rpcMsg.getActorPath()); - getSender().tell("Success", self()); - } + /** + * @param msg contains list of route ids to remove + */ + private void receiveRemoveRoutes(RemoveRoutes msg) { - private void syncRoutingTable(RoutingTableData routingTableData) { - LOG.debug("Syncing routing table {}", routingTableData); + Future futureReply = Patterns.ask(bucketStore, new GetLocalBucket(), 1000); + futureReply.map(getMapperToRemoveRoutes(msg.getRouteIdentifiers()), getContext().dispatcher()); - Map, String> newRpcMap = routingTableData.getRpcMap(); - Set> routeIds = newRpcMap.keySet(); - for(RpcRouter.RouteIdentifier routeId : routeIds) { - routingTable.addGlobalRoute(routeId, newRpcMap.get(routeId)); } - Map, LinkedHashSet> newRoutedRpcMap = - routingTableData.getRoutedRpcMap(); - routeIds = newRoutedRpcMap.keySet(); + /** + * Finds routers for the given rpc. 
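The registry never blocks on its bucket store: each handler fires Patterns.ask and transforms the eventual reply with an akka.dispatch.Mapper on the actor's dispatcher. A minimal standalone sketch of that ask-and-map pattern follows; EchoActor, the message strings and the 1000 ms timeout are illustrative stand-ins, not part of this patch.

import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.dispatch.Mapper;
import akka.pattern.Patterns;
import scala.concurrent.Future;

public class AskAndMapExample {

    // Illustrative actor that simply echoes whatever it receives.
    public static class EchoActor extends UntypedActor {
        @Override
        public void onReceive(Object message) {
            getSender().tell(message, getSelf());
        }
    }

    public static void main(String[] args) {
        ActorSystem system = ActorSystem.create("example");
        ActorRef echo = system.actorOf(Props.create(EchoActor.class), "echo");

        // Fire the request without blocking the caller; the reply arrives as a Future.
        Future<Object> reply = Patterns.ask(echo, "get-local-bucket", 1000);

        // Transform the reply on a dispatcher, the same shape RpcRegistry uses to turn
        // GetLocalBucketReply/GetAllBucketsReply into bucket updates or FindRoutersReply.
        reply.map(new Mapper<Object, Void>() {
            @Override
            public Void apply(Object replyMessage) {
                System.out.println("store replied: " + replyMessage);
                return null;
            }
        }, system.dispatcher());
        // (A real program would eventually shut the ActorSystem down.)
    }
}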
+ * + * @param msg + */ + private void receiveGetRouter(FindRouters msg) { + final ActorRef sender = getSender(); - for(RpcRouter.RouteIdentifier routeId : routeIds) { - Set routeAddresses = newRoutedRpcMap.get(routeId); - for(String routeAddress : routeAddresses) { - routingTable.addRoutedRpc(routeId, routeAddress); - } + Future futureReply = Patterns.ask(bucketStore, new GetAllBuckets(), 1000); + futureReply.map(getMapperToGetRouter(msg.getRouteIdentifier(), sender), getContext().dispatcher()); } - } - - private ActorSelection getRandomRegistryActor() { - ClusterEvent.CurrentClusterState clusterState = clusterWrapper.getState(); - ActorSelection actor = null; - Set members = JavaConversions.asJavaSet(clusterState.members()); - int memberSize = members.size(); - // Don't select yourself - if(memberSize > 1) { - Address currentNodeAddress = clusterWrapper.getAddress(); - int index = new Random().nextInt(memberSize); - int i = 0; - // keeping previous member, in case when random index member is same as current actor - // and current actor member is last in set - Member previousMember = null; - for(Member member : members){ - if(i == index-1) { - previousMember = member; - } - if(i == index) { - if(!currentNodeAddress.equals(member.address())) { - actor = this.context().actorSelection(member.address() + ActorConstants.RPC_REGISTRY_PATH); - break; - } else if(index < memberSize-1){ // pick the next element in the set - index++; - } + + /** + * Helper to create empty reply when no routers are found + * + * @return + */ + private Messages.FindRoutersReply createEmptyReply() { + List> routerWithUpdateTime = Collections.emptyList(); + return new Messages.FindRoutersReply(routerWithUpdateTime); + } + + /** + * Helper to create a reply when routers are found for the given rpc + * + * @param buckets + * @param routeId + * @return + */ + private Messages.FindRoutersReply createReplyWithRouters(Map buckets, RpcRouter.RouteIdentifier routeId) { + + List> routers = new ArrayList<>(); + Option> routerWithUpdateTime = null; + + for (Bucket bucket : buckets.values()) { + + RoutingTable table = (RoutingTable) bucket.getData(); + if (table == null) + continue; + + routerWithUpdateTime = table.getRouterFor(routeId); + if (routerWithUpdateTime.isEmpty()) + continue; + + routers.add(routerWithUpdateTime.get()); } - i++; - } - if(actor == null && previousMember != null) { - actor = this.context().actorSelection(previousMember.address() + ActorConstants.RPC_REGISTRY_PATH); - } + + return new Messages.FindRoutersReply(routers); } - return actor; - } - private class SendRoutingTable implements Runnable { - @Override - public void run() { - RoutingTableData routingTableData = - new RoutingTableData(routingTable.getGlobalRpcMap(), routingTable.getRoutedRpcMap()); - LOG.debug("Sending routing table for sync {}", routingTableData); - ActorSelection actor = getRandomRegistryActor(); - if(actor != null) { - actor.tell(routingTableData, self()); - } + /// + ///private factories to create Mapper + /// + + /** + * Receives all buckets returned from bucket store and finds routers for the buckets where given rpc(routeId) is found + * + * @param routeId the rpc + * @param sender client who asked to find the routers. 
+ * @return + */ + private Mapper getMapperToGetRouter(final RpcRouter.RouteIdentifier routeId, final ActorRef sender) { + return new Mapper() { + @Override + public Void apply(Object replyMessage) { + + if (replyMessage instanceof GetAllBucketsReply) { + + GetAllBucketsReply reply = (GetAllBucketsReply) replyMessage; + Map buckets = reply.getBuckets(); + + if (buckets == null || buckets.isEmpty()) { + sender.tell(createEmptyReply(), getSelf()); + return null; + } + + sender.tell(createReplyWithRouters(buckets, routeId), getSelf()); + } + return null; + } + }; + } + + /** + * Receives local bucket from bucket store and updates routing table in it by removing the route. Subsequently, + * it updates the local bucket in bucket store. + * + * @param routeIds rpc to remote + * @return + */ + private Mapper getMapperToRemoveRoutes(final List> routeIds) { + return new Mapper() { + @Override + public Void apply(Object replyMessage) { + if (replyMessage instanceof GetLocalBucketReply) { + + GetLocalBucketReply reply = (GetLocalBucketReply) replyMessage; + Bucket bucket = reply.getBucket(); + + if (bucket == null) { + log.debug("Local bucket is null"); + return null; + } + + RoutingTable table = bucket.getData(); + if (table == null) + table = new RoutingTable(); + + table.setRouter(localRouter); + + if (!table.isEmpty()) { + for (RpcRouter.RouteIdentifier routeId : routeIds) { + table.removeRoute(routeId); + } + } + bucket.setData(table); + + UpdateBucket updateBucketMessage = new UpdateBucket(bucket); + bucketStore.tell(updateBucketMessage, getSelf()); + } + return null; + } + }; + } + + /** + * Receives local bucket from bucket store and updates routing table in it by adding the route. Subsequently, + * it updates the local bucket in bucket store. + * + * @param routeIds rpc to add + * @return + */ + private Mapper getMapperToAddRoutes(final List> routeIds) { + + return new Mapper() { + @Override + public Void apply(Object replyMessage) { + if (replyMessage instanceof GetLocalBucketReply) { + + GetLocalBucketReply reply = (GetLocalBucketReply) replyMessage; + Bucket bucket = reply.getBucket(); + + if (bucket == null) { + log.debug("Local bucket is null"); + return null; + } + + RoutingTable table = bucket.getData(); + if (table == null) + table = new RoutingTable(); + + table.setRouter(localRouter); + for (RpcRouter.RouteIdentifier routeId : routeIds) { + table.addRoute(routeId); + } + + bucket.setData(table); + + UpdateBucket updateBucketMessage = new UpdateBucket(bucket); + bucketStore.tell(updateBucketMessage, getSelf()); + } + + return null; + } + }; + } + + /** + * All messages used by the RpcRegistry + */ + public static class Messages { + + + public static class ContainsRoute { + final List> routeIdentifiers; + + public ContainsRoute(List> routeIdentifiers) { + Preconditions.checkArgument(routeIdentifiers != null && + !routeIdentifiers.isEmpty(), + "Route Identifiers must be supplied"); + this.routeIdentifiers = routeIdentifiers; + } + + public List> getRouteIdentifiers() { + return this.routeIdentifiers; + } + + @Override + public String toString() { + return "ContainsRoute{" + + "routeIdentifiers=" + routeIdentifiers + + '}'; + } + } + + public static class AddOrUpdateRoutes extends ContainsRoute { + + public AddOrUpdateRoutes(List> routeIdentifiers) { + super(routeIdentifiers); + } + } + + public static class RemoveRoutes extends ContainsRoute { + + public RemoveRoutes(List> routeIdentifiers) { + super(routeIdentifiers); + } + } + + public static class SetLocalRouter { + private final 
ActorRef router; + + public SetLocalRouter(ActorRef router) { + Preconditions.checkArgument(router != null, "Router must not be null"); + this.router = router; + } + + public ActorRef getRouter() { + return this.router; + } + + @Override + public String toString() { + return "SetLocalRouter{" + + "router=" + router + + '}'; + } + } + + public static class FindRouters { + private final RpcRouter.RouteIdentifier routeIdentifier; + + public FindRouters(RpcRouter.RouteIdentifier routeIdentifier) { + Preconditions.checkArgument(routeIdentifier != null, "Route must not be null"); + this.routeIdentifier = routeIdentifier; + } + + public RpcRouter.RouteIdentifier getRouteIdentifier() { + return routeIdentifier; + } + + @Override + public String toString() { + return "FindRouters{" + + "routeIdentifier=" + routeIdentifier + + '}'; + } + } + + public static class FindRoutersReply { + final List> routerWithUpdateTime; + + public FindRoutersReply(List> routerWithUpdateTime) { + Preconditions.checkArgument(routerWithUpdateTime != null, "List of routers found must not be null"); + this.routerWithUpdateTime = routerWithUpdateTime; + } + + public List> getRouterWithUpdateTime() { + return routerWithUpdateTime; + } + + @Override + public String toString() { + return "FindRoutersReply{" + + "routerWithUpdateTime=" + routerWithUpdateTime + + '}'; + } + } } - } } diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOld.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOld.java new file mode 100644 index 0000000000..96c8802ce6 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOld.java @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry; + +import akka.actor.ActorSelection; +import akka.actor.Address; +import akka.actor.Props; +import akka.cluster.ClusterEvent; +import akka.cluster.Member; +import akka.japi.Creator; +import org.opendaylight.controller.remote.rpc.AbstractUntypedActor; +import org.opendaylight.controller.remote.rpc.ActorConstants; +import org.opendaylight.controller.remote.rpc.messages.AddRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.AddRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpcReply; +import org.opendaylight.controller.remote.rpc.messages.GetRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRpcReply; +import org.opendaylight.controller.remote.rpc.messages.RemoveRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.RemoveRpc; +import org.opendaylight.controller.remote.rpc.messages.RoutingTableData; +import org.opendaylight.controller.sal.connector.api.RpcRouter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import scala.collection.JavaConversions; + +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +/** + * This Actor maintains the routing table state and sync it with other nodes in the cluster. + * + * A scheduler runs after an interval of time, which pick a random member from the cluster + * and send the current state of routing table to the member. + * + * when a message of routing table data is received, it gets merged with the local routing table + * to keep the latest data. 
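The class comment above describes the old sync strategy: on a fixed schedule, push the entire routing table to one randomly chosen member, skipping the local node. getRandomRegistryActor() below implements that selection with an index walk over the member set; the following sketch expresses the same rule more compactly, with plain address strings standing in for cluster members.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

public class RandomPeerSelection {

    // Picks one address from members that is not self, or null when the
    // local node is the only member ("Don't select yourself" below).
    static String pickRandomPeer(List<String> members, String self) {
        List<String> candidates = new ArrayList<>(members);
        candidates.remove(self);
        if (candidates.isEmpty()) {
            return null;
        }
        return candidates.get(ThreadLocalRandom.current().nextInt(candidates.size()));
    }

    public static void main(String[] args) {
        List<String> members = new ArrayList<>();
        members.add("node-1");
        members.add("node-2");
        members.add("node-3");

        // Never returns "node-1", the caller's own address.
        System.out.println(pickRandomPeer(members, "node-1"));
    }
}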
+ */ + +public class RpcRegistryOld extends AbstractUntypedActor { + + private static final Logger LOG = LoggerFactory.getLogger(RpcRegistryOld.class); + private RoutingTableOld, String> routingTable; + private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); + private final ClusterWrapper clusterWrapper; + private final ScheduledFuture syncScheduler; + + private RpcRegistryOld(ClusterWrapper clusterWrapper){ + this.routingTable = new RoutingTableOld<>(); + this.clusterWrapper = clusterWrapper; + this.syncScheduler = scheduler.scheduleAtFixedRate(new SendRoutingTable(), 10, 10, TimeUnit.SECONDS); + } + + public static Props props(final ClusterWrapper clusterWrapper){ + return Props.create(new Creator(){ + + @Override + public RpcRegistryOld create() throws Exception { + return new RpcRegistryOld(clusterWrapper); + } + }); + } + + @Override + protected void handleReceive(Object message) throws Exception { + LOG.debug("Received message {}", message); + if(message instanceof RoutingTableData) { + syncRoutingTable((RoutingTableData) message); + } else if(message instanceof GetRoutedRpc) { + getRoutedRpc((GetRoutedRpc) message); + } else if(message instanceof GetRpc) { + getRpc((GetRpc) message); + } else if(message instanceof AddRpc) { + addRpc((AddRpc) message); + } else if(message instanceof RemoveRpc) { + removeRpc((RemoveRpc) message); + } else if(message instanceof AddRoutedRpc) { + addRoutedRpc((AddRoutedRpc) message); + } else if(message instanceof RemoveRoutedRpc) { + removeRoutedRpc((RemoveRoutedRpc) message); + } + } + + private void getRoutedRpc(GetRoutedRpc rpcMsg){ + LOG.debug("Get latest routed Rpc location from routing table {}", rpcMsg); + String remoteActorPath = routingTable.getLastAddedRoutedRpc(rpcMsg.getRouteId()); + GetRoutedRpcReply routedRpcReply = new GetRoutedRpcReply(remoteActorPath); + + getSender().tell(routedRpcReply, self()); + } + + private void getRpc(GetRpc rpcMsg) { + LOG.debug("Get global Rpc location from routing table {}", rpcMsg); + String remoteActorPath = routingTable.getGlobalRoute(rpcMsg.getRouteId()); + GetRpcReply rpcReply = new GetRpcReply(remoteActorPath); + + getSender().tell(rpcReply, self()); + } + + private void addRpc(AddRpc rpcMsg) { + LOG.debug("Add Rpc to routing table {}", rpcMsg); + routingTable.addGlobalRoute(rpcMsg.getRouteId(), rpcMsg.getActorPath()); + + getSender().tell("Success", self()); + } + + private void removeRpc(RemoveRpc rpcMsg) { + LOG.debug("Removing Rpc to routing table {}", rpcMsg); + routingTable.removeGlobalRoute(rpcMsg.getRouteId()); + + getSender().tell("Success", self()); + } + + private void addRoutedRpc(AddRoutedRpc rpcMsg) { + routingTable.addRoutedRpcs(rpcMsg.getAnnouncements(), rpcMsg.getActorPath()); + getSender().tell("Success", self()); + } + + private void removeRoutedRpc(RemoveRoutedRpc rpcMsg) { + routingTable.removeRoutes(rpcMsg.getAnnouncements(), rpcMsg.getActorPath()); + getSender().tell("Success", self()); + } + + private void syncRoutingTable(RoutingTableData routingTableData) { + LOG.debug("Syncing routing table {}", routingTableData); + + Map, String> newRpcMap = routingTableData.getRpcMap(); + Set> routeIds = newRpcMap.keySet(); + for(RpcRouter.RouteIdentifier routeId : routeIds) { + routingTable.addGlobalRoute(routeId, newRpcMap.get(routeId)); + } + + Map, LinkedHashSet> newRoutedRpcMap = + routingTableData.getRoutedRpcMap(); + routeIds = newRoutedRpcMap.keySet(); + + for(RpcRouter.RouteIdentifier routeId : routeIds) { + Set routeAddresses = 
newRoutedRpcMap.get(routeId); + for(String routeAddress : routeAddresses) { + routingTable.addRoutedRpc(routeId, routeAddress); + } + } + } + + private ActorSelection getRandomRegistryActor() { + ClusterEvent.CurrentClusterState clusterState = clusterWrapper.getState(); + ActorSelection actor = null; + Set members = JavaConversions.asJavaSet(clusterState.members()); + int memberSize = members.size(); + // Don't select yourself + if(memberSize > 1) { + Address currentNodeAddress = clusterWrapper.getAddress(); + int index = new Random().nextInt(memberSize); + int i = 0; + // keeping previous member, in case when random index member is same as current actor + // and current actor member is last in set + Member previousMember = null; + for(Member member : members){ + if(i == index-1) { + previousMember = member; + } + if(i == index) { + if(!currentNodeAddress.equals(member.address())) { + actor = this.context().actorSelection(member.address() + ActorConstants.RPC_REGISTRY_PATH); + break; + } else if(index < memberSize-1){ // pick the next element in the set + index++; + } + } + i++; + } + if(actor == null && previousMember != null) { + actor = this.context().actorSelection(previousMember.address() + ActorConstants.RPC_REGISTRY_PATH); + } + } + return actor; + } + + private class SendRoutingTable implements Runnable { + + @Override + public void run() { + RoutingTableData routingTableData = + new RoutingTableData(routingTable.getGlobalRpcMap(), routingTable.getRoutedRpcMap()); + LOG.debug("Sending routing table for sync {}", routingTableData); + ActorSelection actor = getRandomRegistryActor(); + if(actor != null) { + actor.tell(routingTableData, self()); + } + } + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Bucket.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Bucket.java new file mode 100644 index 0000000000..f5dfbc5650 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Bucket.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + + +public interface Bucket> { + public Long getVersion(); + public T getData(); + public void setData(T data); +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketImpl.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketImpl.java new file mode 100644 index 0000000000..3cdd924e85 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketImpl.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import java.io.Serializable; + +public class BucketImpl> implements Bucket, Serializable { + + private Long version = System.currentTimeMillis();; + + private T data; + + @Override + public Long getVersion() { + return version; + } + + @Override + public T getData() { + if (this.data == null) + return null; + + return data.copy(); + } + + public void setData(T data){ + this.version = System.currentTimeMillis()+1; + this.data = data; + } + + @Override + public String toString() { + return "BucketImpl{" + + "version=" + version + + ", data=" + data + + '}'; + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStore.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStore.java new file mode 100644 index 0000000000..2f634ce1fa --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStore.java @@ -0,0 +1,273 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import akka.actor.ActorRef; +import akka.actor.Address; +import akka.actor.Props; +import akka.actor.UntypedActor; +import akka.cluster.Cluster; +import akka.event.Logging; +import akka.event.LoggingAdapter; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetAllBuckets; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetAllBucketsReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketVersions; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketVersionsReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketsByMembers; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketsByMembersReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetLocalBucket; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetLocalBucketReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.UpdateBucket; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.UpdateRemoteBuckets; + +/** + * A store that syncs its data across nodes in the cluster. + * It maintains a {@link org.opendaylight.controller.remote.rpc.registry.gossip.Bucket} per node. Buckets are versioned. + * A node can write ONLY to its bucket. 
This way, write conflicts are avoided. + *

+ * Buckets are sync'ed across nodes using Gossip protocol (http://en.wikipedia.org/wiki/Gossip_protocol)
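Each node therefore owns exactly one bucket, stamps it with a version on every write, and peers only replace their copy of a bucket when the incoming version is strictly newer; that single rule is what lets the gossip exchange converge without write conflicts. A simplified, self-contained sketch of the reconciliation step (plain maps and a VersionedData holder stand in for the actual Bucket/BucketStore types):

import java.util.HashMap;
import java.util.Map;

public class BucketReconciliationSketch {

    // Versioned payload standing in for Bucket: the higher version wins.
    static final class VersionedData {
        final long version;
        final String data;

        VersionedData(long version, String data) {
            this.version = version;
            this.data = data;
        }
    }

    // Merge buckets received from a peer into the local view, keeping an entry
    // only when the received version is newer than the one already held.
    static void mergeRemoteBuckets(Map<String, VersionedData> local,
                                   Map<String, VersionedData> received,
                                   String selfAddress) {
        // A peer must never overwrite this node's own bucket.
        received.remove(selfAddress);

        for (Map.Entry<String, VersionedData> entry : received.entrySet()) {
            VersionedData current = local.get(entry.getKey());
            if (current == null || entry.getValue().version > current.version) {
                local.put(entry.getKey(), entry.getValue());
            }
        }
    }

    public static void main(String[] args) {
        Map<String, VersionedData> local = new HashMap<>();
        local.put("node-2", new VersionedData(5, "old routes"));

        Map<String, VersionedData> received = new HashMap<>();
        received.put("node-2", new VersionedData(9, "new routes"));
        received.put("node-1", new VersionedData(1, "a stale copy of our own bucket"));

        mergeRemoteBuckets(local, received, "node-1");
        System.out.println(local.get("node-2").data);     // new routes
        System.out.println(local.containsKey("node-1"));  // false: self is never overwritten
    }
}

The actual receiveUpdateRemoteBuckets() further down applies exactly this comparison against its versions map before accepting a remote bucket.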

+ * This store uses a {@link org.opendaylight.controller.remote.rpc.registry.gossip.Gossiper}. + * + */ +public class BucketStore extends UntypedActor { + + final LoggingAdapter log = Logging.getLogger(getContext().system(), this); + + /** + * Bucket owned by the node + */ + private BucketImpl localBucket = new BucketImpl();; + + /** + * Buckets ownded by other known nodes in the cluster + */ + private ConcurrentMap remoteBuckets = new ConcurrentHashMap<>(); + + /** + * Bucket version for every known node in the cluster including this node + */ + private ConcurrentMap versions = new ConcurrentHashMap<>(); + + /** + * Cluster address for this node + */ + private final Address selfAddress = Cluster.get(getContext().system()).selfAddress(); + + /** + * Our private gossiper + */ + private ActorRef gossiper; + + public BucketStore(){ + gossiper = getContext().actorOf(Props.create(Gossiper.class), "gossiper"); + } + + /** + * This constructor is useful for testing. + * TODO: Pass Props instead of ActorRef + * + * @param gossiper + */ + public BucketStore(ActorRef gossiper){ + this.gossiper = gossiper; + } + + @Override + public void onReceive(Object message) throws Exception { + + log.debug("Received message: node[{}], message[{}]", selfAddress, message); + + if (message instanceof UpdateBucket) + receiveUpdateBucket(((UpdateBucket) message).getBucket()); + + else if (message instanceof GetAllBuckets) + receiveGetAllBucket(); + + else if (message instanceof GetLocalBucket) + receiveGetLocalBucket(); + + else if (message instanceof GetBucketsByMembers) + receiveGetBucketsByMembers(((GetBucketsByMembers) message).getMembers()); + + else if (message instanceof GetBucketVersions) + receiveGetBucketVersions(); + + else if (message instanceof UpdateRemoteBuckets) + receiveUpdateRemoteBuckets(((UpdateRemoteBuckets) message).getBuckets()); + + else { + log.debug("Unhandled message [{}]", message); + unhandled(message); + } + + } + + /** + * Returns a copy of bucket owned by this node + */ + private void receiveGetLocalBucket() { + final ActorRef sender = getSender(); + GetLocalBucketReply reply = new GetLocalBucketReply(localBucket); + sender.tell(reply, getSelf()); + } + + /** + * Updates the bucket owned by this node + * + * @param updatedBucket + */ + void receiveUpdateBucket(Bucket updatedBucket){ + + localBucket = (BucketImpl) updatedBucket; + versions.put(selfAddress, localBucket.getVersion()); + } + + /** + * Returns all the buckets the this node knows about, self owned + remote + */ + void receiveGetAllBucket(){ + final ActorRef sender = getSender(); + sender.tell(new GetAllBucketsReply(getAllBuckets()), getSelf()); + } + + /** + * Helper to collect all known buckets + * + * @return self owned + remote buckets + */ + Map getAllBuckets(){ + Map all = new HashMap<>(remoteBuckets.size() + 1); + + //first add the local bucket + all.put(selfAddress, localBucket); + + //then get all remote buckets + all.putAll(remoteBuckets); + + return all; + } + + /** + * Returns buckets for requested members that this node knows about + * + * @param members requested members + */ + void receiveGetBucketsByMembers(Set
<Address>
members){ + final ActorRef sender = getSender(); + Map buckets = getBucketsByMembers(members); + sender.tell(new GetBucketsByMembersReply(buckets), getSelf()); + } + + /** + * Helper to collect buckets for requested memebers + * + * @param members requested members + * @return buckets for requested memebers + */ + Map getBucketsByMembers(Set
members) { + Map buckets = new HashMap<>(); + + //first add the local bucket if asked + if (members.contains(selfAddress)) + buckets.put(selfAddress, localBucket); + + //then get buckets for requested remote nodes + for (Address address : members){ + if (remoteBuckets.containsKey(address)) + buckets.put(address, remoteBuckets.get(address)); + } + + return buckets; + } + + /** + * Returns versions for all buckets known + */ + void receiveGetBucketVersions(){ + final ActorRef sender = getSender(); + GetBucketVersionsReply reply = new GetBucketVersionsReply(versions); + sender.tell(reply, getSelf()); + } + + /** + * Update local copy of remote buckets where local copy's version is older + * + * @param receivedBuckets buckets sent by remote + * {@link org.opendaylight.controller.remote.rpc.registry.gossip.Gossiper} + */ + void receiveUpdateRemoteBuckets(Map receivedBuckets){ + + if (receivedBuckets == null || receivedBuckets.isEmpty()) + return; //nothing to do + + //Remote cant update self's bucket + receivedBuckets.remove(selfAddress); + + for (Map.Entry entry : receivedBuckets.entrySet()){ + + Long localVersion = versions.get(entry.getKey()); + if (localVersion == null) localVersion = -1L; + + Bucket receivedBucket = entry.getValue(); + + if (receivedBucket == null) + continue; + + Long remoteVersion = receivedBucket.getVersion(); + if (remoteVersion == null) remoteVersion = -1L; + + //update only if remote version is newer + if ( remoteVersion > localVersion ) { + remoteBuckets.put(entry.getKey(), receivedBucket); + versions.put(entry.getKey(), remoteVersion); + } + } + + log.debug("State after update - Local Bucket [{}], Remote Buckets [{}]", localBucket, remoteBuckets); + } + + /// + ///Getter Setters + /// + + BucketImpl getLocalBucket() { + return localBucket; + } + + void setLocalBucket(BucketImpl localBucket) { + this.localBucket = localBucket; + } + + ConcurrentMap getRemoteBuckets() { + return remoteBuckets; + } + + void setRemoteBuckets(ConcurrentMap remoteBuckets) { + this.remoteBuckets = remoteBuckets; + } + + ConcurrentMap getVersions() { + return versions; + } + + void setVersions(ConcurrentMap versions) { + this.versions = versions; + } + + Address getSelfAddress() { + return selfAddress; + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Copier.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Copier.java new file mode 100644 index 0000000000..45279eb9a2 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Copier.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +/** + * Type of data that goes in {@link org.opendaylight.controller.remote.rpc.registry.gossip.Bucket}. + * The implementers should do deep cloning in copy() method. 
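The copy() contract matters because BucketImpl.getData() returns the stored value through copy(): a shallow copy would let a caller mutate state the bucket still owns. Below is a small example of the intended deep-ish clone, modeled on how RoutingTable.copy() above rebuilds its backing HashMap; the ContactList type is purely illustrative.

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

public class CopierExample {

    // Same shape as the Copier contract used by the buckets: produce an independent copy.
    interface Copier<T> {
        T copy();
    }

    static class ContactList implements Copier<ContactList>, Serializable {
        private final List<String> contacts = new ArrayList<>();

        void add(String contact) {
            contacts.add(contact);
        }

        int size() {
            return contacts.size();
        }

        @Override
        public ContactList copy() {
            // Copy the container, not just the reference, so later mutations of the
            // original do not leak into snapshots that were already handed out.
            ContactList copy = new ContactList();
            copy.contacts.addAll(this.contacts);
            return copy;
        }
    }

    public static void main(String[] args) {
        ContactList original = new ContactList();
        original.add("node-1");

        ContactList snapshot = original.copy();
        original.add("node-2");

        System.out.println(original.size());  // 2
        System.out.println(snapshot.size());  // still 1: the snapshot is independent
    }
}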
+ */ +public interface Copier { + public T copy(); +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Gossiper.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Gossiper.java new file mode 100644 index 0000000000..2320789d59 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Gossiper.java @@ -0,0 +1,433 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import akka.actor.ActorRef; +import akka.actor.ActorSelection; +import akka.actor.Address; +import akka.actor.Cancellable; +import akka.actor.UntypedActor; +import akka.cluster.Cluster; +import akka.cluster.ClusterEvent; +import akka.cluster.Member; +import akka.dispatch.Mapper; +import akka.event.Logging; +import akka.event.LoggingAdapter; +import akka.pattern.Patterns; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; + +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketVersions; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketVersionsReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketsByMembers; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.GetBucketsByMembersReply; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.UpdateRemoteBuckets; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipEnvelope; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipStatus; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipTick; + +/** + * Gossiper that syncs bucket store across nodes in the cluster. + *

+ * It keeps a local scheduler that periodically sends Gossip ticks to + * itself; on each tick it sends the bucket store's bucket versions to a randomly selected remote + * gossiper. + *

+ * When bucket versions are received from a remote gossiper, they are compared + * with the bucket store's bucket versions. Whichever buckets are newer + * locally are sent to the remote gossiper. If any bucket is older in the bucket store, + * a gossip status is sent to the remote gossiper so that it can send the newer buckets. + *

+ * When a bucket is received from a remote gossiper, it is sent to the bucket store + * for update. + * + */ + +public class Gossiper extends UntypedActor { + + final LoggingAdapter log = Logging.getLogger(getContext().system(), this); + + Cluster cluster = Cluster.get(getContext().system()); + + /** + * ActorSystem's address for the current cluster node. + */ + private Address selfAddress = cluster.selfAddress(); + + /** + * All known cluster members + */ + private List

clusterMembers = new ArrayList<>(); + + private Cancellable gossipTask; + + private Boolean autoStartGossipTicks = true; + + public Gossiper(){} + + /** + * Helpful for testing + * @param autoStartGossipTicks used for turning off gossip ticks during testing. + * Gossip tick can be manually sent. + */ + public Gossiper(Boolean autoStartGossipTicks){ + this.autoStartGossipTicks = autoStartGossipTicks; + } + + @Override + public void preStart(){ + + cluster.subscribe(getSelf(), + ClusterEvent.initialStateAsEvents(), + ClusterEvent.MemberEvent.class, + ClusterEvent.UnreachableMember.class); + + if (autoStartGossipTicks) { + gossipTask = getContext().system().scheduler().schedule( + new FiniteDuration(1, TimeUnit.SECONDS), //initial delay + new FiniteDuration(500, TimeUnit.MILLISECONDS), //interval + getSelf(), //target + new Messages.GossiperMessages.GossipTick(), //message + getContext().dispatcher(), //execution context + getSelf() //sender + ); + } + } + + @Override + public void postStop(){ + if (cluster != null) + cluster.unsubscribe(getSelf()); + if (gossipTask != null) + gossipTask.cancel(); + } + + @Override + public void onReceive(Object message) throws Exception { + + log.debug("Received message: node[{}], message[{}]", selfAddress, message); + + //Usually sent by self via gossip task defined above. But its not enforced. + //These ticks can be sent by another actor as well which is esp. useful while testing + if (message instanceof GossipTick) + receiveGossipTick(); + + //Message from remote gossiper with its bucket versions + else if (message instanceof GossipStatus) + receiveGossipStatus((GossipStatus) message); + + //Message from remote gossiper with buckets. This is usually in response to GossipStatus message + //The contained buckets are newer as determined by the remote gossiper by comparing the GossipStatus + //message with its local versions + else if (message instanceof GossipEnvelope) + receiveGossip((GossipEnvelope) message); + + else if (message instanceof ClusterEvent.MemberUp) { + receiveMemberUp(((ClusterEvent.MemberUp) message).member()); + + } else if (message instanceof ClusterEvent.MemberRemoved) { + receiveMemberRemoveOrUnreachable(((ClusterEvent.MemberRemoved) message).member()); + + } else if ( message instanceof ClusterEvent.UnreachableMember){ + receiveMemberRemoveOrUnreachable(((ClusterEvent.UnreachableMember) message).member()); + + } else + unhandled(message); + } + + /** + * Remove member from local copy of member list. If member down is self, then stop the actor + * + * @param member who went down + */ + void receiveMemberRemoveOrUnreachable(Member member) { + //if its self, then stop itself + if (selfAddress.equals(member.address())){ + getContext().stop(getSelf()); + return; + } + + clusterMembers.remove(member.address()); + log.debug("Removed member [{}], Active member list [{}]", member.address(), clusterMembers); + } + + /** + * Add member to the local copy of member list if it doesnt already + * @param member + */ + void receiveMemberUp(Member member) { + + if (selfAddress.equals(member.address())) + return; //ignore up notification for self + + if (!clusterMembers.contains(member.address())) + clusterMembers.add(member.address()); + + log.debug("Added member [{}], Active member list [{}]", member.address(), clusterMembers); + } + + /** + * Sends Gossip status to other members in the cluster.
+ * 1. If there are no members, ignore the tick.
+ * 2. If there's only 1 member, send gossip status (bucket versions) to it.
+ * 3. If there is more than one member, randomly pick one and send gossip status (bucket versions) to it. + */ + void receiveGossipTick(){ + if (clusterMembers.size() == 0) return; //no members to send gossip status to + + Address remoteMemberToGossipTo = null; + + if (clusterMembers.size() == 1) + remoteMemberToGossipTo = clusterMembers.get(0); + else { + Integer randomIndex = ThreadLocalRandom.current().nextInt(0, clusterMembers.size()); + remoteMemberToGossipTo = clusterMembers.get(randomIndex); + } + + log.debug("Gossiping to [{}]", remoteMemberToGossipTo); + getLocalStatusAndSendTo(remoteMemberToGossipTo); + } + + /** + * Process gossip status received from a remote gossiper. Remote versions are compared with + * the local copy.

+ *
+ * For each bucket:
+ *   - If the local copy is newer, the newer bucket is sent to the remote in a GossipEnvelope
+ *   - If the local copy is older, a GossipStatus is sent to the remote so that it can reply with a GossipEnvelope
+ *   - If both are the same, nothing is done
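(Illustrative aside, not part of this patch: a self-contained sketch of the per-bucket comparison described above, using plain String keys and made-up version numbers instead of the actual Address and Bucket types.)

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class GossipReconcileSketch {
    public static void main(String[] args) {
        Map<String, Long> local = new HashMap<>();   // this node's bucket versions
        Map<String, Long> remote = new HashMap<>();  // versions reported by the remote gossiper
        local.put("memberA", 5L);  local.put("memberB", 7L);
        remote.put("memberA", 3L); remote.put("memberB", 9L);

        Set<String> localIsNewer = new HashSet<>();  // ship these buckets in a GossipEnvelope
        Set<String> localIsOlder = new HashSet<>();  // request these via a GossipStatus
        for (Map.Entry<String, Long> entry : remote.entrySet()) {
            Long localVersion = local.get(entry.getKey());
            if (localVersion == null) {
                localIsOlder.add(entry.getKey());    // unknown locally, ask the remote for it
            } else if (localVersion > entry.getValue()) {
                localIsNewer.add(entry.getKey());
            } else if (localVersion < entry.getValue()) {
                localIsOlder.add(entry.getKey());
            } // equal versions: nothing to do
        }
        System.out.println("locally newer: " + localIsNewer);  // prints [memberA]
        System.out.println("locally older: " + localIsOlder);  // prints [memberB]
    }
}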
+ * + * @param status bucket versions from a remote member + */ + void receiveGossipStatus(GossipStatus status){ + //Don't accept messages from non-members + if (!clusterMembers.contains(status.from())) + return; + + final ActorRef sender = getSender(); + Future futureReply = Patterns.ask(getContext().parent(), new GetBucketVersions(), 1000); + futureReply.map(getMapperToProcessRemoteStatus(sender, status), getContext().dispatcher()); + + } + + /** + * Sends the received buckets in the envelope to the parent Bucket store. + * + * @param envelope contains buckets from a remote gossiper + */ + void receiveGossip(GossipEnvelope envelope){ + //TODO: Add more validations + if (!selfAddress.equals(envelope.to())) { + log.debug("Ignoring message intended for someone else. From [{}] to [{}]", envelope.from(), envelope.to()); + return; + } + + updateRemoteBuckets(envelope.getBuckets()); + + } + + /** + * Helper to send received buckets to bucket store + * + * @param buckets + */ + void updateRemoteBuckets(Map buckets) { + + UpdateRemoteBuckets updateRemoteBuckets = new UpdateRemoteBuckets(buckets); + getContext().parent().tell(updateRemoteBuckets, getSelf()); + } + + /** + * Gets the buckets from bucket store for the given node addresses and sends them to remote gossiper + * + * @param remote remote node to send Buckets to + * @param addresses node addresses whose buckets needs to be sent + */ + void sendGossipTo(final ActorRef remote, final Set
addresses){ + + Future futureReply = Patterns.ask(getContext().parent(), new GetBucketsByMembers(addresses), 1000); + futureReply.map(getMapperToSendGossip(remote), getContext().dispatcher()); + } + + /** + * Gets bucket versions from bucket store and sends to the supplied address + * + * @param remoteActorSystemAddress remote gossiper to send to + */ + void getLocalStatusAndSendTo(Address remoteActorSystemAddress){ + + //Get local status from bucket store and send to remote + Future futureReply = Patterns.ask(getContext().parent(), new GetBucketVersions(), 1000); + ActorSelection remoteRef = getContext().system().actorSelection( + remoteActorSystemAddress.toString() + getSelf().path().toStringWithoutAddress()); + + log.debug("Sending bucket versions to [{}]", remoteRef); + + futureReply.map(getMapperToSendLocalStatus(remoteRef), getContext().dispatcher()); + + } + + /** + * Helper to send bucket versions received from local store + * @param remote remote gossiper to send versions to + * @param localVersions bucket versions received from local store + */ + void sendGossipStatusTo(ActorRef remote, Map localVersions){ + + GossipStatus status = new GossipStatus(selfAddress, localVersions); + remote.tell(status, getSelf()); + } + + void sendGossipStatusTo(ActorSelection remote, Map localVersions){ + + GossipStatus status = new GossipStatus(selfAddress, localVersions); + remote.tell(status, getSelf()); + } + + /// + /// Private factories to create mappers + /// + + private Mapper getMapperToSendLocalStatus(final ActorSelection remote){ + + return new Mapper() { + @Override + public Void apply(Object replyMessage) { + if (replyMessage instanceof GetBucketVersionsReply) { + GetBucketVersionsReply reply = (GetBucketVersionsReply) replyMessage; + Map localVersions = reply.getVersions(); + + sendGossipStatusTo(remote, localVersions); + + } + return null; + } + }; + } + + /** + * Process bucket versions received from + * {@link org.opendaylight.controller.remote.rpc.registry.gossip.BucketStore}. + * Then this method compares remote bucket versions with local bucket versions. + *
+ *
+ *   - For the buckets that are newer locally, send a {@link org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipEnvelope} to the remote
+ *   - For the buckets that are older locally, send a {@link org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipStatus} to the remote so that the remote replies with a GossipEnvelope
+ * + * @param sender the remote member + * @param status bucket versions from a remote member + * @return a {@link akka.dispatch.Mapper} that gets evaluated in future + * + */ + private Mapper getMapperToProcessRemoteStatus(final ActorRef sender, final GossipStatus status){ + + final Map remoteVersions = status.getVersions(); + + return new Mapper() { + @Override + public Void apply(Object replyMessage) { + if (replyMessage instanceof GetBucketVersionsReply) { + GetBucketVersionsReply reply = (GetBucketVersionsReply) replyMessage; + Map localVersions = reply.getVersions(); + + //diff between remote list and local + Set
localIsOlder = new HashSet<>(); + localIsOlder.addAll(remoteVersions.keySet()); + localIsOlder.removeAll(localVersions.keySet()); + + //diff between local list and remote + Set
localIsNewer = new HashSet<>(); + localIsNewer.addAll(localVersions.keySet()); + localIsNewer.removeAll(remoteVersions.keySet()); + + + for (Address address : remoteVersions.keySet()){ + + if (localVersions.get(address) == null || remoteVersions.get(address) == null) + continue; //this condition is taken care of by above diffs + if (localVersions.get(address) < remoteVersions.get(address)) + localIsOlder.add(address); + else if (localVersions.get(address) > remoteVersions.get(address)) + localIsNewer.add(address); + else + continue; + } + + if (!localIsOlder.isEmpty()) + sendGossipStatusTo(sender, localVersions ); + + if (!localIsNewer.isEmpty()) + sendGossipTo(sender, localIsNewer);//send newer buckets to remote + + } + return null; + } + }; + } + + /** + * Processes the message from {@link org.opendaylight.controller.remote.rpc.registry.gossip.BucketStore} + * that contains {@link org.opendaylight.controller.remote.rpc.registry.gossip.Bucket}. + * These buckets are sent to a remote member encapsulated in + * {@link org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipEnvelope} + * + * @param sender the remote member that sent + * {@link org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipStatus} + * in reply to which bucket is being sent back + * @return a {@link akka.dispatch.Mapper} that gets evaluated in future + * + */ + private Mapper getMapperToSendGossip(final ActorRef sender) { + + return new Mapper() { + @Override + public Void apply(Object msg) { + if (msg instanceof GetBucketsByMembersReply) { + Map buckets = ((GetBucketsByMembersReply) msg).getBuckets(); + log.debug("Buckets to send from {}: {}", selfAddress, buckets); + GossipEnvelope envelope = new GossipEnvelope(selfAddress, sender.path().address(), buckets); + sender.tell(envelope, getSelf()); + } + return null; + } + }; + } + + /// + ///Getter Setters + /// + List
getClusterMembers() { + return clusterMembers; + } + + void setClusterMembers(List
clusterMembers) { + this.clusterMembers = clusterMembers; + } + + Address getSelfAddress() { + return selfAddress; + } + + void setSelfAddress(Address selfAddress) { + this.selfAddress = selfAddress; + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Messages.java b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Messages.java new file mode 100644 index 0000000000..bf8b20213b --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/Messages.java @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import akka.actor.Address; +import com.google.common.base.Preconditions; + +import java.io.Serializable; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.ContainsBucketVersions; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.BucketStoreMessages.ContainsBuckets; + + +/** + * These messages are used by {@link org.opendaylight.controller.remote.rpc.registry.gossip.BucketStore} and + * {@link org.opendaylight.controller.remote.rpc.registry.gossip.Gossiper} actors. + */ +public class Messages { + + public static class BucketStoreMessages{ + + public static class GetLocalBucket implements Serializable{} + + public static class ContainsBucket implements Serializable { + final private Bucket bucket; + + public ContainsBucket(Bucket bucket){ + Preconditions.checkArgument(bucket != null, "bucket can not be null"); + this.bucket = bucket; + } + + public Bucket getBucket(){ + return bucket; + } + + } + + public static class UpdateBucket extends ContainsBucket implements Serializable { + public UpdateBucket(Bucket bucket){ + super(bucket); + } + } + + public static class GetLocalBucketReply extends ContainsBucket implements Serializable { + public GetLocalBucketReply(Bucket bucket){ + super(bucket); + } + } + + public static class GetAllBuckets implements Serializable{} + + public static class GetBucketsByMembers implements Serializable{ + private Set
members; + + public GetBucketsByMembers(Set
members){ + Preconditions.checkArgument(members != null, "members can not be null"); + this.members = members; + } + + public Set
getMembers() { + return new HashSet<>(members); + } + } + + public static class ContainsBuckets implements Serializable{ + private Map buckets; + + public ContainsBuckets(Map buckets){ + Preconditions.checkArgument(buckets != null, "buckets can not be null"); + this.buckets = buckets; + } + + public Map getBuckets() { + Map copy = new HashMap<>(buckets.size()); + + for (Map.Entry entry : buckets.entrySet()){ + //ignore null entries + if ( (entry.getKey() == null) || (entry.getValue() == null) ) + continue; + copy.put(entry.getKey(), entry.getValue()); + } + return new HashMap<>(copy); + } + } + + public static class GetAllBucketsReply extends ContainsBuckets implements Serializable{ + public GetAllBucketsReply(Map buckets) { + super(buckets); + } + } + + public static class GetBucketsByMembersReply extends ContainsBuckets implements Serializable{ + public GetBucketsByMembersReply(Map buckets) { + super(buckets); + } + } + + public static class GetBucketVersions implements Serializable{} + + public static class ContainsBucketVersions implements Serializable{ + Map versions; + + public ContainsBucketVersions(Map versions) { + Preconditions.checkArgument(versions != null, "versions can not be null or empty"); + + this.versions = versions; + } + + public Map getVersions() { + return Collections.unmodifiableMap(versions); + } + + } + + public static class GetBucketVersionsReply extends ContainsBucketVersions implements Serializable{ + public GetBucketVersionsReply(Map versions) { + super(versions); + } + } + + public static class UpdateRemoteBuckets extends ContainsBuckets implements Serializable{ + public UpdateRemoteBuckets(Map buckets) { + super(buckets); + } + } + } + + public static class GossiperMessages{ + public static class Tick implements Serializable {} + + public static final class GossipTick extends Tick {} + + public static final class GossipStatus extends ContainsBucketVersions implements Serializable{ + private Address from; + + public GossipStatus(Address from, Map versions) { + super(versions); + this.from = from; + } + + public Address from() { + return from; + } + } + + public static final class GossipEnvelope extends ContainsBuckets implements Serializable { + private final Address from; + private final Address to; + + public GossipEnvelope(Address from, Address to, Map buckets) { + super(buckets); + Preconditions.checkArgument(to != null, "Recipient of message must not be null"); + this.to = to; + this.from = from; + } + + public Address from() { + return from; + } + + public Address to() { + return to; + } + } + } +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/RpcBrokerTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/RpcBrokerTest.java index 392c1e637d..55aa1d6c87 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/RpcBrokerTest.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/RpcBrokerTest.java @@ -25,7 +25,7 @@ import org.opendaylight.controller.remote.rpc.messages.InvokeRoutedRpc; import org.opendaylight.controller.remote.rpc.messages.InvokeRpc; import org.opendaylight.controller.remote.rpc.messages.RpcResponse; import org.opendaylight.controller.remote.rpc.registry.ClusterWrapper; -import org.opendaylight.controller.remote.rpc.registry.RpcRegistry; +import org.opendaylight.controller.remote.rpc.registry.RpcRegistryOld; import 
org.opendaylight.controller.sal.common.util.Rpcs; import org.opendaylight.controller.sal.connector.api.RpcRouter; import org.opendaylight.controller.sal.core.api.Broker; @@ -69,7 +69,7 @@ public class RpcBrokerTest { @Test public void testInvokeRpcError() throws URISyntaxException { new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(Mockito.mock(ClusterWrapper.class))); + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(Mockito.mock(ClusterWrapper.class))); Broker.ProviderSession brokerSession = Mockito.mock(Broker.ProviderSession.class); SchemaContext schemaContext = mock(SchemaContext.class); ActorRef rpcBroker = system.actorOf(RpcBroker.props(brokerSession, rpcRegistry, schemaContext)); @@ -100,7 +100,7 @@ public class RpcBrokerTest { @Test public void testInvokeRpc() throws URISyntaxException { new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(mock(ClusterWrapper.class))); + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(mock(ClusterWrapper.class))); Broker.ProviderSession brokerSession = mock(Broker.ProviderSession.class); SchemaContext schemaContext = mock(SchemaContext.class); ActorRef rpcBroker = system.actorOf(RpcBroker.props(brokerSession, rpcRegistry, schemaContext)); @@ -141,7 +141,7 @@ public class RpcBrokerTest { @Test public void testInvokeRoutedRpcError() throws URISyntaxException { new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(Mockito.mock(ClusterWrapper.class))); + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(Mockito.mock(ClusterWrapper.class))); Broker.ProviderSession brokerSession = Mockito.mock(Broker.ProviderSession.class); SchemaContext schemaContext = mock(SchemaContext.class); ActorRef rpcBroker = system.actorOf(RpcBroker.props(brokerSession, rpcRegistry, schemaContext)); @@ -172,7 +172,7 @@ public class RpcBrokerTest { @Test public void testInvokeRoutedRpc() throws URISyntaxException { new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(mock(ClusterWrapper.class))); + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(mock(ClusterWrapper.class))); Broker.ProviderSession brokerSession = mock(Broker.ProviderSession.class); SchemaContext schemaContext = mock(SchemaContext.class); ActorRef rpcBroker = system.actorOf(RpcBroker.props(brokerSession, rpcRegistry, schemaContext)); diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOldTest.java similarity index 97% rename from opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableTest.java rename to opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOldTest.java index 129a5a56e8..524a91288d 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableTest.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RoutingTableOldTest.java @@ -19,10 +19,10 @@ import java.net.URISyntaxException; import java.util.HashSet; import java.util.Set; -public class RoutingTableTest { +public class RoutingTableOldTest { - private RoutingTable, String> routingTable = - new RoutingTable<>(); + 
private RoutingTableOld, String> routingTable = + new RoutingTableOld<>(); @Test public void addGlobalRouteNullRouteIdTest() { diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOldTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOldTest.java new file mode 100644 index 0000000000..0f711b4e85 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryOldTest.java @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.remote.rpc.registry; + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.testkit.JavaTestKit; +import junit.framework.Assert; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; +import org.opendaylight.controller.remote.rpc.RouteIdentifierImpl; +import org.opendaylight.controller.remote.rpc.messages.AddRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.AddRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpcReply; +import org.opendaylight.controller.remote.rpc.messages.GetRpc; +import org.opendaylight.controller.remote.rpc.messages.GetRpcReply; +import org.opendaylight.controller.remote.rpc.messages.RemoveRoutedRpc; +import org.opendaylight.controller.remote.rpc.messages.RemoveRpc; +import org.opendaylight.controller.sal.connector.api.RpcRouter; +import org.opendaylight.yangtools.yang.common.QName; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashSet; +import java.util.Set; + +public class RpcRegistryOldTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create(); + } + + @AfterClass + public static void teardown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + /** + This test add, read and remove an entry in global rpc + */ + @Test + public void testGlobalRpc() throws URISyntaxException { + new JavaTestKit(system) {{ + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(Mockito.mock(ClusterWrapper.class))); + QName type = new QName(new URI("actor1"), "actor1"); + RouteIdentifierImpl routeId = new RouteIdentifierImpl(null, type, null); + final String route = "actor1"; + + AddRpc rpcMsg = new AddRpc(routeId, route); + rpcRegistry.tell(rpcMsg, getRef()); + expectMsgEquals(duration("2 second"), "Success"); + + GetRpc getRpc = new GetRpc(routeId); + rpcRegistry.tell(getRpc, getRef()); + + Boolean getMsg = new ExpectMsg("GetRpcReply") { + protected Boolean match(Object in) { + if (in instanceof GetRpcReply) { + GetRpcReply reply = (GetRpcReply)in; + return route.equals(reply.getRoutePath()); + } else { + throw noMatch(); + } + } + }.get(); // this extracts the received message + + Assert.assertTrue(getMsg); + + RemoveRpc removeMsg = new RemoveRpc(routeId); + rpcRegistry.tell(removeMsg, getRef()); + expectMsgEquals(duration("2 second"), "Success"); + + rpcRegistry.tell(getRpc, getRef()); + + Boolean getNullMsg = new 
ExpectMsg("GetRpcReply") { + protected Boolean match(Object in) { + if (in instanceof GetRpcReply) { + GetRpcReply reply = (GetRpcReply)in; + return reply.getRoutePath() == null; + } else { + throw noMatch(); + } + } + }.get(); + Assert.assertTrue(getNullMsg); + }}; + + } + + /** + This test add, read and remove an entry in routed rpc + */ + @Test + public void testRoutedRpc() throws URISyntaxException { + new JavaTestKit(system) {{ + ActorRef rpcRegistry = system.actorOf(RpcRegistryOld.props(Mockito.mock(ClusterWrapper.class))); + QName type = new QName(new URI("actor1"), "actor1"); + RouteIdentifierImpl routeId = new RouteIdentifierImpl(null, type, null); + final String route = "actor1"; + + Set> routeIds = new HashSet<>(); + routeIds.add(routeId); + + AddRoutedRpc rpcMsg = new AddRoutedRpc(routeIds, route); + rpcRegistry.tell(rpcMsg, getRef()); + expectMsgEquals(duration("2 second"), "Success"); + + GetRoutedRpc getRpc = new GetRoutedRpc(routeId); + rpcRegistry.tell(getRpc, getRef()); + + Boolean getMsg = new ExpectMsg("GetRoutedRpcReply") { + protected Boolean match(Object in) { + if (in instanceof GetRoutedRpcReply) { + GetRoutedRpcReply reply = (GetRoutedRpcReply)in; + return route.equals(reply.getRoutePath()); + } else { + throw noMatch(); + } + } + }.get(); // this extracts the received message + + Assert.assertTrue(getMsg); + + RemoveRoutedRpc removeMsg = new RemoveRoutedRpc(routeIds, route); + rpcRegistry.tell(removeMsg, getRef()); + expectMsgEquals(duration("2 second"), "Success"); + + rpcRegistry.tell(getRpc, getRef()); + + Boolean getNullMsg = new ExpectMsg("GetRoutedRpcReply") { + protected Boolean match(Object in) { + if (in instanceof GetRoutedRpcReply) { + GetRoutedRpcReply reply = (GetRoutedRpcReply)in; + return reply.getRoutePath() == null; + } else { + throw noMatch(); + } + } + }.get(); + Assert.assertTrue(getNullMsg); + }}; + + } + +} diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryTest.java index d011d331a6..da3942a828 100644 --- a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryTest.java +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryTest.java @@ -1,159 +1,282 @@ -/* - * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
- * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Public License v1.0 which accompanies this distribution, - * and is available at http://www.eclipse.org/legal/epl-v10.html - */ - package org.opendaylight.controller.remote.rpc.registry; +import akka.actor.ActorPath; import akka.actor.ActorRef; +import akka.actor.ActorSelection; import akka.actor.ActorSystem; +import akka.actor.ChildActorPath; +import akka.actor.Props; +import akka.japi.Pair; import akka.testkit.JavaTestKit; -import junit.framework.Assert; +import com.typesafe.config.ConfigFactory; +import org.junit.After; import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import org.mockito.Mockito; import org.opendaylight.controller.remote.rpc.RouteIdentifierImpl; -import org.opendaylight.controller.remote.rpc.messages.AddRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.AddRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRoutedRpcReply; -import org.opendaylight.controller.remote.rpc.messages.GetRpc; -import org.opendaylight.controller.remote.rpc.messages.GetRpcReply; -import org.opendaylight.controller.remote.rpc.messages.RemoveRoutedRpc; -import org.opendaylight.controller.remote.rpc.messages.RemoveRpc; import org.opendaylight.controller.sal.connector.api.RpcRouter; import org.opendaylight.yangtools.yang.common.QName; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; import java.net.URI; import java.net.URISyntaxException; -import java.util.HashSet; -import java.util.Set; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.AddOrUpdateRoutes; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.RemoveRoutes; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRouters; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRoutersReply; +import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter; public class RpcRegistryTest { - static ActorSystem system; - - - @BeforeClass - public static void setup() { - system = ActorSystem.create(); - } - - @AfterClass - public static void teardown() { - JavaTestKit.shutdownActorSystem(system); - system = null; - } - - /** - This test add, read and remove an entry in global rpc - */ - @Test - public void testGlobalRpc() throws URISyntaxException { - new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(Mockito.mock(ClusterWrapper.class))); - QName type = new QName(new URI("actor1"), "actor1"); - RouteIdentifierImpl routeId = new RouteIdentifierImpl(null, type, null); - final String route = "actor1"; - - AddRpc rpcMsg = new AddRpc(routeId, route); - rpcRegistry.tell(rpcMsg, getRef()); - expectMsgEquals(duration("2 second"), "Success"); - - GetRpc getRpc = new GetRpc(routeId); - rpcRegistry.tell(getRpc, getRef()); - - Boolean getMsg = new ExpectMsg("GetRpcReply") { - protected Boolean match(Object in) { - if (in instanceof GetRpcReply) { - GetRpcReply reply = (GetRpcReply)in; - return route.equals(reply.getRoutePath()); - } else { - throw noMatch(); - } - } - }.get(); // this extracts the received message + 
private static ActorSystem node1; + private static ActorSystem node2; + private static ActorSystem node3; - Assert.assertTrue(getMsg); + private ActorRef registry1; + private ActorRef registry2; + private ActorRef registry3; - RemoveRpc removeMsg = new RemoveRpc(routeId); - rpcRegistry.tell(removeMsg, getRef()); - expectMsgEquals(duration("2 second"), "Success"); + @BeforeClass + public static void setup() throws InterruptedException { + Thread.sleep(1000); //give some time for previous test to close netty ports + node1 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberA")); + node2 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberB")); + node3 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberC")); + } - rpcRegistry.tell(getRpc, getRef()); + @AfterClass + public static void teardown(){ + JavaTestKit.shutdownActorSystem(node1); + JavaTestKit.shutdownActorSystem(node2); + JavaTestKit.shutdownActorSystem(node3); + if (node1 != null) + node1.shutdown(); + if (node2 != null) + node2.shutdown(); + if (node3 != null) + node3.shutdown(); - Boolean getNullMsg = new ExpectMsg("GetRpcReply") { - protected Boolean match(Object in) { - if (in instanceof GetRpcReply) { - GetRpcReply reply = (GetRpcReply)in; - return reply.getRoutePath() == null; - } else { - throw noMatch(); - } - } - }.get(); - Assert.assertTrue(getNullMsg); - }}; - - } - - /** - This test add, read and remove an entry in routed rpc - */ - @Test - public void testRoutedRpc() throws URISyntaxException { - new JavaTestKit(system) {{ - ActorRef rpcRegistry = system.actorOf(RpcRegistry.props(Mockito.mock(ClusterWrapper.class))); - QName type = new QName(new URI("actor1"), "actor1"); - RouteIdentifierImpl routeId = new RouteIdentifierImpl(null, type, null); - final String route = "actor1"; - - Set> routeIds = new HashSet<>(); - routeIds.add(routeId); - - AddRoutedRpc rpcMsg = new AddRoutedRpc(routeIds, route); - rpcRegistry.tell(rpcMsg, getRef()); - expectMsgEquals(duration("2 second"), "Success"); - - GetRoutedRpc getRpc = new GetRoutedRpc(routeId); - rpcRegistry.tell(getRpc, getRef()); - - Boolean getMsg = new ExpectMsg("GetRoutedRpcReply") { - protected Boolean match(Object in) { - if (in instanceof GetRoutedRpcReply) { - GetRoutedRpcReply reply = (GetRoutedRpcReply)in; - return route.equals(reply.getRoutePath()); - } else { - throw noMatch(); - } + } + + @Before + public void createRpcRegistry() throws InterruptedException { + registry1 = node1.actorOf(Props.create(RpcRegistry.class)); + registry2 = node2.actorOf(Props.create(RpcRegistry.class)); + registry3 = node3.actorOf(Props.create(RpcRegistry.class)); + } + + @After + public void stopRpcRegistry() throws InterruptedException { + if (registry1 != null) + node1.stop(registry1); + if (registry2 != null) + node2.stop(registry2); + if (registry3 != null) + node3.stop(registry3); + } + + /** + * One node cluster. + * 1. Register rpc, ensure router can be found + * 2. 
Then remove rpc, ensure its deleted + * + * @throws URISyntaxException + * @throws InterruptedException + */ + @Test + public void testAddRemoveRpcOnSameNode() throws URISyntaxException, InterruptedException { + + final JavaTestKit mockBroker = new JavaTestKit(node1); + + //Add rpc on node 1 + registry1.tell(new SetLocalRouter(mockBroker.getRef()), mockBroker.getRef()); + registry1.tell(getAddRouteMessage(), mockBroker.getRef()); + + Thread.sleep(1000);// + + //find the route on node 1's registry + registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef()); + FindRoutersReply message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class); + List> pairs = message.getRouterWithUpdateTime(); + + validateRouterReceived(pairs, mockBroker.getRef()); + + //Now remove rpc + registry1.tell(getRemoveRouteMessage(), mockBroker.getRef()); + Thread.sleep(1000); + //find the route on node 1's registry + registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef()); + message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class); + pairs = message.getRouterWithUpdateTime(); + + Assert.assertTrue(pairs.isEmpty()); + } + + /** + * Three node cluster. + * 1. Register rpc on 1 node, ensure its router can be found on other 2. + * 2. Remove rpc on 1 node, ensure its removed on other 2. + * + * @throws URISyntaxException + * @throws InterruptedException + */ + @Test + public void testRpcAddRemoveInCluster() throws URISyntaxException, InterruptedException { + + validateSystemStartup(); + + final JavaTestKit mockBroker1 = new JavaTestKit(node1); + final JavaTestKit mockBroker2 = new JavaTestKit(node2); + final JavaTestKit mockBroker3 = new JavaTestKit(node3); + + //Add rpc on node 1 + registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef()); + registry1.tell(getAddRouteMessage(), mockBroker1.getRef()); + + Thread.sleep(1000);// give some time for bucket store data sync + + //find the route in node 2's registry + List> pairs = findRouters(registry2, mockBroker2); + validateRouterReceived(pairs, mockBroker1.getRef()); + + //find the route in node 3's registry + pairs = findRouters(registry3, mockBroker3); + validateRouterReceived(pairs, mockBroker1.getRef()); + + //Now remove + registry1.tell(getRemoveRouteMessage(), mockBroker1.getRef()); + Thread.sleep(1000);// give some time for bucket store data sync + + pairs = findRouters(registry2, mockBroker2); + Assert.assertTrue(pairs.isEmpty()); + + pairs = findRouters(registry3, mockBroker3); + Assert.assertTrue(pairs.isEmpty()); + } + + /** + * Three node cluster. + * Register rpc on 2 nodes. Ensure 2 routers are found on 3rd. 
+ * + * @throws Exception + */ + @Test + public void testAnRpcAddedOnMultiNodesShouldReturnMultiRouter() throws Exception { + + validateSystemStartup(); + + final JavaTestKit mockBroker1 = new JavaTestKit(node1); + final JavaTestKit mockBroker2 = new JavaTestKit(node2); + final JavaTestKit mockBroker3 = new JavaTestKit(node3); + + //Thread.sleep(5000);//let system come up + + //Add rpc on node 1 + registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef()); + registry1.tell(getAddRouteMessage(), mockBroker1.getRef()); + + //Add same rpc on node 2 + registry2.tell(new SetLocalRouter(mockBroker2.getRef()), mockBroker2.getRef()); + registry2.tell(getAddRouteMessage(), mockBroker2.getRef()); + + registry3.tell(new SetLocalRouter(mockBroker3.getRef()), mockBroker3.getRef()); + Thread.sleep(1000);// give some time for bucket store data sync + + //find the route in node 3's registry + registry3.tell(new FindRouters(createRouteId()), mockBroker3.getRef()); + FindRoutersReply message = mockBroker3.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class); + List> pairs = message.getRouterWithUpdateTime(); + + validateMultiRouterReceived(pairs, mockBroker1.getRef(), mockBroker2.getRef()); + + } + + private List> findRouters(ActorRef registry, JavaTestKit receivingActor) throws URISyntaxException { + registry.tell(new FindRouters(createRouteId()), receivingActor.getRef()); + FindRoutersReply message = receivingActor.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class); + return message.getRouterWithUpdateTime(); + } + + private void validateMultiRouterReceived(List> actual, ActorRef... expected) { + Assert.assertTrue(actual != null); + Assert.assertTrue(actual.size() == expected.length); + } + + private void validateRouterReceived(List> actual, ActorRef expected){ + Assert.assertTrue(actual != null); + Assert.assertTrue(actual.size() == 1); + + for (Pair pair : actual){ + Assert.assertTrue(expected.path().uid() == pair.first().path().uid()); } - }.get(); // this extracts the received message + } + + private void validateSystemStartup() throws InterruptedException { + + Thread.sleep(5000); + ActorPath gossiper1Path = new ChildActorPath(new ChildActorPath(registry1.path(), "store"), "gossiper"); + ActorPath gossiper2Path = new ChildActorPath(new ChildActorPath(registry2.path(), "store"), "gossiper"); + ActorPath gossiper3Path = new ChildActorPath(new ChildActorPath(registry3.path(), "store"), "gossiper"); + + ActorSelection gossiper1 = node1.actorSelection(gossiper1Path); + ActorSelection gossiper2 = node2.actorSelection(gossiper2Path); + ActorSelection gossiper3 = node3.actorSelection(gossiper3Path); + - Assert.assertTrue(getMsg); + if (!resolveReference(gossiper1, gossiper2, gossiper3)) + Assert.fail("Could not find gossipers"); + } - RemoveRoutedRpc removeMsg = new RemoveRoutedRpc(routeIds, route); - rpcRegistry.tell(removeMsg, getRef()); - expectMsgEquals(duration("2 second"), "Success"); + private Boolean resolveReference(ActorSelection... 
gossipers) throws InterruptedException { - rpcRegistry.tell(getRpc, getRef()); + Boolean resolved = true; - Boolean getNullMsg = new ExpectMsg("GetRoutedRpcReply") { - protected Boolean match(Object in) { - if (in instanceof GetRoutedRpcReply) { - GetRoutedRpcReply reply = (GetRoutedRpcReply)in; - return reply.getRoutePath() == null; - } else { - throw noMatch(); - } + for (int i=0; i< 5; i++) { + Thread.sleep(1000); + for (ActorSelection gossiper : gossipers) { + Future future = gossiper.resolveOne(new FiniteDuration(5000, TimeUnit.MILLISECONDS)); + + ActorRef ref = null; + try { + ref = Await.result(future, new FiniteDuration(10000, TimeUnit.MILLISECONDS)); + } catch (Exception e) { + e.printStackTrace(); + } + + if (ref == null) + resolved = false; + } + + if (resolved) break; } - }.get(); - Assert.assertTrue(getNullMsg); - }}; + return resolved; + } + + private AddOrUpdateRoutes getAddRouteMessage() throws URISyntaxException { + return new AddOrUpdateRoutes(createRouteIds()); + } + + private RemoveRoutes getRemoveRouteMessage() throws URISyntaxException { + return new RemoveRoutes(createRouteIds()); + } - } + private List> createRouteIds() throws URISyntaxException { + QName type = new QName(new URI("/mockrpc"), "mockrpc"); + List> routeIds = new ArrayList<>(); + routeIds.add(new RouteIdentifierImpl(null, type, null)); + return routeIds; + } -} + private RpcRouter.RouteIdentifier createRouteId() throws URISyntaxException { + QName type = new QName(new URI("/mockrpc"), "mockrpc"); + return new RouteIdentifierImpl(null, type, null); + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStoreTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStoreTest.java new file mode 100644 index 0000000000..7e87da0f99 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStoreTest.java @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; +import akka.testkit.TestActorRef; +import akka.testkit.TestProbe; +import com.typesafe.config.ConfigFactory; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.opendaylight.controller.remote.rpc.TerminationMonitor; + +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; + +public class BucketStoreTest { + + private static ActorSystem system; + private static BucketStore store; + + private BucketStore mockStore; + + @BeforeClass + public static void setup() { + + system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("unit-test")); + system.actorOf(Props.create(TerminationMonitor.class), "termination-monitor"); + + store = createStore(); + } + + @AfterClass + public static void teardown() { + system.shutdown(); + } + + @Before + public void createMocks(){ + mockStore = spy(store); + } + + @After + public void resetMocks(){ + reset(mockStore); + } + + @Test + public void testReceiveUpdateBucket_WhenInputBucketShouldUpdateVersion(){ + Bucket bucket = new BucketImpl(); + Long expectedVersion = bucket.getVersion(); + + mockStore.receiveUpdateBucket(bucket); + + Assert.assertEquals(bucket, mockStore.getLocalBucket()); + Assert.assertEquals(expectedVersion, mockStore.getLocalBucket().getVersion()); + } + + /** + * Create BucketStore actor and returns the underlying instance of BucketStore class. + * + * @return instance of BucketStore class + */ + private static BucketStore createStore(){ + TestProbe mockActor = new TestProbe(system); + ActorRef mockGossiper = mockActor.ref(); + final Props props = Props.create(BucketStore.class, mockGossiper); + final TestActorRef testRef = TestActorRef.create(system, props, "testStore"); + + return testRef.underlyingActor(); + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/GossiperTest.java b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/GossiperTest.java new file mode 100644 index 0000000000..f076c136fe --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/gossip/GossiperTest.java @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.remote.rpc.registry.gossip; + +import akka.actor.ActorSystem; +import akka.actor.Address; +import akka.actor.Props; +import akka.testkit.TestActorRef; +import com.typesafe.config.ConfigFactory; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.opendaylight.controller.remote.rpc.TerminationMonitor; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyMap; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipEnvelope; +import static org.opendaylight.controller.remote.rpc.registry.gossip.Messages.GossiperMessages.GossipStatus; + + +public class GossiperTest { + + private static ActorSystem system; + private static Gossiper gossiper; + + private Gossiper mockGossiper; + + @BeforeClass + public static void setup() throws InterruptedException { + Thread.sleep(1000);//give some time for previous test to stop the system. Netty port conflict arises otherwise. + system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("unit-test")); + system.actorOf(Props.create(TerminationMonitor.class), "termination-monitor"); + + gossiper = createGossiper(); + } + + @AfterClass + public static void teardown() { + if (system != null) + system.shutdown(); + } + + @Before + public void createMocks(){ + mockGossiper = spy(gossiper); + } + + @After + public void resetMocks(){ + reset(mockGossiper); + + } + + @Test + public void testReceiveGossipTick_WhenNoRemoteMemberShouldIgnore(){ + + mockGossiper.setClusterMembers(Collections.EMPTY_LIST); + doNothing().when(mockGossiper).getLocalStatusAndSendTo(any(Address.class)); + mockGossiper.receiveGossipTick(); + verify(mockGossiper, times(0)).getLocalStatusAndSendTo(any(Address.class)); + } + + @Test + public void testReceiveGossipTick_WhenRemoteMemberExistsShouldSendStatus(){ + List
members = new ArrayList<>(); + Address remote = new Address("tcp", "member"); + members.add(remote); + + mockGossiper.setClusterMembers(members); + doNothing().when(mockGossiper).getLocalStatusAndSendTo(any(Address.class)); + mockGossiper.receiveGossipTick(); + verify(mockGossiper, times(1)).getLocalStatusAndSendTo(any(Address.class)); + } + + @Test + public void testReceiveGossipStatus_WhenSenderIsNonMemberShouldIgnore(){ + + Address nonMember = new Address("tcp", "non-member"); + GossipStatus remoteStatus = new GossipStatus(nonMember, mock(Map.class)); + + //add a member + List
members = new ArrayList<>(); + members.add(new Address("tcp", "member")); + + mockGossiper.setClusterMembers(members); + mockGossiper.receiveGossipStatus(remoteStatus); + verify(mockGossiper, times(0)).getSender(); + } + + @Test + public void testReceiveGossip_WhenNotAddressedToSelfShouldIgnore(){ + Address notSelf = new Address("tcp", "not-self"); + + GossipEnvelope envelope = new GossipEnvelope(notSelf, notSelf, mock(Map.class)); + doNothing().when(mockGossiper).updateRemoteBuckets(anyMap()); + mockGossiper.receiveGossip(envelope); + verify(mockGossiper, times(0)).updateRemoteBuckets(anyMap()); + } + + /** + * Create Gossiper actor and return the underlying instance of Gossiper class. + * + * @return instance of Gossiper class + */ + private static Gossiper createGossiper(){ + + final Props props = Props.create(Gossiper.class, false); + final TestActorRef testRef = TestActorRef.create(system, props, "testGossiper"); + + return testRef.underlyingActor(); + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/application.conf b/opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/application.conf new file mode 100644 index 0000000000..874d3fcb90 --- /dev/null +++ b/opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/application.conf @@ -0,0 +1,116 @@ +odl-cluster{ + akka { + loglevel = "INFO" + #log-config-on-start = on + + actor { + provider = "akka.cluster.ClusterActorRefProvider" + debug{ + #autoreceive = on + #lifecycle = on + + } + } + remote { + log-received-messages = on + log-sent-messages = on + + log-remote-lifecycle-events = off + netty.tcp { + hostname = "localhost" + port = 2551 + } + } + + cluster { + seed-nodes = ["akka.tcp://opendaylight-rpc@localhost:2551"] + + auto-down-unreachable-after = 10s + } + } +} +unit-test{ + akka { + loglevel = "INFO" + loggers = ["akka.event.slf4j.Slf4jLogger"] + actor { + provider = "akka.cluster.ClusterActorRefProvider" + } + } +} + +memberA{ + akka { + loglevel = "INFO" + loggers = ["akka.event.slf4j.Slf4jLogger"] + actor { + provider = "akka.cluster.ClusterActorRefProvider" + } + remote { + log-received-messages = off + log-sent-messages = off + + log-remote-lifecycle-events = off + netty.tcp { + hostname = "localhost" + port = 2551 + } + } + + cluster { + seed-nodes = ["akka.tcp://opendaylight-rpc@localhost:2551"] + + auto-down-unreachable-after = 10s + } + } +} +memberB{ + akka { + loglevel = "INFO" + loggers = ["akka.event.slf4j.Slf4jLogger"] + actor { + provider = "akka.cluster.ClusterActorRefProvider" + } + remote { + log-received-messages = off + log-sent-messages = off + + log-remote-lifecycle-events = off + netty.tcp { + hostname = "localhost" + port = 2552 + } + } + + cluster { + seed-nodes = ["akka.tcp://opendaylight-rpc@localhost:2551"] + + auto-down-unreachable-after = 10s + } + } +} +memberC{ + akka { + loglevel = "INFO" + loggers = ["akka.event.slf4j.Slf4jLogger"] + actor { + provider = "akka.cluster.ClusterActorRefProvider" + } + remote { + log-received-messages = off + log-sent-messages = off + + log-remote-lifecycle-events = off + netty.tcp { + hostname = "localhost" + port = 2553 + } + } + + cluster { + seed-nodes = ["akka.tcp://opendaylight-rpc@localhost:2551"] + + auto-down-unreachable-after = 10s + } + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-rest-connector-config/pom.xml b/opendaylight/md-sal/sal-rest-connector-config/pom.xml index 6d050cf425..fa91f0398d 100644 --- 
a/opendaylight/md-sal/sal-rest-connector-config/pom.xml +++ b/opendaylight/md-sal/sal-rest-connector-config/pom.xml @@ -17,4 +17,30 @@ sal-rest-connector-config Configuration files for sal-rest-connector jar + + + + org.codehaus.mojo + build-helper-maven-plugin + + + attach-artifacts + + attach-artifact + + package + + + + ${project.build.directory}/classes/initial/10-rest-connector.xml + xml + config + + + + + + + + diff --git a/opendaylight/md-sal/sal-rest-connector-config/src/main/resources/initial/10-rest-connector.xml b/opendaylight/md-sal/sal-rest-connector-config/src/main/resources/initial/10-rest-connector.xml index 2fdc8c7d1e..3be423c356 100644 --- a/opendaylight/md-sal/sal-rest-connector-config/src/main/resources/initial/10-rest-connector.xml +++ b/opendaylight/md-sal/sal-rest-connector-config/src/main/resources/initial/10-rest-connector.xml @@ -14,7 +14,7 @@ and is available at http://www.eclipse.org/legal/epl-v10.html rest:rest-connector-impl rest-connector-default-impl - 8181 + 8185 dom:dom-broker-osgi-registry dom-broker @@ -35,4 +35,7 @@ and is available at http://www.eclipse.org/legal/epl-v10.html + + urn:opendaylight:params:xml:ns:yang:controller:md:sal:rest:connector?module=opendaylight-rest-connector&revision=2014-07-24 + diff --git a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/BrokerFacade.java b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/BrokerFacade.java index e8701f37e5..8dbc5b50ee 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/BrokerFacade.java +++ b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/BrokerFacade.java @@ -7,20 +7,12 @@ */ package org.opendaylight.controller.sal.restconf.impl; -import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION; -import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.OPERATIONAL; - import com.google.common.base.Optional; import com.google.common.util.concurrent.CheckedFuture; import com.google.common.util.concurrent.ListenableFuture; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import javax.ws.rs.core.Response.Status; import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; +import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException; import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationException; import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationOperation; @@ -45,6 +37,16 @@ import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.ws.rs.core.Response.Status; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; + +import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION; +import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.OPERATIONAL; + public class BrokerFacade 
{ private final static Logger LOG = LoggerFactory.getLogger(BrokerFacade.class); @@ -255,15 +257,20 @@ public class BrokerFacade { currentArguments.add(currentArg); YangInstanceIdentifier currentPath = YangInstanceIdentifier.create(currentArguments); - final Optional> datastoreData; + final Boolean exists; + try { - datastoreData = rwTx.read(store, currentPath).get(); - } catch (InterruptedException | ExecutionException e) { + + CheckedFuture future = + rwTx.exists(store, currentPath); + exists = future.checkedGet(); + } catch (ReadFailedException e) { LOG.error("Failed to read pre-existing data from store {} path {}", store, currentPath, e); throw new IllegalStateException("Failed to read pre-existing data", e); } - if (!datastoreData.isPresent() && iterator.hasNext()) { + + if (!exists && iterator.hasNext()) { rwTx.merge(store, currentPath, currentOp.createDefault(currentArg)); } } diff --git a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java index 73ca02c505..fac6c80564 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java +++ b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java @@ -632,7 +632,8 @@ public class RestconfImpl implements RestconfService { NormalizedNode data = null; YangInstanceIdentifier normalizedII; if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); data = broker.readConfigurationData(mountPoint, normalizedII); } else { normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier()); @@ -695,7 +696,8 @@ public class RestconfImpl implements RestconfService { NormalizedNode data = null; YangInstanceIdentifier normalizedII; if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); data = broker.readOperationalData(mountPoint, normalizedII); } else { normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier()); @@ -730,7 +732,8 @@ public class RestconfImpl implements RestconfService { try { if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); broker.commitConfigurationDataPut(mountPoint, normalizedII, datastoreNormalizedNode).get(); } else { normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier()); @@ -842,7 +845,8 @@ public class RestconfImpl implements RestconfService { try { if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); broker.commitConfigurationDataPost(mountPoint, normalizedII, datastoreNormalizedData); } else { normalizedII = 
controllerContext.toNormalized(iiWithData.getInstanceIdentifier()); @@ -886,7 +890,8 @@ public class RestconfImpl implements RestconfService { try { if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); broker.commitConfigurationDataPost(mountPoint, normalizedII, datastoreNormalizedData); } else { @@ -908,7 +913,8 @@ public class RestconfImpl implements RestconfService { try { if (mountPoint != null) { - normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData.getInstanceIdentifier()); + normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData + .getInstanceIdentifier()); broker.commitConfigurationDataDelete(mountPoint, normalizedII); } else { normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier()); @@ -1082,7 +1088,12 @@ public class RestconfImpl implements RestconfService { iiBuilder = YangInstanceIdentifier.builder(iiOriginal); } - iiBuilder.node(schemaOfData.getQName()); + if ((schemaOfData instanceof ListSchemaNode)) { + HashMap keys = this.resolveKeysFromData(((ListSchemaNode) schemaOfData), data); + iiBuilder.nodeWithKey(schemaOfData.getQName(), keys); + } else { + iiBuilder.node(schemaOfData.getQName()); + } YangInstanceIdentifier instance = iiBuilder.toInstance(); DOMMountPoint mountPoint = null; @@ -1093,6 +1104,34 @@ public class RestconfImpl implements RestconfService { return new InstanceIdWithSchemaNode(instance, schemaOfData, mountPoint); } + private HashMap resolveKeysFromData(final ListSchemaNode listNode, final CompositeNode dataNode) { + final HashMap keyValues = new HashMap(); + List _keyDefinition = listNode.getKeyDefinition(); + for (final QName key : _keyDefinition) { + SimpleNode head = null; + String localName = key.getLocalName(); + List> simpleNodesByName = dataNode.getSimpleNodesByName(localName); + if (simpleNodesByName != null) { + head = Iterables.getFirst(simpleNodesByName, null); + } + + Object dataNodeKeyValueObject = null; + if (head != null) { + dataNodeKeyValueObject = head.getValue(); + } + + if (dataNodeKeyValueObject == null) { + throw new RestconfDocumentedException("Data contains list \"" + dataNode.getNodeType().getLocalName() + + "\" which does not contain key: \"" + key.getLocalName() + "\"", ErrorType.PROTOCOL, + ErrorTag.INVALID_VALUE); + } + + keyValues.put(key, dataNodeKeyValueObject); + } + + return keyValues; + } + private boolean endsWithMountPoint(final String identifier) { return identifier.endsWith(ControllerContext.MOUNT) || identifier.endsWith(ControllerContext.MOUNT + "/"); } @@ -1431,7 +1470,8 @@ public class RestconfImpl implements RestconfService { "It wasn't possible to correctly interpret data.")); } - private NormalizedNode compositeNodeToDatastoreNormalizedNode(final CompositeNode compNode, final DataSchemaNode schema) { + private NormalizedNode compositeNodeToDatastoreNormalizedNode(final CompositeNode compNode, + final DataSchemaNode schema) { List> lst = new ArrayList>(); lst.add(compNode); if (schema instanceof ContainerSchemaNode) { @@ -1448,7 +1488,8 @@ public class RestconfImpl implements RestconfService { "It wasn't possible to translate specified data to datastore readable form.")); } - private InstanceIdWithSchemaNode normalizeInstanceIdentifierWithSchemaNode(final InstanceIdWithSchemaNode iiWithSchemaNode) { + private 
InstanceIdWithSchemaNode normalizeInstanceIdentifierWithSchemaNode( + final InstanceIdWithSchemaNode iiWithSchemaNode) { return normalizeInstanceIdentifierWithSchemaNode(iiWithSchemaNode, false); } @@ -1459,8 +1500,8 @@ public class RestconfImpl implements RestconfService { iiWithSchemaNode.getMountPoint()); } - private YangInstanceIdentifier instanceIdentifierToReadableFormForNormalizeNode(final YangInstanceIdentifier instIdentifier, - final boolean unwrapLastListNode) { + private YangInstanceIdentifier instanceIdentifierToReadableFormForNormalizeNode( + final YangInstanceIdentifier instIdentifier, final boolean unwrapLastListNode) { Preconditions.checkNotNull(instIdentifier, "Instance identifier can't be null"); final List result = new ArrayList(); final Iterator iter = instIdentifier.getPathArguments().iterator(); diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/BrokerFacadeTest.java b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/BrokerFacadeTest.java index 146e88299a..6b25830240 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/BrokerFacadeTest.java +++ b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/BrokerFacadeTest.java @@ -8,21 +8,9 @@ package org.opendaylight.controller.sal.restconf.impl.test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertSame; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - import com.google.common.base.Optional; import com.google.common.util.concurrent.CheckedFuture; import com.google.common.util.concurrent.Futures; -import java.util.concurrent.Future; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -55,6 +43,19 @@ import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdent import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode; import org.opendaylight.yangtools.yang.data.impl.schema.Builders; +import java.util.concurrent.Future; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + /** * Unit tests for BrokerFacade. 
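The BrokerFacade hunk earlier in this patch stops reading the full parent data and instead asks the transaction whether the path already exists before merging a default parent node. A minimal sketch of that pattern, assuming the MD-SAL DOMDataReadWriteTransaction API that BrokerFacade already uses (the wrapper class and method names here are illustrative, not part of the patch):

    import com.google.common.util.concurrent.CheckedFuture;
    import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
    import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
    import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
    import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
    import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;

    final class EnsureParentSketch {
        // Merge a default parent node only when it is not already present in the store.
        static void ensureParent(final DOMDataReadWriteTransaction rwTx,
                                 final YangInstanceIdentifier path,
                                 final NormalizedNode<?, ?> defaultParent) {
            final CheckedFuture<Boolean, ReadFailedException> future =
                    rwTx.exists(LogicalDatastoreType.CONFIGURATION, path);
            try {
                // checkedGet() rethrows the typed ReadFailedException, so the
                // InterruptedException/ExecutionException handling needed by the
                // old read().get() call is no longer required.
                if (!future.checkedGet()) {
                    rwTx.merge(LogicalDatastoreType.CONFIGURATION, path, defaultParent);
                }
            } catch (ReadFailedException e) {
                throw new IllegalStateException("Failed to check pre-existing data", e);
            }
        }
    }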
* @@ -111,6 +112,11 @@ public class BrokerFacadeTest { return Futures.immediateCheckedFuture(Optional.> of(dummyNode)); } + private CheckedFuture wrapExistence(Boolean exists) { + return Futures.immediateCheckedFuture(exists); + } + + /** * Value of this node shouldn't be important for testing purposes */ @@ -189,8 +195,14 @@ public class BrokerFacadeTest { CheckedFuture expFuture = mock(CheckedFuture.class); NormalizedNode dummyNode2 = createDummyNode("dummy:namespace2", "2014-07-01", "dummy local name2"); + when(rwTransaction.read(eq(LogicalDatastoreType.CONFIGURATION), any(YangInstanceIdentifier.class))).thenReturn( wrapDummyNode(dummyNode2)); + + when(rwTransaction.exists(eq(LogicalDatastoreType.CONFIGURATION), any(YangInstanceIdentifier.class))).thenReturn( + wrapExistence(true)); + + when(rwTransaction.submit()).thenReturn(expFuture); CheckedFuture actualFuture = brokerFacade.commitConfigurationDataPost( diff --git a/opendaylight/md-sal/sal-rest-docgen/pom.xml b/opendaylight/md-sal/sal-rest-docgen/pom.xml index 0c8b4d5a2a..1141e1d72e 100644 --- a/opendaylight/md-sal/sal-rest-docgen/pom.xml +++ b/opendaylight/md-sal/sal-rest-docgen/pom.xml @@ -96,6 +96,10 @@ mockito-all test + + org.opendaylight.yangtools + yang-data-api + diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGenerator.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGenerator.java index 82409d2e40..633d419fa9 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGenerator.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGenerator.java @@ -7,8 +7,8 @@ */ package org.opendaylight.controller.sal.rest.doc.impl; +import com.google.common.base.Preconditions; import javax.ws.rs.core.UriInfo; - import org.opendaylight.controller.sal.core.api.model.SchemaService; import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration; import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList; @@ -16,11 +16,8 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Preconditions; - /** - * This class gathers all yang defined {@link Module}s and generates Swagger - * compliant documentation. + * This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation. 
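BrokerFacadeTest now has to stub the new exists() call alongside read(), which is what wrapExistence(...) above is for. A minimal sketch of that stubbing, assuming the same Mockito matchers and Guava immediateCheckedFuture used elsewhere in the test (the transaction variable name is illustrative):

    import static org.mockito.Matchers.any;
    import static org.mockito.Matchers.eq;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import com.google.common.util.concurrent.CheckedFuture;
    import com.google.common.util.concurrent.Futures;
    import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
    import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
    import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
    import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;

    final class ExistsStubSketch {
        static DOMDataReadWriteTransaction stubbedTransaction() {
            DOMDataReadWriteTransaction rwTx = mock(DOMDataReadWriteTransaction.class);
            // Wrap the boolean in an immediate CheckedFuture, mirroring wrapExistence(...).
            CheckedFuture<Boolean, ReadFailedException> exists =
                    Futures.immediateCheckedFuture(Boolean.TRUE);
            // Any configuration-path existence check reports "already present".
            when(rwTx.exists(eq(LogicalDatastoreType.CONFIGURATION),
                    any(YangInstanceIdentifier.class))).thenReturn(exists);
            return rwTx;
        }
    }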
*/ public class ApiDocGenerator extends BaseYangSwaggerGenerator { diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/BaseYangSwaggerGenerator.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/BaseYangSwaggerGenerator.java index 5ba8b26bc1..1b27182514 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/BaseYangSwaggerGenerator.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/BaseYangSwaggerGenerator.java @@ -7,6 +7,12 @@ */ package org.opendaylight.controller.sal.rest.doc.impl; +import static org.opendaylight.controller.sal.rest.doc.util.RestDocgenUtil.resolvePathArgumentsName; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule; +import com.google.common.base.Preconditions; import java.io.IOException; import java.net.URI; import java.text.DateFormat; @@ -22,9 +28,7 @@ import java.util.List; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; - import javax.ws.rs.core.UriInfo; - import org.json.JSONException; import org.json.JSONObject; import org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder; @@ -46,11 +50,6 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule; -import com.google.common.base.Preconditions; - public class BaseYangSwaggerGenerator { private static Logger _logger = LoggerFactory.getLogger(BaseYangSwaggerGenerator.class); @@ -75,8 +74,7 @@ public class BaseYangSwaggerGenerator { * @param operType * @return list of modules converted to swagger compliant resource list. 
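getResourceListing(...) walks every module in the SchemaContext and registers one Swagger resource per module, identified by name and revision. A small sketch of that iteration, with the revision format and path shape taken from the test expectations (class and method names here are illustrative):

    import java.text.DateFormat;
    import java.text.SimpleDateFormat;
    import java.util.Set;
    import org.opendaylight.yangtools.yang.model.api.Module;
    import org.opendaylight.yangtools.yang.model.api.SchemaContext;

    final class ResourceListingSketch {
        private static final DateFormat REVISION_FORMAT = new SimpleDateFormat("yyyy-MM-dd");

        // Produce one "<basePath>/<module>(<revision>)" path per module, the same
        // shape ApiDocGeneratorTest asserts for "toaster2(2009-11-20)".
        static void printResourcePaths(final SchemaContext schemaContext, final String basePath) {
            Set<Module> modules = schemaContext.getModules();
            for (Module module : modules) {
                String revision = REVISION_FORMAT.format(module.getRevision());
                System.out.println(basePath + "/" + module.getName() + "(" + revision + ")");
            }
        }
    }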
*/ - public ResourceList getResourceListing(UriInfo uriInfo, SchemaContext schemaContext, - String context) { + public ResourceList getResourceListing(UriInfo uriInfo, SchemaContext schemaContext, String context) { ResourceList resourceList = createResourceList(); @@ -88,11 +86,9 @@ public class BaseYangSwaggerGenerator { for (Module module : modules) { String revisionString = SIMPLE_DATE_FORMAT.format(module.getRevision()); - Resource resource = new Resource(); _logger.debug("Working on [{},{}]...", module.getName(), revisionString); - ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo, - schemaContext, context); + ApiDeclaration doc = getApiDeclaration(module.getName(), revisionString, uriInfo, schemaContext, context); if (doc != null) { resource.setPath(generatePath(uriInfo, module.getName(), revisionString)); @@ -119,8 +115,7 @@ public class BaseYangSwaggerGenerator { return uri.toASCIIString(); } - public ApiDeclaration getApiDeclaration(String module, String revision, UriInfo uriInfo, - SchemaContext schemaContext, String context) { + public ApiDeclaration getApiDeclaration(String module, String revision, UriInfo uriInfo, SchemaContext schemaContext, String context) { Date rev = null; try { rev = SIMPLE_DATE_FORMAT.parse(revision); @@ -128,17 +123,15 @@ public class BaseYangSwaggerGenerator { throw new IllegalArgumentException(e); } Module m = schemaContext.findModuleByName(module, rev); - Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module - + "," + revision); + Preconditions.checkArgument(m != null, "Could not find module by name,revision: " + module + "," + revision); - return getApiDeclaration(m, rev, uriInfo, schemaContext, context); + return getApiDeclaration(m, rev, uriInfo, context, schemaContext); } - public ApiDeclaration getApiDeclaration(Module module, Date revision, UriInfo uriInfo, - SchemaContext schemaContext, String context) { + public ApiDeclaration getApiDeclaration(Module module, Date revision, UriInfo uriInfo, String context, SchemaContext schemaContext) { String basePath = createBasePathFromUriInfo(uriInfo); - ApiDeclaration doc = getSwaggerDocSpec(module, basePath, context); + ApiDeclaration doc = getSwaggerDocSpec(module, basePath, context, schemaContext); if (doc != null) { return doc; } @@ -152,12 +145,12 @@ public class BaseYangSwaggerGenerator { portPart = ":" + port; } String basePath = new StringBuilder(uriInfo.getBaseUri().getScheme()).append("://") - .append(uriInfo.getBaseUri().getHost()).append(portPart).append("/") - .append(RESTCONF_CONTEXT_ROOT).toString(); + .append(uriInfo.getBaseUri().getHost()).append(portPart).append("/").append(RESTCONF_CONTEXT_ROOT) + .toString(); return basePath; } - public ApiDeclaration getSwaggerDocSpec(Module m, String basePath, String context) { + public ApiDeclaration getSwaggerDocSpec(Module m, String basePath, String context, SchemaContext schemaContext) { ApiDeclaration doc = createApiDeclaration(basePath); List apis = new ArrayList(); @@ -167,22 +160,21 @@ public class BaseYangSwaggerGenerator { for (DataSchemaNode node : dataSchemaNodes) { if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) { - _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node - .getQName().getLocalName()); + _logger.debug("Is Configuration node [{}] [{}]", node.isConfiguration(), node.getQName().getLocalName()); List pathParams = new ArrayList(); - String resourcePath = getDataStorePath("/config/", context) + m.getName() + 
":"; - addApis(node, apis, resourcePath, pathParams, true); + String resourcePath = getDataStorePath("/config/", context); + addApis(node, apis, resourcePath, pathParams, schemaContext, true); pathParams = new ArrayList(); - resourcePath = getDataStorePath("/operational/", context) + m.getName() + ":"; - addApis(node, apis, resourcePath, pathParams, false); + resourcePath = getDataStorePath("/operational/", context); + addApis(node, apis, resourcePath, pathParams, schemaContext, false); } Set rpcs = m.getRpcs(); for (RpcDefinition rpcDefinition : rpcs) { - String resourcePath = getDataStorePath("/operations/", context) + m.getName() + ":"; - addRpcs(rpcDefinition, apis, resourcePath); + String resourcePath = getDataStorePath("/operations/", context); + addRpcs(rpcDefinition, apis, resourcePath, schemaContext); } } @@ -193,7 +185,7 @@ public class BaseYangSwaggerGenerator { JSONObject models = null; try { - models = jsonConverter.convertToJsonSchema(m); + models = jsonConverter.convertToJsonSchema(m, schemaContext); doc.setModels(models); if (_logger.isDebugEnabled()) { _logger.debug(mapper.writeValueAsString(doc)); @@ -228,13 +220,13 @@ public class BaseYangSwaggerGenerator { return module + "(" + revision + ")"; } - private void addApis(DataSchemaNode node, List apis, String parentPath, - List parentPathParams, boolean addConfigApi) { + private void addApis(DataSchemaNode node, List apis, String parentPath, List parentPathParams, SchemaContext schemaContext, + boolean addConfigApi) { Api api = new Api(); List pathParams = new ArrayList(parentPathParams); - String resourcePath = parentPath + createPath(node, pathParams) + "/"; + String resourcePath = parentPath + createPath(node, pathParams, schemaContext) + "/"; _logger.debug("Adding path: [{}]", resourcePath); api.setPath(resourcePath); api.setOperations(operations(node, pathParams, addConfigApi)); @@ -248,7 +240,7 @@ public class BaseYangSwaggerGenerator { if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) { // keep config and operation attributes separate. 
if (childNode.isConfiguration() == addConfigApi) { - addApis(childNode, apis, resourcePath, pathParams, addConfigApi); + addApis(childNode, apis, resourcePath, pathParams, schemaContext, addConfigApi); } } } @@ -261,8 +253,7 @@ public class BaseYangSwaggerGenerator { * @param pathParams * @return */ - private List operations(DataSchemaNode node, List pathParams, - boolean isConfig) { + private List operations(DataSchemaNode node, List pathParams, boolean isConfig) { List operations = new ArrayList<>(); OperationBuilder.Get getBuilder = new OperationBuilder.Get(node, isConfig); @@ -281,41 +272,37 @@ public class BaseYangSwaggerGenerator { return operations; } - private String createPath(final DataSchemaNode schemaNode, List pathParams) { + private String createPath(final DataSchemaNode schemaNode, List pathParams, SchemaContext schemaContext) { ArrayList pathListParams = new ArrayList(); StringBuilder path = new StringBuilder(); - QName _qName = schemaNode.getQName(); - String localName = _qName.getLocalName(); + String localName = resolvePathArgumentsName(schemaNode, schemaContext); path.append(localName); if ((schemaNode instanceof ListSchemaNode)) { final List listKeys = ((ListSchemaNode) schemaNode).getKeyDefinition(); for (final QName listKey : listKeys) { - { - DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode) - .getDataChildByName(listKey); - pathListParams.add(((LeafSchemaNode) _dataChildByName)); - - String pathParamIdentifier = new StringBuilder("/{") - .append(listKey.getLocalName()).append("}").toString(); - path.append(pathParamIdentifier); - - Parameter pathParam = new Parameter(); - pathParam.setName(listKey.getLocalName()); - pathParam.setDescription(_dataChildByName.getDescription()); - pathParam.setType("string"); - pathParam.setParamType("path"); - - pathParams.add(pathParam); - } + DataSchemaNode _dataChildByName = ((DataNodeContainer) schemaNode).getDataChildByName(listKey); + pathListParams.add(((LeafSchemaNode) _dataChildByName)); + + String pathParamIdentifier = new StringBuilder("/{").append(listKey.getLocalName()).append("}") + .toString(); + path.append(pathParamIdentifier); + + Parameter pathParam = new Parameter(); + pathParam.setName(listKey.getLocalName()); + pathParam.setDescription(_dataChildByName.getDescription()); + pathParam.setType("string"); + pathParam.setParamType("path"); + + pathParams.add(pathParam); } } return path.toString(); } - protected void addRpcs(RpcDefinition rpcDefn, List apis, String parentPath) { + protected void addRpcs(RpcDefinition rpcDefn, List apis, String parentPath, SchemaContext schemaContext) { Api rpc = new Api(); - String resourcePath = parentPath + rpcDefn.getQName().getLocalName(); + String resourcePath = parentPath + resolvePathArgumentsName(rpcDefn, schemaContext); rpc.setPath(resourcePath); Operation operationSpec = new Operation(); @@ -364,4 +351,5 @@ public class BaseYangSwaggerGenerator { } return sortedModules; } + } diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ModelGenerator.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ModelGenerator.java index 95bb1a0943..819892f647 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ModelGenerator.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ModelGenerator.java @@ -7,6 +7,8 @@ */ package 
org.opendaylight.controller.sal.rest.doc.impl; +import static org.opendaylight.controller.sal.rest.doc.util.RestDocgenUtil.resolveNodesName; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -19,6 +21,7 @@ import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder; +import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.model.api.AnyXmlSchemaNode; import org.opendaylight.yangtools.yang.model.api.ChoiceCaseNode; import org.opendaylight.yangtools.yang.model.api.ChoiceNode; @@ -31,6 +34,7 @@ import org.opendaylight.yangtools.yang.model.api.LeafSchemaNode; import org.opendaylight.yangtools.yang.model.api.ListSchemaNode; import org.opendaylight.yangtools.yang.model.api.Module; import org.opendaylight.yangtools.yang.model.api.RpcDefinition; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.opendaylight.yangtools.yang.model.api.TypeDefinition; import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition; import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition; @@ -86,8 +90,8 @@ public class ModelGenerator { private static final String NUMBER = "number"; private static final String BOOLEAN = "boolean"; private static final String STRING = "string"; - private static final String ID_KEY = "id"; - private static final String SUB_TYPES_KEY = "subTypes"; + private static final String ID_KEY = "id"; + private static final String SUB_TYPES_KEY = "subTypes"; private static final Map>, String> YANG_TYPE_TO_JSON_TYPE_MAPPING; @@ -111,18 +115,21 @@ public class ModelGenerator { YANG_TYPE_TO_JSON_TYPE_MAPPING = Collections.unmodifiableMap(tempMap1); } + private Module topLevelModule; + public ModelGenerator() { } - public JSONObject convertToJsonSchema(Module module) throws IOException, JSONException { + public JSONObject convertToJsonSchema(Module module, SchemaContext schemaContext) throws IOException, JSONException { JSONObject models = new JSONObject(); - processContainers(module, models); - processRPCs(module, models); - processIdentities(module, models); + topLevelModule = module; + processContainers(module, models, schemaContext); + processRPCs(module, models, schemaContext); + processIdentities(module, models); return models; } - private void processContainers(Module module, JSONObject models) throws IOException, + private void processContainers(Module module, JSONObject models, SchemaContext schemaContext) throws IOException, JSONException { String moduleName = module.getName(); @@ -136,10 +143,10 @@ public class ModelGenerator { * For every container in the module */ if (childNode instanceof ContainerSchemaNode) { - configModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, - true, models, true); - operationalModuleJSON = processContainer((ContainerSchemaNode) childNode, - moduleName, true, models, false); + configModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models, true, + schemaContext); + operationalModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models, + false, schemaContext); } if (configModuleJSON != null) { @@ -157,15 +164,15 @@ public class ModelGenerator { } /** - * Process the RPCs for a Module Spits out a file each of the name - * -input.json and -output.json for each RPC that contains - * input & output elements + * Process the RPCs for a Module Spits out a 
file each of the name -input.json and -output.json + * for each RPC that contains input & output elements * * @param module * @throws JSONException * @throws IOException */ - private void processRPCs(Module module, JSONObject models) throws JSONException, IOException { + private void processRPCs(Module module, JSONObject models, SchemaContext schemaContext) throws JSONException, + IOException { Set rpcs = module.getRpcs(); String moduleName = module.getName(); @@ -173,7 +180,7 @@ public class ModelGenerator { ContainerSchemaNode input = rpc.getInput(); if (input != null) { - JSONObject inputJSON = processContainer(input, moduleName, true, models); + JSONObject inputJSON = processContainer(input, moduleName, true, models, schemaContext); String filename = "(" + rpc.getQName().getLocalName() + ")input"; inputJSON.put("id", filename); // writeToFile(filename, inputJSON.toString(2), moduleName); @@ -182,7 +189,7 @@ public class ModelGenerator { ContainerSchemaNode output = rpc.getOutput(); if (output != null) { - JSONObject outputJSON = processContainer(output, moduleName, true, models); + JSONObject outputJSON = processContainer(output, moduleName, true, models, schemaContext); String filename = "(" + rpc.getQName().getLocalName() + ")output"; outputJSON.put("id", filename); models.put(filename, outputJSON); @@ -190,58 +197,59 @@ public class ModelGenerator { } } - /** - * Processes the 'identity' statement in a yang model - * and maps it to a 'model' in the Swagger JSON spec. - * - * @param module The module from which the identity stmt will be processed - * @param models The JSONObject in which the parsed identity will be put as a 'model' obj - * @throws JSONException - */ - private void processIdentities(Module module, JSONObject models) throws JSONException { - - String moduleName = module.getName(); - Set idNodes = module.getIdentities(); - _logger.debug("Processing Identities for module {} . Found {} identity statements", moduleName, idNodes.size()); - - for(IdentitySchemaNode idNode : idNodes){ - JSONObject identityObj=new JSONObject(); - String identityName = idNode.getQName().getLocalName(); - _logger.debug("Processing Identity: {}", identityName); - - identityObj.put(ID_KEY, identityName); - identityObj.put(DESCRIPTION_KEY, idNode.getDescription()); - - JSONObject props = new JSONObject(); - IdentitySchemaNode baseId = idNode.getBaseIdentity(); + /** + * Processes the 'identity' statement in a yang model and maps it to a 'model' in the Swagger JSON spec. + * + * @param module + * The module from which the identity stmt will be processed + * @param models + * The JSONObject in which the parsed identity will be put as a 'model' obj + * @throws JSONException + */ + private void processIdentities(Module module, JSONObject models) throws JSONException { + String moduleName = module.getName(); + Set idNodes = module.getIdentities(); + _logger.debug("Processing Identities for module {} . Found {} identity statements", moduleName, idNodes.size()); + + for (IdentitySchemaNode idNode : idNodes) { + JSONObject identityObj = new JSONObject(); + String identityName = idNode.getQName().getLocalName(); + _logger.debug("Processing Identity: {}", identityName); + + identityObj.put(ID_KEY, identityName); + identityObj.put(DESCRIPTION_KEY, idNode.getDescription()); + + JSONObject props = new JSONObject(); + IdentitySchemaNode baseId = idNode.getBaseIdentity(); + + if (baseId == null) { + /** + * This is a base identity. So lets see if it has sub types. 
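processIdentities(...) turns each YANG identity into a Swagger model: a base identity lists its derived identities under subTypes, while a derived identity records its base under type inside properties. A minimal sketch of the resulting JSON shape using org.json; the identity names are only illustrative, borrowed from the toaster example model:

    import org.json.JSONArray;
    import org.json.JSONException;
    import org.json.JSONObject;

    final class IdentityModelSketch {
        public static void main(String[] args) throws JSONException {
            JSONObject models = new JSONObject();

            // Base identity: empty properties plus the names of its derived identities.
            JSONObject toastType = new JSONObject();
            toastType.put("id", "toast-type");
            toastType.put("subTypes", new JSONArray().put("white-bread").put("wheat-bread"));
            toastType.put("properties", new JSONObject());
            models.put("toast-type", toastType);

            // Derived identity: its base identity becomes the "type" property.
            JSONObject whiteBread = new JSONObject();
            whiteBread.put("id", "white-bread");
            JSONObject props = new JSONObject();
            props.put("type", "toast-type");
            whiteBread.put("properties", props);
            models.put("white-bread", whiteBread);

            System.out.println(models.toString(2));
        }
    }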
If it does, then add them to the model + * definition. + */ + Set derivedIds = idNode.getDerivedIdentities(); + + if (derivedIds != null) { + JSONArray subTypes = new JSONArray(); + for (IdentitySchemaNode derivedId : derivedIds) { + subTypes.put(derivedId.getQName().getLocalName()); + } + identityObj.put(SUB_TYPES_KEY, subTypes); + } + } else { + /** + * This is a derived entity. Add it's base type & move on. + */ + props.put(TYPE_KEY, baseId.getQName().getLocalName()); + } - if(baseId==null) { - /** - * This is a base identity. So lets see if - * it has sub types. If it does, then add them to the model definition. - */ - Set derivedIds = idNode.getDerivedIdentities(); - - if(derivedIds != null) { - JSONArray subTypes = new JSONArray(); - for(IdentitySchemaNode derivedId : derivedIds){ - subTypes.put(derivedId.getQName().getLocalName()); - } - identityObj.put(SUB_TYPES_KEY, subTypes); + // Add the properties. For a base type, this will be an empty object as required by the Swagger spec. + identityObj.put(PROPERTIES_KEY, props); + models.put(identityName, identityObj); } - } else { - /** - * This is a derived entity. Add it's base type & move on. - */ - props.put(TYPE_KEY, baseId.getQName().getLocalName()); - } - - //Add the properties. For a base type, this will be an empty object as required by the Swagger spec. - identityObj.put(PROPERTIES_KEY, props); - models.put(identityName, identityObj); } - } + /** * Processes the container node and populates the moduleJSON * @@ -251,14 +259,13 @@ public class ModelGenerator { * @throws JSONException * @throws IOException */ - private JSONObject processContainer(ContainerSchemaNode container, String moduleName, - boolean addSchemaStmt, JSONObject models) throws JSONException, IOException { - return processContainer(container, moduleName, addSchemaStmt, models, (Boolean) null); + private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt, + JSONObject models, SchemaContext schemaContext) throws JSONException, IOException { + return processContainer(container, moduleName, addSchemaStmt, models, (Boolean) null, schemaContext); } - private JSONObject processContainer(ContainerSchemaNode container, String moduleName, - boolean addSchemaStmt, JSONObject models, Boolean isConfig) throws JSONException, - IOException { + private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt, + JSONObject models, Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException { JSONObject moduleJSON = getSchemaTemplate(); if (addSchemaStmt) { moduleJSON = getSchemaTemplate(); @@ -270,57 +277,58 @@ public class ModelGenerator { String containerDescription = container.getDescription(); moduleJSON.put(DESCRIPTION_KEY, containerDescription); - JSONObject properties = processChildren(container.getChildNodes(), moduleName, models, isConfig); + JSONObject properties = processChildren(container.getChildNodes(), container.getQName(), moduleName, models, + isConfig, schemaContext); moduleJSON.put(PROPERTIES_KEY, properties); return moduleJSON; } - private JSONObject processChildren(Iterable nodes, String moduleName, - JSONObject models) throws JSONException, IOException { - return processChildren(nodes, moduleName, models, null); + private JSONObject processChildren(Iterable nodes, QName parentQName, String moduleName, + JSONObject models, SchemaContext schemaContext) throws JSONException, IOException { + return processChildren(nodes, parentQName, moduleName, 
models, null, schemaContext); } /** * Processes the nodes * * @param nodes + * @param parentQName * @param moduleName * @param isConfig * @return * @throws JSONException * @throws IOException */ - private JSONObject processChildren(Iterable nodes, String moduleName, - JSONObject models, Boolean isConfig) throws JSONException, IOException { + private JSONObject processChildren(Iterable nodes, QName parentQName, String moduleName, + JSONObject models, Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException { JSONObject properties = new JSONObject(); for (DataSchemaNode node : nodes) { if (isConfig == null || node.isConfiguration() == isConfig) { - String name = node.getQName().getLocalName(); + String name = resolveNodesName(node, topLevelModule, schemaContext); JSONObject property = null; if (node instanceof LeafSchemaNode) { property = processLeafNode((LeafSchemaNode) node); } else if (node instanceof ListSchemaNode) { - property = processListSchemaNode((ListSchemaNode) node, moduleName, models, isConfig); + property = processListSchemaNode((ListSchemaNode) node, moduleName, models, isConfig, schemaContext); } else if (node instanceof LeafListSchemaNode) { property = processLeafListNode((LeafListSchemaNode) node); } else if (node instanceof ChoiceNode) { - property = processChoiceNode((ChoiceNode) node, moduleName, models); + property = processChoiceNode((ChoiceNode) node, moduleName, models, schemaContext); } else if (node instanceof AnyXmlSchemaNode) { property = processAnyXMLNode((AnyXmlSchemaNode) node); } else if (node instanceof ContainerSchemaNode) { - property = processContainer((ContainerSchemaNode) node, moduleName, false, - models, isConfig); + property = processContainer((ContainerSchemaNode) node, moduleName, false, models, isConfig, + schemaContext); } else { - throw new IllegalArgumentException("Unknown DataSchemaNode type: " - + node.getClass()); + throw new IllegalArgumentException("Unknown DataSchemaNode type: " + node.getClass()); } property.putOpt(DESCRIPTION_KEY, node.getDescription()); @@ -356,15 +364,16 @@ public class ModelGenerator { * @throws JSONException * @throws IOException */ - private JSONObject processChoiceNode(ChoiceNode choiceNode, String moduleName, JSONObject models) - throws JSONException, IOException { + private JSONObject processChoiceNode(ChoiceNode choiceNode, String moduleName, JSONObject models, + SchemaContext schemaContext) throws JSONException, IOException { Set cases = choiceNode.getCases(); JSONArray choiceProps = new JSONArray(); for (ChoiceCaseNode choiceCase : cases) { String choiceName = choiceCase.getQName().getLocalName(); - JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), moduleName, models); + JSONObject choiceProp = processChildren(choiceCase.getChildNodes(), choiceCase.getQName(), moduleName, + models, schemaContext); JSONObject choiceObj = new JSONObject(); choiceObj.put(choiceName, choiceProp); choiceObj.put(TYPE_KEY, OBJECT_TYPE); @@ -384,8 +393,7 @@ public class ModelGenerator { * @param props * @throws JSONException */ - private void processConstraints(ConstraintDefinition constraints, JSONObject props) - throws JSONException { + private void processConstraints(ConstraintDefinition constraints, JSONObject props) throws JSONException { boolean isMandatory = constraints.isMandatory(); props.put(REQUIRED_KEY, isMandatory); @@ -402,9 +410,8 @@ public class ModelGenerator { /** * Parses a ListSchema node. 
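processChildren(...) now names each child through resolveNodesName(...), so a child contributed by an augmenting module keeps that module's prefix in the generated model while children defined in the module itself stay unprefixed. A simplified sketch of that decision; the namespace URIs and helper signature are illustrative, not lifted from the patch:

    import java.net.URI;
    import java.util.Date;

    final class NodeNameSketch {
        // Prefix the local name with "moduleName:" only when the node's namespace or
        // revision differs from the module the documentation is generated for.
        static String resolveName(final URI nodeNamespace, final Date nodeRevision,
                                  final String nodeLocalName,
                                  final URI moduleNamespace, final Date moduleRevision,
                                  final String augmentingModuleName) {
            if (nodeNamespace.equals(moduleNamespace) && nodeRevision.equals(moduleRevision)) {
                return nodeLocalName;
            }
            return augmentingModuleName + ":" + nodeLocalName;
        }

        public static void main(String[] args) throws Exception {
            URI toaster2 = new URI("http://netconfcentral.org/ns/toaster");
            URI augmented = new URI("http://netconfcentral.org/ns/toaster/augmented");
            Date rev = new Date();
            // Child defined in the module itself: plain local name.
            System.out.println(resolveName(toaster2, rev, "toasterSlot", toaster2, rev, "toaster-augmented"));
            // Child introduced by an augmentation: module-prefixed name.
            System.out.println(resolveName(augmented, rev, "slotInfo", toaster2, rev, "toaster-augmented"));
        }
    }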
* - * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in - * a separate JSON schema file. Hence, we have to write some properties to a - * new file, while continuing to process the rest. + * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence, we + * have to write some properties to a new file, while continuing to process the rest. * * @param listNode * @param moduleName @@ -413,21 +420,21 @@ public class ModelGenerator { * @throws JSONException * @throws IOException */ - private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, - JSONObject models, Boolean isConfig) throws JSONException, IOException { + private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models, + Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException { - String fileName = (BooleanUtils.isNotFalse(isConfig)?OperationBuilder.CONFIG:OperationBuilder.OPERATIONAL) + - listNode.getQName().getLocalName(); + String fileName = (BooleanUtils.isNotFalse(isConfig) ? OperationBuilder.CONFIG : OperationBuilder.OPERATIONAL) + + listNode.getQName().getLocalName(); - JSONObject childSchemaProperties = processChildren(listNode.getChildNodes(), moduleName, models); + JSONObject childSchemaProperties = processChildren(listNode.getChildNodes(), listNode.getQName(), moduleName, + models, schemaContext); JSONObject childSchema = getSchemaTemplate(); childSchema.put(TYPE_KEY, OBJECT_TYPE); childSchema.put(PROPERTIES_KEY, childSchemaProperties); /* - * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must - * be in a separate JSON schema file. Hence, we have to write some - * properties to a new file, while continuing to process the rest. + * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence, + * we have to write some properties to a new file, while continuing to process the rest. 
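processListSchemaNode(...) emits the list's child properties as a separate schema whose id is the local name prefixed with (config) or (operational), which is why the test later looks up a "(config)toasterSlot" model. A small sketch of that naming in org.json; the array/$ref wrapper shown for the parent property is an assumption about how the list is referenced, not taken verbatim from the patch:

    import org.json.JSONException;
    import org.json.JSONObject;

    final class ListSchemaSketch {
        public static void main(String[] args) throws JSONException {
            boolean isConfig = true;
            String localName = "toasterSlot";
            // Child schemas live under an id prefixed by the datastore flavour.
            String childSchemaId = (isConfig ? "(config)" : "(operational)") + localName;

            JSONObject childSchema = new JSONObject();
            childSchema.put("type", "object");
            childSchema.put("properties",
                    new JSONObject().put("slotId", new JSONObject().put("type", "string")));
            childSchema.put("id", childSchemaId);

            // Assumed shape: the parent exposes the list as an array of that schema.
            JSONObject listProperty = new JSONObject();
            listProperty.put("type", "array");
            listProperty.put("items", new JSONObject().put("$ref", childSchemaId));

            System.out.println(childSchema.toString(2));
            System.out.println(listProperty.toString(2));
        }
    }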
*/ // writeToFile(fileName, childSchema.toString(2), moduleName); childSchema.put("id", fileName); @@ -483,8 +490,7 @@ public class ModelGenerator { * @param property * @throws JSONException */ - private void processTypeDef(TypeDefinition leafTypeDef, JSONObject property) - throws JSONException { + private void processTypeDef(TypeDefinition leafTypeDef, JSONObject property) throws JSONException { if (leafTypeDef instanceof ExtendedType) { processExtendedType(leafTypeDef, property); @@ -498,7 +504,7 @@ public class ModelGenerator { processUnionType((UnionTypeDefinition) leafTypeDef, property); } else if (leafTypeDef instanceof IdentityrefTypeDefinition) { - property.putOpt(TYPE_KEY, ((IdentityrefTypeDefinition) leafTypeDef).getIdentity().getQName().getLocalName()); + property.putOpt(TYPE_KEY, ((IdentityrefTypeDefinition) leafTypeDef).getIdentity().getQName().getLocalName()); } else if (leafTypeDef instanceof BinaryTypeDefinition) { processBinaryType((BinaryTypeDefinition) leafTypeDef, property); } else { @@ -517,15 +523,13 @@ public class ModelGenerator { * @param property * @throws JSONException */ - private void processExtendedType(TypeDefinition leafTypeDef, JSONObject property) - throws JSONException { + private void processExtendedType(TypeDefinition leafTypeDef, JSONObject property) throws JSONException { Object leafBaseType = leafTypeDef.getBaseType(); if (leafBaseType instanceof ExtendedType) { // recursively process an extended type until we hit a base type processExtendedType((TypeDefinition) leafBaseType, property); } else { - List lengthConstraints = ((ExtendedType) leafTypeDef) - .getLengthConstraints(); + List lengthConstraints = ((ExtendedType) leafTypeDef).getLengthConstraints(); for (LengthConstraint lengthConstraint : lengthConstraints) { Number min = lengthConstraint.getMin(); Number max = lengthConstraint.getMax(); @@ -541,8 +545,7 @@ public class ModelGenerator { /* * */ - private void processBinaryType(BinaryTypeDefinition binaryType, JSONObject property) - throws JSONException { + private void processBinaryType(BinaryTypeDefinition binaryType, JSONObject property) throws JSONException { property.put(TYPE_KEY, STRING); JSONObject media = new JSONObject(); media.put(BINARY_ENCODING_KEY, BASE_64); @@ -555,8 +558,7 @@ public class ModelGenerator { * @param property * @throws JSONException */ - private void processEnumType(EnumerationType enumLeafType, JSONObject property) - throws JSONException { + private void processEnumType(EnumerationType enumLeafType, JSONObject property) throws JSONException { List enumPairs = enumLeafType.getValues(); List enumNames = new ArrayList(); for (EnumPair enumPair : enumPairs) { @@ -571,8 +573,7 @@ public class ModelGenerator { * @param property * @throws JSONException */ - private void processBitsType(BitsTypeDefinition bitsType, JSONObject property) - throws JSONException { + private void processBitsType(BitsTypeDefinition bitsType, JSONObject property) throws JSONException { property.put(TYPE_KEY, ARRAY_TYPE); property.put(MIN_ITEMS, 0); property.put(UNIQUE_ITEMS_KEY, true); @@ -593,18 +594,17 @@ public class ModelGenerator { * @param property * @throws JSONException */ - private void processUnionType(UnionTypeDefinition unionType, JSONObject property) - throws JSONException { + private void processUnionType(UnionTypeDefinition unionType, JSONObject property) throws JSONException { StringBuilder type = new StringBuilder(); - for (TypeDefinition typeDef : unionType.getTypes() ) { - if( type.length() > 0 ){ - type.append( " or " ); + for 
(TypeDefinition typeDef : unionType.getTypes()) { + if (type.length() > 0) { + type.append(" or "); } type.append(YANG_TYPE_TO_JSON_TYPE_MAPPING.get(typeDef.getClass())); } - property.put(TYPE_KEY, type ); + property.put(TYPE_KEY, type); } /** @@ -619,4 +619,5 @@ public class ModelGenerator { return schemaJSON; } + } diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/mountpoints/MountPointSwagger.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/mountpoints/MountPointSwagger.java index 29ada12c6f..7e8707110f 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/mountpoints/MountPointSwagger.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/mountpoints/MountPointSwagger.java @@ -17,9 +17,7 @@ import java.util.Map.Entry; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; - import javax.ws.rs.core.UriInfo; - import org.opendaylight.controller.sal.core.api.model.SchemaService; import org.opendaylight.controller.sal.core.api.mount.MountProvisionInstance; import org.opendaylight.controller.sal.core.api.mount.MountProvisionService; diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/util/RestDocgenUtil.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/util/RestDocgenUtil.java new file mode 100644 index 0000000000..9e1d82ae05 --- /dev/null +++ b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/util/RestDocgenUtil.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.sal.rest.doc.util; + +import java.net.URI; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import org.opendaylight.yangtools.yang.common.QName; +import org.opendaylight.yangtools.yang.model.api.Module; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import org.opendaylight.yangtools.yang.model.api.SchemaNode; + +public class RestDocgenUtil { + + private RestDocgenUtil() { + } + + private static Map> namespaceAndRevisionToModule = new HashMap>(); + + /** + * Resolve path argument name for {@code node}. + * + * The name can contain also prefix which consists of module name followed by colon. The module prefix is presented + * if namespace of {@code node} and its parent is different. In other cases only name of {@code node} is returned. 
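The new RestDocgenUtil class below caches the namespace-and-revision to Module lookup so that repeated path and node name resolution does not call SchemaContext.findModuleByNamespaceAndRevision every time. A compact sketch of that two-level cache, simplified from the utility (field and method names are illustrative):

    import java.net.URI;
    import java.util.Date;
    import java.util.HashMap;
    import java.util.Map;
    import org.opendaylight.yangtools.yang.model.api.Module;
    import org.opendaylight.yangtools.yang.model.api.SchemaContext;

    final class ModuleLookupCacheSketch {
        // namespace -> (revision -> module); both lookups are cheap HashMap gets
        // once the module has been resolved from the SchemaContext a first time.
        private final Map<URI, Map<Date, Module>> cache = new HashMap<>();

        synchronized Module lookup(final URI namespace, final Date revision,
                                   final SchemaContext schemaContext) {
            Map<Date, Module> byRevision = cache.get(namespace);
            if (byRevision == null) {
                byRevision = new HashMap<>();
                cache.put(namespace, byRevision);
            }
            Module module = byRevision.get(revision);
            if (module == null) {
                module = schemaContext.findModuleByNamespaceAndRevision(namespace, revision);
                byRevision.put(revision, module);
            }
            return module;
        }
    }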
+ * + * @return name of {@code node} + */ + public static String resolvePathArgumentsName(final SchemaNode node, final SchemaContext schemaContext) { + Iterable schemaPath = node.getPath().getPathTowardsRoot(); + Iterator it = schemaPath.iterator(); + QName nodeQName = it.next(); + + QName parentQName = null; + if (it.hasNext()) { + parentQName = it.next(); + } + if (isEqualNamespaceAndRevision(parentQName, nodeQName)) { + return node.getQName().getLocalName(); + } else { + return resolveFullNameFromNode(node, schemaContext); + } + } + + private synchronized static String resolveFullNameFromNode(final SchemaNode node, final SchemaContext schemaContext) { + final URI namespace = node.getQName().getNamespace(); + final Date revision = node.getQName().getRevision(); + + Map revisionToModule = namespaceAndRevisionToModule.get(namespace); + if (revisionToModule == null) { + revisionToModule = new HashMap<>(); + namespaceAndRevisionToModule.put(namespace, revisionToModule); + } + Module module = revisionToModule.get(revision); + if (module == null) { + module = schemaContext.findModuleByNamespaceAndRevision(namespace, revision); + revisionToModule.put(revision, module); + } + if (module != null) { + return module.getName() + ":" + node.getQName().getLocalName(); + } + return node.getQName().getLocalName(); + } + + public static String resolveNodesName(final SchemaNode node, final Module module, final SchemaContext schemaContext) { + if (node.getQName().getNamespace().equals(module.getQNameModule().getNamespace()) + && node.getQName().getRevision().equals(module.getQNameModule().getRevision())) { + return node.getQName().getLocalName(); + } else { + return resolveFullNameFromNode(node, schemaContext); + } + } + + private static boolean isEqualNamespaceAndRevision(QName parentQName, QName nodeQName) { + if (parentQName == null) { + if (nodeQName == null) { + return true; + } + return false; + } + return parentQName.getNamespace().equals(nodeQName.getNamespace()) + && parentQName.getRevision().equals(nodeQName.getRevision()); + } +} diff --git a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGeneratorTest.java b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGeneratorTest.java index 07c9378439..19f82b5386 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGeneratorTest.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGeneratorTest.java @@ -5,16 +5,17 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.common.base.Preconditions; import java.io.File; import java.util.Arrays; +import java.util.HashSet; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; - import javax.ws.rs.core.UriInfo; - import junit.framework.Assert; - +import org.json.JSONException; +import org.json.JSONObject; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -25,8 +26,8 @@ import org.opendaylight.controller.sal.rest.doc.swagger.Operation; import org.opendaylight.controller.sal.rest.doc.swagger.Resource; import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList; import org.opendaylight.yangtools.yang.model.api.Module; - -import com.google.common.base.Preconditions; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import 
org.opendaylight.yangtools.yang.parser.impl.YangParserImpl; /** * @@ -36,12 +37,14 @@ public class ApiDocGeneratorTest { public static final String HTTP_HOST = "http://host"; private ApiDocGenerator generator; private DocGenTestHelper helper; + private SchemaContext schemaContext; @Before public void setUp() throws Exception { generator = new ApiDocGenerator(); helper = new DocGenTestHelper(); helper.setUp(); + schemaContext = new YangParserImpl().resolveSchemaContext(new HashSet(helper.getModules().values())); } @After @@ -59,8 +62,9 @@ public class ApiDocGeneratorTest { for (Entry m : helper.getModules().entrySet()) { if (m.getKey().getAbsolutePath().endsWith("toaster_short.yang")) { ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), - "http://localhost:8080/restconf", ""); + "http://localhost:8080/restconf", "",schemaContext); validateToaster(doc); + validateTosterDocContainsModulePrefixes(doc); Assert.assertNotNull(doc); } } @@ -73,7 +77,7 @@ public class ApiDocGeneratorTest { for (Entry m : helper.getModules().entrySet()) { if (m.getKey().getAbsolutePath().endsWith("toaster.yang")) { ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), - "http://localhost:8080/restconf", ""); + "http://localhost:8080/restconf", "",schemaContext); Assert.assertNotNull(doc); //testing bugs.opendaylight.org bug 1290. UnionType model type. @@ -84,11 +88,21 @@ public class ApiDocGeneratorTest { } } + /** + * Tests whether from yang files are generated all required paths for HTTP operations (GET, DELETE, PUT, POST) + * + * If container | list is augmented then in path there should be specified module name followed with collon (e. g. + * "/config/module1:element1/element2/module2:element3") + * + * @param doc + * @throws Exception + */ private void validateToaster(ApiDeclaration doc) throws Exception { Set expectedUrls = new TreeSet<>(Arrays.asList(new String[] { "/config/toaster2:toaster/", "/operational/toaster2:toaster/", "/operations/toaster2:cancel-toast", "/operations/toaster2:make-toast", - "/operations/toaster2:restock-toaster" })); + "/operations/toaster2:restock-toaster", + "/config/toaster2:toaster/toasterSlot/{slotId}/toaster-augmented:slotInfo/" })); Set actualUrls = new TreeSet<>(); @@ -130,7 +144,7 @@ public class ApiDocGeneratorTest { @Test public void testGetResourceListing() throws Exception { UriInfo info = helper.createMockUriInfo(HTTP_HOST); - SchemaService mockSchemaService = helper.createMockSchemaService(); + SchemaService mockSchemaService = helper.createMockSchemaService(schemaContext); generator.setSchemaService(mockSchemaService); @@ -154,4 +168,30 @@ public class ApiDocGeneratorTest { assertEquals(HTTP_HOST + "/toaster2(2009-11-20)", toaster2.getPath()); } + private void validateTosterDocContainsModulePrefixes(ApiDeclaration doc) { + JSONObject topLevelJson = doc.getModels(); + try { + JSONObject configToaster = topLevelJson.getJSONObject("(config)toaster"); + assertNotNull("(config)toaster JSON object missing", configToaster); + //without module prefix + containsProperties(configToaster, "toasterSlot"); + + JSONObject toasterSlot = topLevelJson.getJSONObject("(config)toasterSlot"); + assertNotNull("(config)toasterSlot JSON object missing", toasterSlot); + //with module prefix + containsProperties(toasterSlot, "toaster-augmented:slotInfo"); + + } catch (JSONException e) { + fail("Json exception while reading JSON object. 
Original message "+e.getMessage()); + } + } + + private void containsProperties(final JSONObject jsonObject,final String...properties) throws JSONException { + for (String property : properties) { + JSONObject propertiesObject = jsonObject.getJSONObject("properties"); + assertNotNull("Properties object missing in ", propertiesObject); + JSONObject concretePropertyObject = propertiesObject.getJSONObject(property); + assertNotNull(property + " is missing",concretePropertyObject); + } + } } diff --git a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/DocGenTestHelper.java b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/DocGenTestHelper.java index 0f15d00e79..7701d2a735 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/DocGenTestHelper.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/DocGenTestHelper.java @@ -10,6 +10,9 @@ package org.opendaylight.controller.sal.rest.doc.impl; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule; import java.io.File; import java.io.FileNotFoundException; import java.net.URI; @@ -19,23 +22,17 @@ import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; - import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; - import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.opendaylight.controller.sal.core.api.model.SchemaService; import org.opendaylight.yangtools.yang.model.api.Module; import org.opendaylight.yangtools.yang.model.api.SchemaContext; -import org.opendaylight.yangtools.yang.model.parser.api.YangModelParser; +import org.opendaylight.yangtools.yang.model.parser.api.YangContextParser; import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule; - public class DocGenTestHelper { private Map modules; @@ -45,7 +42,7 @@ public class DocGenTestHelper { URISyntaxException { URI resourceDirUri = getClass().getResource(resourceDirectory).toURI(); - final YangModelParser parser = new YangParserImpl(); + final YangContextParser parser = new YangParserImpl(); final File testDir = new File(resourceDirUri); final String[] fileList = testDir.list(); final List testFiles = new ArrayList<>(); @@ -90,6 +87,7 @@ public class DocGenTestHelper { final ArgumentCaptor moduleCapture = ArgumentCaptor.forClass(String.class); final ArgumentCaptor dateCapture = ArgumentCaptor.forClass(Date.class); + final ArgumentCaptor namespaceCapture = ArgumentCaptor.forClass(URI.class); when(mockContext.findModuleByName(moduleCapture.capture(), dateCapture.capture())).then( new Answer() { @Override @@ -104,6 +102,20 @@ public class DocGenTestHelper { return null; } }); + when(mockContext.findModuleByNamespaceAndRevision(namespaceCapture.capture(), dateCapture.capture())).then( + new Answer() { + @Override + public Module answer(InvocationOnMock invocation) throws Throwable { + URI namespace = namespaceCapture.getValue(); + Date date = dateCapture.getValue(); + for (Module m : 
modules.values()) { + if (m.getNamespace().equals(namespace) && m.getRevision().equals(date)) { + return m; + } + } + return null; + } + }); return mockContext; } diff --git a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/MountPointSwaggerTest.java b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/MountPointSwaggerTest.java index bba8ed9ca6..940b99fd99 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/MountPointSwaggerTest.java +++ b/opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/MountPointSwaggerTest.java @@ -14,12 +14,11 @@ import static org.mockito.Mockito.when; import java.net.URISyntaxException; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.TreeSet; - import javax.ws.rs.core.UriInfo; - import org.junit.Before; import org.junit.Test; import org.opendaylight.controller.sal.core.api.model.SchemaService; @@ -33,7 +32,9 @@ import org.opendaylight.controller.sal.rest.doc.swagger.Resource; import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList; import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; +import org.opendaylight.yangtools.yang.model.api.Module; import org.opendaylight.yangtools.yang.model.api.SchemaContext; +import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl; public class MountPointSwaggerTest { @@ -44,12 +45,14 @@ public class MountPointSwaggerTest { private static final String INSTANCE_URL = "nodes/node/123/"; private MountPointSwagger swagger; private DocGenTestHelper helper; + private SchemaContext schemaContext; @Before public void setUp() throws Exception { swagger = new MountPointSwagger(); helper = new DocGenTestHelper(); helper.setUp(); + schemaContext = new YangParserImpl().resolveSchemaContext(new HashSet(helper.getModules().values())); } @Test() diff --git a/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_augmented.yang b/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_augmented.yang new file mode 100644 index 0000000000..4db7897a99 --- /dev/null +++ b/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_augmented.yang @@ -0,0 +1,21 @@ +module toaster-augmented { + + yang-version 1; + + namespace + "http://netconfcentral.org/ns/toaster/augmented"; + + prefix toast; + import toaster2 {prefix tst; revision-date 2009-11-20;} + + revision "2014-7-14" { + } + + augment "/tst:toaster/tst:toasterSlot" { + container slotInfo { + leaf numberOfToastPrepared { + type uint32; + } + } + } +} \ No newline at end of file diff --git a/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_short.yang b/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_short.yang index a1d5ab0a12..6884076d5d 100644 --- a/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_short.yang +++ b/opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_short.yang @@ -83,6 +83,13 @@ Microsoft Toaster."; } + list toasterSlot { + key "slotId"; + leaf slotId { + type string; + } + } + leaf toasterModelNumber { type DisplayString; config false; diff --git a/opendaylight/md-sal/topology-lldp-discovery/pom.xml b/opendaylight/md-sal/topology-lldp-discovery/pom.xml index e01a0d5dcb..97ed15df19 100644 --- 
a/opendaylight/md-sal/topology-lldp-discovery/pom.xml +++ b/opendaylight/md-sal/topology-lldp-discovery/pom.xml @@ -35,11 +35,11 @@ org.opendaylight.controller - sal + sal-binding-api org.opendaylight.controller - sal-binding-api + liblldp org.opendaylight.controller.model diff --git a/opendaylight/md-sal/topology-lldp-discovery/src/main/java/org/opendaylight/md/controller/topology/lldp/utils/LLDPDiscoveryUtils.java b/opendaylight/md-sal/topology-lldp-discovery/src/main/java/org/opendaylight/md/controller/topology/lldp/utils/LLDPDiscoveryUtils.java index 82ab443246..0d1ba11ee1 100644 --- a/opendaylight/md-sal/topology-lldp-discovery/src/main/java/org/opendaylight/md/controller/topology/lldp/utils/LLDPDiscoveryUtils.java +++ b/opendaylight/md-sal/topology-lldp-discovery/src/main/java/org/opendaylight/md/controller/topology/lldp/utils/LLDPDiscoveryUtils.java @@ -9,10 +9,10 @@ package org.opendaylight.md.controller.topology.lldp.utils; import java.nio.charset.Charset; -import org.opendaylight.controller.sal.packet.Ethernet; -import org.opendaylight.controller.sal.packet.LLDP; -import org.opendaylight.controller.sal.packet.LLDPTLV; -import org.opendaylight.controller.sal.utils.NetUtils; +import org.opendaylight.controller.liblldp.Ethernet; +import org.opendaylight.controller.liblldp.LLDP; +import org.opendaylight.controller.liblldp.LLDPTLV; +import org.opendaylight.controller.liblldp.NetUtils; import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorId; import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef; import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId; diff --git a/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusher.java b/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusherImpl.java similarity index 85% rename from opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusher.java rename to opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusherImpl.java index fff8d611b7..5f311b5232 100644 --- a/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusher.java +++ b/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/ConfigPusherImpl.java @@ -10,9 +10,6 @@ package org.opendaylight.controller.netconf.persist.impl; import static com.google.common.base.Preconditions.checkNotNull; -import com.google.common.base.Function; -import com.google.common.base.Stopwatch; -import com.google.common.collect.Collections2; import java.io.IOException; import java.io.InputStream; import java.util.Collection; @@ -23,10 +20,17 @@ import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; + import javax.annotation.concurrent.Immutable; +import javax.management.MBeanServerConnection; + import org.opendaylight.controller.config.api.ConflictingVersionException; +import org.opendaylight.controller.config.persist.api.ConfigPusher; import org.opendaylight.controller.config.persist.api.ConfigSnapshotHolder; +import org.opendaylight.controller.config.persist.api.Persister; import 
org.opendaylight.controller.netconf.api.NetconfDocumentedException; import org.opendaylight.controller.netconf.api.NetconfMessage; import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants; @@ -45,22 +49,60 @@ import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.SAXException; +import com.google.common.base.Function; +import com.google.common.base.Stopwatch; +import com.google.common.collect.Collections2; + @Immutable -public class ConfigPusher { - private static final Logger logger = LoggerFactory.getLogger(ConfigPusher.class); +public class ConfigPusherImpl implements ConfigPusher { + private static final Logger logger = LoggerFactory.getLogger(ConfigPusherImpl.class); private final long maxWaitForCapabilitiesMillis; private final long conflictingVersionTimeoutMillis; private final NetconfOperationServiceFactory configNetconfConnector; + private static final int QUEUE_SIZE = 100; + private BlockingQueue> queue = new LinkedBlockingQueue>(QUEUE_SIZE); - public ConfigPusher(NetconfOperationServiceFactory configNetconfConnector, long maxWaitForCapabilitiesMillis, + public ConfigPusherImpl(NetconfOperationServiceFactory configNetconfConnector, long maxWaitForCapabilitiesMillis, long conflictingVersionTimeoutMillis) { this.configNetconfConnector = configNetconfConnector; this.maxWaitForCapabilitiesMillis = maxWaitForCapabilitiesMillis; this.conflictingVersionTimeoutMillis = conflictingVersionTimeoutMillis; } - public synchronized LinkedHashMap pushConfigs(List configs) throws NetconfDocumentedException { + public void process(List autoCloseables, MBeanServerConnection platformMBeanServer, Persister persisterAggregator) throws InterruptedException { + List configs; + while(true) { + configs = queue.take(); + try { + internalPushConfigs(configs); + ConfigPersisterNotificationHandler jmxNotificationHandler = new ConfigPersisterNotificationHandler(platformMBeanServer, persisterAggregator); + synchronized (autoCloseables) { + autoCloseables.add(jmxNotificationHandler); + } + /* + * We have completed initial configuration. At this point + * it is good idea to perform garbage collection to prune + * any garbage we have accumulated during startup. 
+ */ + logger.debug("Running post-initialization garbage collection..."); + System.gc(); + logger.debug("Post-initialization garbage collection completed."); + logger.debug("ConfigPusher has pushed configs {}, gc completed", configs); + } + catch (NetconfDocumentedException e) { + logger.error("Error pushing configs {}",configs); + throw new IllegalStateException(e); + } + } + } + + public void pushConfigs(List configs) throws InterruptedException { + logger.debug("Requested to push configs {}", configs); + this.queue.put(configs); + } + + private LinkedHashMap internalPushConfigs(List configs) throws NetconfDocumentedException { logger.debug("Last config snapshots to be pushed to netconf: {}", configs); LinkedHashMap result = new LinkedHashMap<>(); // start pushing snapshots: @@ -278,7 +320,7 @@ public class ConfigPusher { private static NetconfMessage getCommitMessage() { String resource = "/netconfOp/commit.xml"; - try (InputStream stream = ConfigPusher.class.getResourceAsStream(resource)) { + try (InputStream stream = ConfigPusherImpl.class.getResourceAsStream(resource)) { checkNotNull(stream, "Unable to load resource " + resource); return new NetconfMessage(XmlUtil.readXmlToDocument(stream)); } catch (SAXException | IOException e) { diff --git a/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/osgi/ConfigPersisterActivator.java b/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/osgi/ConfigPersisterActivator.java index 48ae0cb91a..0a48e6c67d 100644 --- a/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/osgi/ConfigPersisterActivator.java +++ b/opendaylight/netconf/config-persister-impl/src/main/java/org/opendaylight/controller/netconf/persist/impl/osgi/ConfigPersisterActivator.java @@ -8,13 +8,18 @@ package org.opendaylight.controller.netconf.persist.impl.osgi; -import com.google.common.annotations.VisibleForTesting; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.management.MBeanServer; + +import org.opendaylight.controller.config.persist.api.ConfigPusher; import org.opendaylight.controller.config.persist.api.ConfigSnapshotHolder; -import org.opendaylight.controller.netconf.api.NetconfDocumentedException; import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider; import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory; -import org.opendaylight.controller.netconf.persist.impl.ConfigPersisterNotificationHandler; -import org.opendaylight.controller.netconf.persist.impl.ConfigPusher; +import org.opendaylight.controller.netconf.persist.impl.ConfigPusherImpl; import org.opendaylight.controller.netconf.persist.impl.PersisterAggregator; import org.opendaylight.controller.netconf.util.CloseableUtil; import org.osgi.framework.BundleActivator; @@ -23,16 +28,13 @@ import org.osgi.framework.Constants; import org.osgi.framework.Filter; import org.osgi.framework.InvalidSyntaxException; import org.osgi.framework.ServiceReference; +import org.osgi.framework.ServiceRegistration; import org.osgi.util.tracker.ServiceTracker; import org.osgi.util.tracker.ServiceTrackerCustomizer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.management.MBeanServer; -import java.lang.management.ManagementFactory; -import java.util.ArrayList; -import java.util.List; -import 
java.util.concurrent.TimeUnit; +import com.google.common.annotations.VisibleForTesting; public class ConfigPersisterActivator implements BundleActivator { @@ -49,11 +51,15 @@ public class ConfigPersisterActivator implements BundleActivator { public static final String STORAGE_ADAPTER_CLASS_PROP_SUFFIX = "storageAdapterClass"; private List autoCloseables; + private volatile BundleContext context; + ServiceRegistration registration; @Override public void start(final BundleContext context) throws Exception { logger.debug("ConfigPersister starting"); + this.context = context; + autoCloseables = new ArrayList<>(); PropertiesProviderBaseImpl propertiesProvider = new PropertiesProviderBaseImpl(context); @@ -81,8 +87,14 @@ public class ConfigPersisterActivator implements BundleActivator { } @Override - public synchronized void stop(BundleContext context) throws Exception { - CloseableUtil.closeAll(autoCloseables); + public void stop(BundleContext context) throws Exception { + synchronized(autoCloseables) { + CloseableUtil.closeAll(autoCloseables); + if (registration != null) { + registration.unregister(); + } + this.context = null; + } } @@ -147,35 +159,29 @@ public class ConfigPersisterActivator implements BundleActivator { logger.trace("Got InnerCustomizer.addingService {}", reference); NetconfOperationServiceFactory service = reference.getBundle().getBundleContext().getService(reference); - final ConfigPusher configPusher = new ConfigPusher(service, maxWaitForCapabilitiesMillis, conflictingVersionTimeoutMillis); + logger.debug("Creating new job queue"); + + final ConfigPusherImpl configPusher = new ConfigPusherImpl(service, maxWaitForCapabilitiesMillis, conflictingVersionTimeoutMillis); logger.debug("Configuration Persister got {}", service); + logger.debug("Context was {}", context); + logger.debug("Registration was {}", registration); + final Thread pushingThread = new Thread(new Runnable() { @Override public void run() { try { - configPusher.pushConfigs(configs); - } catch (NetconfDocumentedException e) { - logger.error("Error pushing configs {}",configs); - throw new IllegalStateException(e); + if(configs != null && !configs.isEmpty()) { + configPusher.pushConfigs(configs); + } + registration = context.registerService(ConfigPusher.class.getName(), configPusher, null); + configPusher.process(autoCloseables, platformMBeanServer, persisterAggregator); + } catch (InterruptedException e) { + logger.info("ConfigPusher thread stopped",e); } logger.info("Configuration Persister initialization completed."); - - /* - * We have completed initial configuration. At this point - * it is good idea to perform garbage collection to prune - * any garbage we have accumulated during startup. 
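The net effect of the ConfigPusherImpl and activator changes above is a small producer/consumer: pushConfigs() now only enqueues snapshot lists, while the dedicated "config-pusher" thread registers the pusher as a ConfigPusher OSGi service and drains the queue in process(). A minimal standalone sketch of that queue hand-off, using only JDK types (class and method names below are illustrative, not the controller's API):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class QueueingPusherSketch {
    private final BlockingQueue<List<String>> queue = new LinkedBlockingQueue<>(100);

    // Producer side: callers only enqueue a batch of snapshots and return.
    public void push(List<String> configs) throws InterruptedException {
        queue.put(configs);
    }

    // Consumer side: a single worker thread takes batches off the queue forever.
    public void process() throws InterruptedException {
        while (true) {
            List<String> configs = queue.take();
            System.out.println("pushing " + configs);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        final QueueingPusherSketch pusher = new QueueingPusherSketch();
        Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    pusher.process();
                } catch (InterruptedException e) {
                    // stop quietly when the worker is interrupted
                }
            }
        }, "config-pusher");
        worker.setDaemon(true);
        worker.start();
        pusher.push(Arrays.asList("snapshot-1", "snapshot-2"));
        Thread.sleep(200); // give the daemon worker a moment before the JVM exits
    }
}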
- */ - logger.debug("Running post-initialization garbage collection..."); - System.gc(); - logger.debug("Post-initialization garbage collection completed."); - - ConfigPersisterNotificationHandler jmxNotificationHandler = new ConfigPersisterNotificationHandler(platformMBeanServer, persisterAggregator); - synchronized (ConfigPersisterActivator.this) { - autoCloseables.add(jmxNotificationHandler); - } } }, "config-pusher"); - synchronized (ConfigPersisterActivator.this) { + synchronized (autoCloseables) { autoCloseables.add(new AutoCloseable() { @Override public void close() { diff --git a/opendaylight/netconf/config-persister-impl/src/test/java/org/opendaylight/controller/netconf/persist/impl/osgi/MockedBundleContext.java b/opendaylight/netconf/config-persister-impl/src/test/java/org/opendaylight/controller/netconf/persist/impl/osgi/MockedBundleContext.java index 95fd5f6549..3e5249468d 100644 --- a/opendaylight/netconf/config-persister-impl/src/test/java/org/opendaylight/controller/netconf/persist/impl/osgi/MockedBundleContext.java +++ b/opendaylight/netconf/config-persister-impl/src/test/java/org/opendaylight/controller/netconf/persist/impl/osgi/MockedBundleContext.java @@ -7,10 +7,23 @@ */ package org.opendaylight.controller.netconf.persist.impl.osgi; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Collections; +import java.util.Dictionary; +import java.util.List; +import java.util.SortedSet; +import java.util.TreeSet; + import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.opendaylight.controller.config.persist.api.ConfigPusher; import org.opendaylight.controller.config.persist.api.ConfigSnapshotHolder; import org.opendaylight.controller.config.persist.api.Persister; import org.opendaylight.controller.config.persist.api.PropertiesProvider; @@ -23,18 +36,10 @@ import org.osgi.framework.BundleContext; import org.osgi.framework.Filter; import org.osgi.framework.ServiceListener; import org.osgi.framework.ServiceReference; +import org.osgi.framework.ServiceRegistration; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.doReturn; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; final class MockedBundleContext { @Mock @@ -49,6 +54,8 @@ final class MockedBundleContext { NetconfOperationServiceFactory serviceFactory; @Mock private NetconfOperationService service; + @Mock + private ServiceRegistration registration; MockedBundleContext(long maxWaitForCapabilitiesMillis, long conflictingVersionTimeoutMillis) throws Exception { MockitoAnnotations.initMocks(this); @@ -77,6 +84,11 @@ final class MockedBundleContext { doReturn(Collections.emptySet()).when(service).getCapabilities(); doNothing().when(service).close(); doReturn("serviceFactoryMock").when(serviceFactory).toString(); + + doNothing().when(registration).unregister(); + doReturn(registration).when(context).registerService( + eq(ConfigPusher.class.getName()), 
any(Closeable.class), + any(Dictionary.class)); } public BundleContext getBundleContext() { diff --git a/opendaylight/netconf/netconf-cli/src/main/java/org/opendaylight/controller/netconf/cli/NetconfDeviceConnectionHandler.java b/opendaylight/netconf/netconf-cli/src/main/java/org/opendaylight/controller/netconf/cli/NetconfDeviceConnectionHandler.java index bd092bc5bd..d5c9dc6fc7 100644 --- a/opendaylight/netconf/netconf-cli/src/main/java/org/opendaylight/controller/netconf/cli/NetconfDeviceConnectionHandler.java +++ b/opendaylight/netconf/netconf-cli/src/main/java/org/opendaylight/controller/netconf/cli/NetconfDeviceConnectionHandler.java @@ -17,7 +17,7 @@ import org.opendaylight.controller.sal.connect.api.RemoteDeviceHandler; import org.opendaylight.controller.sal.connect.netconf.listener.NetconfSessionCapabilities; import org.opendaylight.controller.sal.core.api.RpcImplementation; import org.opendaylight.yangtools.yang.data.api.CompositeNode; -import org.opendaylight.yangtools.yang.model.api.SchemaContextProvider; +import org.opendaylight.yangtools.yang.model.api.SchemaContext; /** * Implementation of RemoteDeviceHandler. Integrates cli with @@ -41,7 +41,7 @@ public class NetconfDeviceConnectionHandler implements RemoteDeviceHandler netconf-api - netconf-cli + + netconf-config netconf-impl config-netconf-connector diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/Activator.java b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/Activator.java index 45070ca8b0..2723a3c205 100644 --- a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/Activator.java +++ b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/Activator.java @@ -16,6 +16,11 @@ import org.opendaylight.controller.networkconfig.neutron.INeutronFirewallCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronFirewallPolicyCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronFirewallRuleCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronFloatingIPCRUD; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerCRUD; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerHealthMonitorCRUD; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerListenerCRUD; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolCRUD; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolMemberCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronNetworkCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronPortCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronRouterCRUD; @@ -74,7 +79,12 @@ public class Activator extends ComponentActivatorAbstractBase { NeutronSecurityRuleInterface.class, NeutronFirewallInterface.class, NeutronFirewallPolicyInterface.class, - NeutronFirewallRuleInterface.class}; + NeutronFirewallRuleInterface.class, + NeutronLoadBalancerInterface.class, + NeutronLoadBalancerPoolInterface.class, + NeutronLoadBalancerListenerInterface.class, + NeutronLoadBalancerHealthMonitorInterface.class, + NeutronLoadBalancerPoolMemberInterface.class}; return res; } @@ -253,5 
+263,85 @@ public class Activator extends ComponentActivatorAbstractBase { "setConfigurationContainerService", "unsetConfigurationContainerService").setRequired(true)); } + if (imp.equals(NeutronLoadBalancerInterface.class)) { + // export the service + c.setInterface( + new String[] { INeutronLoadBalancerCRUD.class.getName(), + IConfigurationContainerAware.class.getName()}, null); + Dictionary props = new Hashtable(); + props.put("salListenerName", "neutron"); + c.add(createContainerServiceDependency(containerName) + .setService(IClusterContainerServices.class) + .setCallbacks("setClusterContainerService", + "unsetClusterContainerService").setRequired(true)); + c.add(createContainerServiceDependency(containerName).setService( + IConfigurationContainerService.class).setCallbacks( + "setConfigurationContainerService", + "unsetConfigurationContainerService").setRequired(true)); + } + if (imp.equals(NeutronLoadBalancerListenerInterface.class)) { + // export the service + c.setInterface( + new String[] { INeutronLoadBalancerListenerCRUD.class.getName(), + IConfigurationContainerAware.class.getName()}, null); + Dictionary props = new Hashtable(); + props.put("salListenerName", "neutron"); + c.add(createContainerServiceDependency(containerName) + .setService(IClusterContainerServices.class) + .setCallbacks("setClusterContainerService", + "unsetClusterContainerService").setRequired(true)); + c.add(createContainerServiceDependency(containerName).setService( + IConfigurationContainerService.class).setCallbacks( + "setConfigurationContainerService", + "unsetConfigurationContainerService").setRequired(true)); + } + if (imp.equals(NeutronLoadBalancerPoolInterface.class)) { + // export the service + c.setInterface( + new String[] { INeutronLoadBalancerPoolCRUD.class.getName(), + IConfigurationContainerAware.class.getName()}, null); + Dictionary props = new Hashtable(); + props.put("salListenerName", "neutron"); + c.add(createContainerServiceDependency(containerName) + .setService(IClusterContainerServices.class) + .setCallbacks("setClusterContainerService", + "unsetClusterContainerService").setRequired(true)); + c.add(createContainerServiceDependency(containerName).setService( + IConfigurationContainerService.class).setCallbacks( + "setConfigurationContainerService", + "unsetConfigurationContainerService").setRequired(true)); + } + if (imp.equals(NeutronLoadBalancerHealthMonitorInterface.class)) { + // export the service + c.setInterface( + new String[] { INeutronLoadBalancerHealthMonitorCRUD.class.getName(), + IConfigurationContainerAware.class.getName()}, null); + Dictionary props = new Hashtable(); + props.put("salListenerName", "neutron"); + c.add(createContainerServiceDependency(containerName) + .setService(IClusterContainerServices.class) + .setCallbacks("setClusterContainerService", + "unsetClusterContainerService").setRequired(true)); + c.add(createContainerServiceDependency(containerName).setService( + IConfigurationContainerService.class).setCallbacks( + "setConfigurationContainerService", + "unsetConfigurationContainerService").setRequired(true)); + } + if (imp.equals(NeutronLoadBalancerPoolMemberInterface.class)) { + // export the service + c.setInterface( + new String[] { INeutronLoadBalancerPoolMemberCRUD.class.getName(), + IConfigurationContainerAware.class.getName()}, null); + Dictionary props = new Hashtable(); + props.put("salListenerName", "neutron"); + c.add(createContainerServiceDependency(containerName) + .setService(IClusterContainerServices.class) + 
.setCallbacks("setClusterContainerService", + "unsetClusterContainerService").setRequired(true)); + c.add(createContainerServiceDependency(containerName).setService( + IConfigurationContainerService.class).setCallbacks( + "setConfigurationContainerService", + "unsetConfigurationContainerService").setRequired(true)); + } } } diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerHealthMonitorInterface.java b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerHealthMonitorInterface.java new file mode 100644 index 0000000000..b0beddf721 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerHealthMonitorInterface.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.implementation; + +import org.apache.felix.dm.Component; +import org.opendaylight.controller.clustering.services.CacheConfigException; +import org.opendaylight.controller.clustering.services.CacheExistException; +import org.opendaylight.controller.clustering.services.IClusterContainerServices; +import org.opendaylight.controller.clustering.services.IClusterServices; +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.opendaylight.controller.configuration.IConfigurationContainerAware; +import org.opendaylight.controller.configuration.IConfigurationContainerService; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerHealthMonitorCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerHealthMonitor; +import org.opendaylight.controller.sal.utils.IObjectReader; +import org.opendaylight.controller.sal.utils.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Dictionary; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class NeutronLoadBalancerHealthMonitorInterface implements INeutronLoadBalancerHealthMonitorCRUD, IConfigurationContainerAware, + IObjectReader { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancerHealthMonitorInterface.class); + private static final String FILE_NAME = "neutron.loadBalancerHealthMonitor.conf"; + private String containerName = null; + + private IClusterContainerServices clusterContainerService = null; + private IConfigurationContainerService configurationService; + private ConcurrentMap loadBalancerHealthMonitorDB; + + // methods needed for creating caches + void setClusterContainerService(IClusterContainerServices s) { + logger.debug("Cluster Service set"); + clusterContainerService = s; + } + + void unsetClusterContainerService(IClusterContainerServices s) { + if (clusterContainerService == s) { + logger.debug("Cluster Service 
removed!"); + clusterContainerService = null; + } + } + + public void setConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service set: {}", service); + configurationService = service; + } + + public void unsetConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service removed: {}", service); + configurationService = null; + } + + private void allocateCache() { + if (this.clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't create cache"); + return; + } + logger.debug("Creating Cache for Neutron LoadBalancerHealthMonitor"); + try { + // neutron caches + this.clusterContainerService.createCache("neutronLoadBalancerHealthMonitors", + EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL)); + } catch (CacheConfigException cce) { + logger.error("Cache couldn't be created for Neutron LoadBalancerHealthMonitor - check cache mode"); + } catch (CacheExistException cce) { + logger.error("Cache for Neutron LoadBalancerHealthMonitor already exists, destroy and recreate"); + } + logger.debug("Cache successfully created for Neutron LoadBalancerHealthMonitor"); + } + + @SuppressWarnings ({"unchecked"}) + private void retrieveCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't retrieve cache"); + return; + } + + logger.debug("Retrieving cache for Neutron LoadBalancerHealthMonitor"); + loadBalancerHealthMonitorDB = (ConcurrentMap) clusterContainerService + .getCache("neutronLoadBalancerHealthMonitors"); + if (loadBalancerHealthMonitorDB == null) { + logger.error("Cache couldn't be retrieved for Neutron LoadBalancerHealthMonitor"); + } + logger.debug("Cache was successfully retrieved for Neutron LoadBalancerHealthMonitor"); + } + + private void destroyCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterMger, can't destroy cache"); + return; + } + logger.debug("Destroying Cache for LoadBalancerHealthMonitor"); + clusterContainerService.destroyCache("neutronLoadBalancerHealthMonitors"); + } + + private void startUp() { + allocateCache(); + retrieveCache(); + loadConfiguration(); + } + + /** + * Function called by the dependency manager when all the required + * dependencies are satisfied + */ + void init(Component c) { + Dictionary props = c.getServiceProperties(); + if (props != null) { + this.containerName = (String) props.get("containerName"); + logger.debug("Running containerName: {}", this.containerName); + } else { + // In the Global instance case the containerName is empty + this.containerName = ""; + } + startUp(); + } + + /** + * Function called by the dependency manager when at least one dependency + * become unsatisfied or when the component is shutting down because for + * example bundle is being stopped. + */ + void destroy() { + destroyCache(); + } + + /** + * Function called by dependency manager after "init ()" is called and after + * the services provided by the class are registered in the service registry + */ + void start() { + } + + /** + * Function called by the dependency manager before the services exported by + * the component are unregistered, this will be followed by a "destroy ()" + * calls + */ + void stop() { + } + + // this method uses reflection to update an object from it's delta. 
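The overwrite() method that follows copies every non-null getX() value from the delta object onto the target through the matching setX() setter. A rough standalone version of the same pattern, factored as a reusable helper (DeltaMergeSketch and its Bean class are hypothetical, for illustration only):

import java.lang.reflect.Method;

public final class DeltaMergeSketch {

    // Copies every non-null getX() value from delta onto target's matching setX(...).
    public static boolean merge(Object target, Object delta) {
        for (Method setter : target.getClass().getMethods()) {
            if (!setter.getName().startsWith("set")
                    || !setter.getDeclaringClass().equals(target.getClass())) {
                continue;
            }
            String getterName = setter.getName().replaceFirst("set", "get");
            try {
                Method getter = delta.getClass().getMethod(getterName);
                Object value = getter.invoke(delta);
                if (value != null) {
                    setter.invoke(target, value);
                }
            } catch (Exception e) {
                return false; // missing getter or incompatible types
            }
        }
        return true;
    }

    // Hypothetical bean used only to exercise the sketch.
    public static class Bean {
        private String name;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        Bean target = new Bean();
        target.setName("old");
        Bean delta = new Bean();
        delta.setName("new");
        System.out.println(merge(target, delta) + " -> " + target.getName()); // true -> new
    }
}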
+ + private boolean overwrite(Object target, Object delta) { + Method[] methods = target.getClass().getMethods(); + + for (Method toMethod : methods) { + if (toMethod.getDeclaringClass().equals(target.getClass()) + && toMethod.getName().startsWith("set")) { + + String toName = toMethod.getName(); + String fromName = toName.replace("set", "get"); + + try { + Method fromMethod = delta.getClass().getMethod(fromName); + Object value = fromMethod.invoke(delta, (Object[]) null); + if (value != null) { + toMethod.invoke(target, value); + } + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + } + return true; + } + + @Override + public boolean neutronLoadBalancerHealthMonitorExists(String uuid) { + return loadBalancerHealthMonitorDB.containsKey(uuid); + } + + @Override + public NeutronLoadBalancerHealthMonitor getNeutronLoadBalancerHealthMonitor(String uuid) { + if (!neutronLoadBalancerHealthMonitorExists(uuid)) { + logger.debug("No LoadBalancerHealthMonitor has Been Defined"); + return null; + } + return loadBalancerHealthMonitorDB.get(uuid); + } + + @Override + public List getAllNeutronLoadBalancerHealthMonitors() { + Set allLoadBalancerHealthMonitors = new HashSet(); + for (Entry entry : loadBalancerHealthMonitorDB.entrySet()) { + NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor = entry.getValue(); + allLoadBalancerHealthMonitors.add(loadBalancerHealthMonitor); + } + logger.debug("Exiting getLoadBalancerHealthMonitors, Found {} OpenStackLoadBalancerHealthMonitor", allLoadBalancerHealthMonitors.size()); + List ans = new ArrayList(); + ans.addAll(allLoadBalancerHealthMonitors); + return ans; + } + + @Override + public boolean addNeutronLoadBalancerHealthMonitor(NeutronLoadBalancerHealthMonitor input) { + if (neutronLoadBalancerHealthMonitorExists(input.getLoadBalancerHealthMonitorID())) { + return false; + } + loadBalancerHealthMonitorDB.putIfAbsent(input.getLoadBalancerHealthMonitorID(), input); + //TODO: add code to find INeutronLoadBalancerHealthMonitorAware services and call newtorkCreated on them + return true; + } + + @Override + public boolean removeNeutronLoadBalancerHealthMonitor(String uuid) { + if (!neutronLoadBalancerHealthMonitorExists(uuid)) { + return false; + } + loadBalancerHealthMonitorDB.remove(uuid); + //TODO: add code to find INeutronLoadBalancerHealthMonitorAware services and call newtorkDeleted on them + return true; + } + + @Override + public boolean updateNeutronLoadBalancerHealthMonitor(String uuid, NeutronLoadBalancerHealthMonitor delta) { + if (!neutronLoadBalancerHealthMonitorExists(uuid)) { + return false; + } + NeutronLoadBalancerHealthMonitor target = loadBalancerHealthMonitorDB.get(uuid); + return overwrite(target, delta); + } + + @Override + public boolean neutronLoadBalancerHealthMonitorInUse(String loadBalancerHealthMonitorUUID) { + return !neutronLoadBalancerHealthMonitorExists(loadBalancerHealthMonitorUUID); + } + + private void loadConfiguration() { + for (ConfigurationObject conf : configurationService.retrieveConfiguration(this, FILE_NAME)) { + NeutronLoadBalancerHealthMonitor nn = (NeutronLoadBalancerHealthMonitor) conf; + loadBalancerHealthMonitorDB.put(nn.getLoadBalancerHealthMonitorID(), nn); + } + } + + @Override + public Status saveConfiguration() { + return configurationService.persistConfiguration(new ArrayList(loadBalancerHealthMonitorDB.values()), + FILE_NAME); + } + + @Override + public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException { + return 
ois.readObject(); + } +} diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerInterface.java b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerInterface.java new file mode 100644 index 0000000000..aa7280beed --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerInterface.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.implementation; + +import org.apache.felix.dm.Component; +import org.opendaylight.controller.clustering.services.CacheConfigException; +import org.opendaylight.controller.clustering.services.CacheExistException; +import org.opendaylight.controller.clustering.services.IClusterContainerServices; +import org.opendaylight.controller.clustering.services.IClusterServices; +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.opendaylight.controller.configuration.IConfigurationContainerAware; +import org.opendaylight.controller.configuration.IConfigurationContainerService; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancer; +import org.opendaylight.controller.sal.utils.IObjectReader; +import org.opendaylight.controller.sal.utils.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Dictionary; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class NeutronLoadBalancerInterface implements INeutronLoadBalancerCRUD, IConfigurationContainerAware, + IObjectReader { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancerInterface.class); + private static final String FILE_NAME = "neutron.loadBalancer.conf"; + private String containerName = null; + + private IClusterContainerServices clusterContainerService = null; + private IConfigurationContainerService configurationService; + private ConcurrentMap loadBalancerDB; + + // methods needed for creating caches + void setClusterContainerService(IClusterContainerServices s) { + logger.debug("Cluster Service set"); + clusterContainerService = s; + } + + void unsetClusterContainerService(IClusterContainerServices s) { + if (clusterContainerService == s) { + logger.debug("Cluster Service removed!"); + clusterContainerService = null; + } + } + + public void setConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service set: {}", service); + configurationService = service; + } + + public void unsetConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service removed: {}", service); + configurationService = null; + } + + 
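The NeutronLoadBalancerInterface class being added here mirrors NeutronLoadBalancerHealthMonitorInterface above almost line for line, as do the Listener, Pool and PoolMember classes further below: the same cluster-cache wiring, the same lifecycle callbacks, the same UUID-keyed ConcurrentMap CRUD. A purely hypothetical sketch of how that repeated CRUD core could sit in one generic base class (every name below is invented for illustration; this is not part of the commit):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class AbstractCrudSketch {

    // Hypothetical shared base for the UUID-keyed CRUD logic repeated in each class.
    static abstract class AbstractNeutronCrud<T> {
        protected final ConcurrentMap<String, T> db = new ConcurrentHashMap<>();

        protected abstract String idOf(T object);

        public boolean exists(String uuid) { return db.containsKey(uuid); }

        public T get(String uuid) { return db.get(uuid); }

        public List<T> getAll() { return new ArrayList<>(db.values()); }

        // Succeeds only if the UUID was not present yet.
        public boolean add(T object) { return db.putIfAbsent(idOf(object), object) == null; }

        // Succeeds only if the UUID was present.
        public boolean remove(String uuid) { return db.remove(uuid) != null; }
    }

    // Minimal stand-in for one of the Neutron POJOs.
    static class Pool {
        final String id;
        Pool(String id) { this.id = id; }
    }

    static class PoolCrud extends AbstractNeutronCrud<Pool> {
        @Override
        protected String idOf(Pool pool) { return pool.id; }
    }

    public static void main(String[] args) {
        PoolCrud crud = new PoolCrud();
        System.out.println(crud.add(new Pool("p1"))); // true
        System.out.println(crud.add(new Pool("p1"))); // false: duplicate UUID
        System.out.println(crud.exists("p1"));        // true
        System.out.println(crud.remove("p2"));        // false: unknown UUID
    }
}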
private void allocateCache() { + if (this.clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't create cache"); + return; + } + logger.debug("Creating Cache for Neutron LoadBalancer"); + try { + // neutron caches + this.clusterContainerService.createCache("neutronLoadBalancers", + EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL)); + } catch (CacheConfigException cce) { + logger.error("Cache couldn't be created for Neutron LoadBalancer - check cache mode"); + } catch (CacheExistException cce) { + logger.error("Cache for Neutron LoadBalancer already exists, destroy and recreate"); + } + logger.debug("Cache successfully created for Neutron LoadBalancer"); + } + + @SuppressWarnings ({"unchecked"}) + private void retrieveCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't retrieve cache"); + return; + } + + logger.debug("Retrieving cache for Neutron LoadBalancer"); + loadBalancerDB = (ConcurrentMap) clusterContainerService + .getCache("neutronLoadBalancers"); + if (loadBalancerDB == null) { + logger.error("Cache couldn't be retrieved for Neutron LoadBalancer"); + } + logger.debug("Cache was successfully retrieved for Neutron LoadBalancer"); + } + + private void destroyCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterMger, can't destroy cache"); + return; + } + logger.debug("Destroying Cache for LoadBalancer"); + clusterContainerService.destroyCache("neutronLoadBalancers"); + } + + private void startUp() { + allocateCache(); + retrieveCache(); + loadConfiguration(); + } + + /** + * Function called by the dependency manager when all the required + * dependencies are satisfied + */ + void init(Component c) { + Dictionary props = c.getServiceProperties(); + if (props != null) { + this.containerName = (String) props.get("containerName"); + logger.debug("Running containerName: {}", this.containerName); + } else { + // In the Global instance case the containerName is empty + this.containerName = ""; + } + startUp(); + } + + /** + * Function called by the dependency manager when at least one dependency + * become unsatisfied or when the component is shutting down because for + * example bundle is being stopped. + */ + void destroy() { + destroyCache(); + } + + /** + * Function called by dependency manager after "init ()" is called and after + * the services provided by the class are registered in the service registry + */ + void start() { + } + + /** + * Function called by the dependency manager before the services exported by + * the component are unregistered, this will be followed by a "destroy ()" + * calls + */ + void stop() { + } + + // this method uses reflection to update an object from it's delta. 
+ + private boolean overwrite(Object target, Object delta) { + Method[] methods = target.getClass().getMethods(); + + for (Method toMethod : methods) { + if (toMethod.getDeclaringClass().equals(target.getClass()) + && toMethod.getName().startsWith("set")) { + + String toName = toMethod.getName(); + String fromName = toName.replace("set", "get"); + + try { + Method fromMethod = delta.getClass().getMethod(fromName); + Object value = fromMethod.invoke(delta, (Object[]) null); + if (value != null) { + toMethod.invoke(target, value); + } + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + } + return true; + } + + @Override + public boolean neutronLoadBalancerExists(String uuid) { + return loadBalancerDB.containsKey(uuid); + } + + @Override + public NeutronLoadBalancer getNeutronLoadBalancer(String uuid) { + if (!neutronLoadBalancerExists(uuid)) { + logger.debug("No LoadBalancer Have Been Defined"); + return null; + } + return loadBalancerDB.get(uuid); + } + + @Override + public List getAllNeutronLoadBalancers() { + Set allLoadBalancers = new HashSet(); + for (Entry entry : loadBalancerDB.entrySet()) { + NeutronLoadBalancer loadBalancer = entry.getValue(); + allLoadBalancers.add(loadBalancer); + } + logger.debug("Exiting getLoadBalancers, Found {} OpenStackLoadBalancer", allLoadBalancers.size()); + List ans = new ArrayList(); + ans.addAll(allLoadBalancers); + return ans; + } + + @Override + public boolean addNeutronLoadBalancer(NeutronLoadBalancer input) { + if (neutronLoadBalancerExists(input.getLoadBalancerID())) { + return false; + } + loadBalancerDB.putIfAbsent(input.getLoadBalancerID(), input); + //TODO: add code to find INeutronLoadBalancerAware services and call newtorkCreated on them + return true; + } + + @Override + public boolean removeNeutronLoadBalancer(String uuid) { + if (!neutronLoadBalancerExists(uuid)) { + return false; + } + loadBalancerDB.remove(uuid); + //TODO: add code to find INeutronLoadBalancerAware services and call newtorkDeleted on them + return true; + } + + @Override + public boolean updateNeutronLoadBalancer(String uuid, NeutronLoadBalancer delta) { + if (!neutronLoadBalancerExists(uuid)) { + return false; + } + NeutronLoadBalancer target = loadBalancerDB.get(uuid); + return overwrite(target, delta); + } + + @Override + public boolean neutronLoadBalancerInUse(String loadBalancerUUID) { + return !neutronLoadBalancerExists(loadBalancerUUID); + } + + private void loadConfiguration() { + for (ConfigurationObject conf : configurationService.retrieveConfiguration(this, FILE_NAME)) { + NeutronLoadBalancer nn = (NeutronLoadBalancer) conf; + loadBalancerDB.put(nn.getLoadBalancerID(), nn); + } + } + + @Override + public Status saveConfiguration() { + return configurationService.persistConfiguration(new ArrayList(loadBalancerDB.values()), + FILE_NAME); + } + + @Override + public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException { + return ois.readObject(); + } +} diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerListenerInterface.java b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerListenerInterface.java new file mode 100644 index 0000000000..3779863647 --- /dev/null +++ 
b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerListenerInterface.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.implementation; + +import org.apache.felix.dm.Component; +import org.opendaylight.controller.clustering.services.CacheConfigException; +import org.opendaylight.controller.clustering.services.CacheExistException; +import org.opendaylight.controller.clustering.services.IClusterContainerServices; +import org.opendaylight.controller.clustering.services.IClusterServices; +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.opendaylight.controller.configuration.IConfigurationContainerAware; +import org.opendaylight.controller.configuration.IConfigurationContainerService; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerListenerCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerListener; +import org.opendaylight.controller.sal.utils.IObjectReader; +import org.opendaylight.controller.sal.utils.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Dictionary; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class NeutronLoadBalancerListenerInterface implements INeutronLoadBalancerListenerCRUD, IConfigurationContainerAware, + IObjectReader { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancerListenerInterface.class); + private static final String FILE_NAME = "neutron.loadBalancerListener.conf"; + private String containerName = null; + + private IClusterContainerServices clusterContainerService = null; + private IConfigurationContainerService configurationService; + private ConcurrentMap loadBalancerListenerDB; + + // methods needed for creating caches + void setClusterContainerService(IClusterContainerServices s) { + logger.debug("Cluster Service set"); + clusterContainerService = s; + } + + void unsetClusterContainerService(IClusterContainerServices s) { + if (clusterContainerService == s) { + logger.debug("Cluster Service removed!"); + clusterContainerService = null; + } + } + + public void setConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service set: {}", service); + configurationService = service; + } + + public void unsetConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service removed: {}", service); + configurationService = null; + } + + private void allocateCache() { + if (this.clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't create cache"); + return; + } + logger.debug("Creating Cache for Neutron LoadBalancerListener"); + try { + // neutron caches + this.clusterContainerService.createCache("neutronLoadBalancerListeners", + 
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL)); + } catch (CacheConfigException cce) { + logger.error("Cache couldn't be created for Neutron LoadBalancerListener - check cache mode"); + } catch (CacheExistException cce) { + logger.error("Cache for Neutron LoadBalancerListener already exists, destroy and recreate"); + } + logger.debug("Cache successfully created for Neutron LoadBalancerListener"); + } + + @SuppressWarnings ({"unchecked"}) + private void retrieveCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't retrieve cache"); + return; + } + + logger.debug("Retrieving cache for Neutron LoadBalancerListener"); + loadBalancerListenerDB = (ConcurrentMap) clusterContainerService + .getCache("neutronLoadBalancerListeners"); + if (loadBalancerListenerDB == null) { + logger.error("Cache couldn't be retrieved for Neutron LoadBalancerListener"); + } + logger.debug("Cache was successfully retrieved for Neutron LoadBalancerListener"); + } + + private void destroyCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterMger, can't destroy cache"); + return; + } + logger.debug("Destroying Cache for LoadBalancerListener"); + clusterContainerService.destroyCache("neutronLoadBalancerListeners"); + } + + private void startUp() { + allocateCache(); + retrieveCache(); + loadConfiguration(); + } + + /** + * Function called by the dependency manager when all the required + * dependencies are satisfied + */ + void init(Component c) { + Dictionary props = c.getServiceProperties(); + if (props != null) { + this.containerName = (String) props.get("containerName"); + logger.debug("Running containerName: {}", this.containerName); + } else { + // In the Global instance case the containerName is empty + this.containerName = ""; + } + startUp(); + } + + /** + * Function called by the dependency manager when at least one dependency + * become unsatisfied or when the component is shutting down because for + * example bundle is being stopped. + */ + void destroy() { + destroyCache(); + } + + /** + * Function called by dependency manager after "init ()" is called and after + * the services provided by the class are registered in the service registry + */ + void start() { + } + + /** + * Function called by the dependency manager before the services exported by + * the component are unregistered, this will be followed by a "destroy ()" + * calls + */ + void stop() { + } + + // this method uses reflection to update an object from it's delta. 
+ + private boolean overwrite(Object target, Object delta) { + Method[] methods = target.getClass().getMethods(); + + for (Method toMethod : methods) { + if (toMethod.getDeclaringClass().equals(target.getClass()) + && toMethod.getName().startsWith("set")) { + + String toName = toMethod.getName(); + String fromName = toName.replace("set", "get"); + + try { + Method fromMethod = delta.getClass().getMethod(fromName); + Object value = fromMethod.invoke(delta, (Object[]) null); + if (value != null) { + toMethod.invoke(target, value); + } + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + } + return true; + } + + @Override + public boolean neutronLoadBalancerListenerExists(String uuid) { + return loadBalancerListenerDB.containsKey(uuid); + } + + @Override + public NeutronLoadBalancerListener getNeutronLoadBalancerListener(String uuid) { + if (!neutronLoadBalancerListenerExists(uuid)) { + logger.debug("No LoadBalancerListener Have Been Defined"); + return null; + } + return loadBalancerListenerDB.get(uuid); + } + + @Override + public List getAllNeutronLoadBalancerListeners() { + Set allLoadBalancerListeners = new HashSet(); + for (Entry entry : loadBalancerListenerDB.entrySet()) { + NeutronLoadBalancerListener loadBalancerListener = entry.getValue(); + allLoadBalancerListeners.add(loadBalancerListener); + } + logger.debug("Exiting getLoadBalancerListeners, Found {} OpenStackLoadBalancerListener", allLoadBalancerListeners.size()); + List ans = new ArrayList(); + ans.addAll(allLoadBalancerListeners); + return ans; + } + + @Override + public boolean addNeutronLoadBalancerListener(NeutronLoadBalancerListener input) { + if (neutronLoadBalancerListenerExists(input.getLoadBalancerListenerID())) { + return false; + } + loadBalancerListenerDB.putIfAbsent(input.getLoadBalancerListenerID(), input); + //TODO: add code to find INeutronLoadBalancerListenerAware services and call newtorkCreated on them + return true; + } + + @Override + public boolean removeNeutronLoadBalancerListener(String uuid) { + if (!neutronLoadBalancerListenerExists(uuid)) { + return false; + } + loadBalancerListenerDB.remove(uuid); + //TODO: add code to find INeutronLoadBalancerListenerAware services and call newtorkDeleted on them + return true; + } + + @Override + public boolean updateNeutronLoadBalancerListener(String uuid, NeutronLoadBalancerListener delta) { + if (!neutronLoadBalancerListenerExists(uuid)) { + return false; + } + NeutronLoadBalancerListener target = loadBalancerListenerDB.get(uuid); + return overwrite(target, delta); + } + + @Override + public boolean neutronLoadBalancerListenerInUse(String loadBalancerListenerUUID) { + return !neutronLoadBalancerListenerExists(loadBalancerListenerUUID); + } + + private void loadConfiguration() { + for (ConfigurationObject conf : configurationService.retrieveConfiguration(this, FILE_NAME)) { + NeutronLoadBalancerListener nn = (NeutronLoadBalancerListener) conf; + loadBalancerListenerDB.put(nn.getLoadBalancerListenerID(), nn); + } + } + + @Override + public Status saveConfiguration() { + return configurationService.persistConfiguration(new ArrayList(loadBalancerListenerDB.values()), + FILE_NAME); + } + + @Override + public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException { + return ois.readObject(); + } +} diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolInterface.java 
b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolInterface.java new file mode 100644 index 0000000000..34cdba3f04 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolInterface.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.implementation; + +import org.apache.felix.dm.Component; +import org.opendaylight.controller.clustering.services.CacheConfigException; +import org.opendaylight.controller.clustering.services.CacheExistException; +import org.opendaylight.controller.clustering.services.IClusterContainerServices; +import org.opendaylight.controller.clustering.services.IClusterServices; +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.opendaylight.controller.configuration.IConfigurationContainerAware; +import org.opendaylight.controller.configuration.IConfigurationContainerService; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPool; +import org.opendaylight.controller.sal.utils.IObjectReader; +import org.opendaylight.controller.sal.utils.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Dictionary; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class NeutronLoadBalancerPoolInterface implements INeutronLoadBalancerPoolCRUD, IConfigurationContainerAware, + IObjectReader { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancerPoolInterface.class); + private static final String FILE_NAME = "neutron.loadBalancerPool.conf"; + private String containerName = null; + + private IClusterContainerServices clusterContainerService = null; + private IConfigurationContainerService configurationService; + private ConcurrentMap loadBalancerPoolDB; + + // methods needed for creating caches + void setClusterContainerService(IClusterContainerServices s) { + logger.debug("Cluster Service set"); + clusterContainerService = s; + } + + void unsetClusterContainerService(IClusterContainerServices s) { + if (clusterContainerService == s) { + logger.debug("Cluster Service removed!"); + clusterContainerService = null; + } + } + + public void setConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service set: {}", service); + configurationService = service; + } + + public void unsetConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service removed: {}", service); + configurationService = null; + } + + private void allocateCache() { + if (this.clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't create cache"); + return; + } + 
logger.debug("Creating Cache for Neutron LoadBalancerPool"); + try { + // neutron caches + this.clusterContainerService.createCache("neutronLoadBalancerPools", + EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL)); + } catch (CacheConfigException cce) { + logger.error("Cache couldn't be created for Neutron LoadBalancerPool - check cache mode"); + } catch (CacheExistException cce) { + logger.error("Cache for Neutron LoadBalancerPool already exists, destroy and recreate"); + } + logger.debug("Cache successfully created for Neutron LoadBalancerPool"); + } + + @SuppressWarnings ({"unchecked"}) + private void retrieveCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't retrieve cache"); + return; + } + + logger.debug("Retrieving cache for Neutron LoadBalancerPool"); + loadBalancerPoolDB = (ConcurrentMap) clusterContainerService + .getCache("neutronLoadBalancerPools"); + if (loadBalancerPoolDB == null) { + logger.error("Cache couldn't be retrieved for Neutron LoadBalancerPool"); + } + logger.debug("Cache was successfully retrieved for Neutron LoadBalancerPool"); + } + + private void destroyCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterMger, can't destroy cache"); + return; + } + logger.debug("Destroying Cache for LoadBalancerPool"); + clusterContainerService.destroyCache("neutronLoadBalancerPools"); + } + + private void startUp() { + allocateCache(); + retrieveCache(); + loadConfiguration(); + } + + /** + * Function called by the dependency manager when all the required + * dependencies are satisfied + */ + void init(Component c) { + Dictionary props = c.getServiceProperties(); + if (props != null) { + this.containerName = (String) props.get("containerName"); + logger.debug("Running containerName: {}", this.containerName); + } else { + // In the Global instance case the containerName is empty + this.containerName = ""; + } + startUp(); + } + + /** + * Function called by the dependency manager when at least one dependency + * become unsatisfied or when the component is shutting down because for + * example bundle is being stopped. + */ + void destroy() { + destroyCache(); + } + + /** + * Function called by dependency manager after "init ()" is called and after + * the services provided by the class are registered in the service registry + */ + void start() { + } + + /** + * Function called by the dependency manager before the services exported by + * the component are unregistered, this will be followed by a "destroy ()" + * calls + */ + void stop() { + } + + // this method uses reflection to update an object from it's delta. 
+ + private boolean overwrite(Object target, Object delta) { + Method[] methods = target.getClass().getMethods(); + + for (Method toMethod : methods) { + if (toMethod.getDeclaringClass().equals(target.getClass()) + && toMethod.getName().startsWith("set")) { + + String toName = toMethod.getName(); + String fromName = toName.replace("set", "get"); + + try { + Method fromMethod = delta.getClass().getMethod(fromName); + Object value = fromMethod.invoke(delta, (Object[]) null); + if (value != null) { + toMethod.invoke(target, value); + } + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + } + return true; + } + + @Override + public boolean neutronLoadBalancerPoolExists(String uuid) { + return loadBalancerPoolDB.containsKey(uuid); + } + + @Override + public NeutronLoadBalancerPool getNeutronLoadBalancerPool(String uuid) { + if (!neutronLoadBalancerPoolExists(uuid)) { + logger.debug("No LoadBalancerPool has Been Defined"); + return null; + } + return loadBalancerPoolDB.get(uuid); + } + + @Override + public List getAllNeutronLoadBalancerPools() { + Set allLoadBalancerPools = new HashSet(); + for (Entry entry : loadBalancerPoolDB.entrySet()) { + NeutronLoadBalancerPool loadBalancerPool = entry.getValue(); + allLoadBalancerPools.add(loadBalancerPool); + } + logger.debug("Exiting getLoadBalancerPools, Found {} OpenStackLoadBalancerPool", allLoadBalancerPools.size()); + List ans = new ArrayList(); + ans.addAll(allLoadBalancerPools); + return ans; + } + + @Override + public boolean addNeutronLoadBalancerPool(NeutronLoadBalancerPool input) { + if (neutronLoadBalancerPoolExists(input.getLoadBalancerPoolID())) { + return false; + } + loadBalancerPoolDB.putIfAbsent(input.getLoadBalancerPoolID(), input); + //TODO: add code to find INeutronLoadBalancerPoolAware services and call newtorkCreated on them + return true; + } + + @Override + public boolean removeNeutronLoadBalancerPool(String uuid) { + if (!neutronLoadBalancerPoolExists(uuid)) { + return false; + } + loadBalancerPoolDB.remove(uuid); + //TODO: add code to find INeutronLoadBalancerPoolAware services and call newtorkDeleted on them + return true; + } + + @Override + public boolean updateNeutronLoadBalancerPool(String uuid, NeutronLoadBalancerPool delta) { + if (!neutronLoadBalancerPoolExists(uuid)) { + return false; + } + NeutronLoadBalancerPool target = loadBalancerPoolDB.get(uuid); + return overwrite(target, delta); + } + + @Override + public boolean neutronLoadBalancerPoolInUse(String loadBalancerPoolUUID) { + return !neutronLoadBalancerPoolExists(loadBalancerPoolUUID); + } + + private void loadConfiguration() { + for (ConfigurationObject conf : configurationService.retrieveConfiguration(this, FILE_NAME)) { + NeutronLoadBalancerPool nn = (NeutronLoadBalancerPool) conf; + loadBalancerPoolDB.put(nn.getLoadBalancerPoolID(), nn); + } + } + + @Override + public Status saveConfiguration() { + return configurationService.persistConfiguration(new ArrayList(loadBalancerPoolDB.values()), + FILE_NAME); + } + + @Override + public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException { + return ois.readObject(); + } +} diff --git a/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolMemberInterface.java 
b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolMemberInterface.java new file mode 100644 index 0000000000..7418bb2f3d --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/implementation/src/main/java/org/opendaylight/controller/networkconfig/neutron/implementation/NeutronLoadBalancerPoolMemberInterface.java @@ -0,0 +1,267 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.implementation; + +import org.apache.felix.dm.Component; +import org.opendaylight.controller.clustering.services.CacheConfigException; +import org.opendaylight.controller.clustering.services.CacheExistException; +import org.opendaylight.controller.clustering.services.IClusterContainerServices; +import org.opendaylight.controller.clustering.services.IClusterServices; +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.opendaylight.controller.configuration.IConfigurationContainerAware; +import org.opendaylight.controller.configuration.IConfigurationContainerService; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolMemberCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPoolMember; +import org.opendaylight.controller.sal.utils.IObjectReader; +import org.opendaylight.controller.sal.utils.Status; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Dictionary; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +public class NeutronLoadBalancerPoolMemberInterface + implements INeutronLoadBalancerPoolMemberCRUD, IConfigurationContainerAware, + IObjectReader { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancerPoolMemberInterface.class); + private static final String FILE_NAME = "neutron.loadBalancerPoolMember.conf"; + private String containerName = null; + + private IClusterContainerServices clusterContainerService = null; + private IConfigurationContainerService configurationService; + private ConcurrentMap loadBalancerPoolMemberDB; + + // methods needed for creating caches + void setClusterContainerService(IClusterContainerServices s) { + logger.debug("Cluster Service set"); + clusterContainerService = s; + } + + void unsetClusterContainerService(IClusterContainerServices s) { + if (clusterContainerService == s) { + logger.debug("Cluster Service removed!"); + clusterContainerService = null; + } + } + + public void setConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service set: {}", service); + configurationService = service; + } + + public void unsetConfigurationContainerService(IConfigurationContainerService service) { + logger.trace("Configuration service removed: {}", service); + configurationService = null; + } + + private void allocateCache() { + if (this.clusterContainerService == null) { + logger.error("un-initialized 
clusterContainerService, can't create cache"); + return; + } + logger.debug("Creating Cache for Neutron LoadBalancerPoolMember"); + try { + // neutron caches + this.clusterContainerService.createCache("neutronLoadBalancerPoolMembers", + EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL)); + } catch(CacheConfigException cce) { + logger.error("Cache couldn't be created for Neutron LoadBalancerPoolMember - check cache mode"); + } catch(CacheExistException cce) { + logger.error("Cache for Neutron LoadBalancerPoolMember already exists, destroy and recreate"); + } + logger.debug("Cache successfully created for Neutron LoadBalancerPoolMember"); + } + + @SuppressWarnings({"unchecked"}) + private void retrieveCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't retrieve cache"); + return; + } + + logger.debug("Retrieving cache for Neutron LoadBalancerPoolMember"); + loadBalancerPoolMemberDB = (ConcurrentMap) clusterContainerService + .getCache("neutronLoadBalancerPoolMembers"); + if (loadBalancerPoolMemberDB == null) { + logger.error("Cache couldn't be retrieved for Neutron LoadBalancerPoolMember"); + } + logger.debug("Cache was successfully retrieved for Neutron LoadBalancerPoolMember"); + } + + private void destroyCache() { + if (clusterContainerService == null) { + logger.error("un-initialized clusterContainerService, can't destroy cache"); + return; + } + logger.debug("Destroying Cache for LoadBalancerPoolMember"); + clusterContainerService.destroyCache("neutronLoadBalancerPoolMembers"); + } + + private void startUp() { + allocateCache(); + retrieveCache(); + loadConfiguration(); + } + + /** + * Function called by the dependency manager when all the required + * dependencies are satisfied + */ + void init(Component c) { + Dictionary props = c.getServiceProperties(); + if (props != null) { + this.containerName = (String) props.get("containerName"); + logger.debug("Running containerName: {}", this.containerName); + } else { + // In the Global instance case the containerName is empty + this.containerName = ""; + } + startUp(); + } + + /** + * Function called by the dependency manager when at least one dependency + * becomes unsatisfied or when the component is shutting down because, for + * example, the bundle is being stopped. + */ + void destroy() { + destroyCache(); + } + + /** + * Function called by the dependency manager after "init ()" is called and after + * the services provided by the class are registered in the service registry + */ + void start() { + } + + /** + * Function called by the dependency manager before the services exported by + * the component are unregistered, this will be followed by a "destroy ()" + * call + */ + void stop() { + } + + // this method uses reflection to update an object from its delta.
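+    // Note on the reflection-based update below: it is best-effort -- if a getter lookup or
+    // invocation fails, the exception is printed and overwrite() returns false, but setters
+    // that already ran on the target are not rolled back, so the cached pool member can be
+    // left partially updated.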
+ + private boolean overwrite(Object target, Object delta) { + Method[] methods = target.getClass().getMethods(); + + for (Method toMethod : methods) { + if (toMethod.getDeclaringClass().equals(target.getClass()) + && toMethod.getName().startsWith("set")) { + + String toName = toMethod.getName(); + String fromName = toName.replace("set", "get"); + + try { + Method fromMethod = delta.getClass().getMethod(fromName); + Object value = fromMethod.invoke(delta, (Object[]) null); + if (value != null) { + toMethod.invoke(target, value); + } + } catch(Exception e) { + e.printStackTrace(); + return false; + } + } + } + return true; + } + + @Override + public boolean neutronLoadBalancerPoolMemberExists(String uuid) { + return loadBalancerPoolMemberDB.containsKey(uuid); + } + + @Override + public NeutronLoadBalancerPoolMember getNeutronLoadBalancerPoolMember(String uuid) { + if (!neutronLoadBalancerPoolMemberExists(uuid)) { + logger.debug("No LoadBalancerPoolMember has been defined"); + return null; + } + return loadBalancerPoolMemberDB.get(uuid); + } + + @Override + public List getAllNeutronLoadBalancerPoolMembers() { + Set allLoadBalancerPoolMembers = new HashSet(); + for (Map.Entry entry : loadBalancerPoolMemberDB.entrySet()) { + NeutronLoadBalancerPoolMember loadBalancerPoolMember = entry.getValue(); + allLoadBalancerPoolMembers.add(loadBalancerPoolMember); + } + logger.debug("Exiting getLoadBalancerPoolMembers, Found {} OpenStackLoadBalancerPoolMember", + allLoadBalancerPoolMembers.size()); + List ans = new ArrayList(); + ans.addAll(allLoadBalancerPoolMembers); + return ans; + } + + @Override + public boolean addNeutronLoadBalancerPoolMember(NeutronLoadBalancerPoolMember input) { + if (neutronLoadBalancerPoolMemberExists(input.getPoolMemberID())) { + return false; + } + loadBalancerPoolMemberDB.putIfAbsent(input.getPoolMemberID(), input); + return true; + } + + @Override + public boolean removeNeutronLoadBalancerPoolMember(String uuid) { + if (!neutronLoadBalancerPoolMemberExists(uuid)) { + return false; + } + loadBalancerPoolMemberDB.remove(uuid); + return true; + } + + @Override + public boolean updateNeutronLoadBalancerPoolMember(String uuid, NeutronLoadBalancerPoolMember delta) { + if (!neutronLoadBalancerPoolMemberExists(uuid)) { + return false; + } + NeutronLoadBalancerPoolMember target = loadBalancerPoolMemberDB.get(uuid); + return overwrite(target, delta); + } + + @Override + public boolean neutronLoadBalancerPoolMemberInUse(String loadBalancerPoolMemberID) { + return !neutronLoadBalancerPoolMemberExists(loadBalancerPoolMemberID); + } + + private void loadConfiguration() { + for (ConfigurationObject conf : configurationService.retrieveConfiguration(this, FILE_NAME)) { + NeutronLoadBalancerPoolMember nn = (NeutronLoadBalancerPoolMember) conf; + loadBalancerPoolMemberDB.put(nn.getPoolMemberID(), nn); + } + } + + @Override + public Status saveConfiguration() { + return configurationService.persistConfiguration( + new ArrayList(loadBalancerPoolMemberDB.values()), + FILE_NAME); + } + + @Override + public Object readObject(ObjectInputStream ois) throws FileNotFoundException, IOException, ClassNotFoundException { + return ois.readObject(); + } +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerAware.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerAware.java new file mode 100644 index 0000000000..e4aa5f382b --- /dev/null 
+++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerAware.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +/** + * This interface defines the methods a service that wishes to be aware of LoadBalancer Rules needs to implement + * + */ + +public interface INeutronLoadBalancerAware { + + /** + * Services provide this interface method to indicate if the specified loadBalancer can be created + * + * @param loadBalancer + * instance of proposed new LoadBalancer object + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the create operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canCreateNeutronLoadBalancer(NeutronLoadBalancer loadBalancer); + + /** + * Services provide this interface method for taking action after a loadBalancer has been created + * + * @param loadBalancer + * instance of new LoadBalancer object + * @return void + */ + public void neutronLoadBalancerCreated(NeutronLoadBalancer loadBalancer); + + /** + * Services provide this interface method to indicate if the specified loadBalancer can be changed using the specified + * delta + * + * @param delta + * updates to the loadBalancer object using patch semantics + * @param original + * instance of the LoadBalancer object to be updated + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the update operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canUpdateNeutronLoadBalancer(NeutronLoadBalancer delta, NeutronLoadBalancer original); + + /** + * Services provide this interface method for taking action after a loadBalancer has been updated + * + * @param loadBalancer + * instance of modified LoadBalancer object + * @return void + */ + public void neutronLoadBalancerUpdated(NeutronLoadBalancer loadBalancer); + + /** + * Services provide this interface method to indicate if the specified loadBalancer can be deleted + * + * @param loadBalancer + * instance of the LoadBalancer object to be deleted + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the delete operation being interrupted and the returned status value reflected in the + * HTTP response. 
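+ *            (Illustrative example only; the interface does not mandate particular codes: an
+ *            implementation could return 200 to allow the delete and 409 (Conflict) to veto it
+ *            while the load balancer is still referenced by other objects.)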
+ */ + public int canDeleteNeutronLoadBalancer(NeutronLoadBalancer loadBalancer); + + /** + * Services provide this interface method for taking action after a loadBalancer has been deleted + * + * @param loadBalancer + * instance of deleted LoadBalancer object + * @return void + */ + public void neutronLoadBalancerDeleted(NeutronLoadBalancer loadBalancer); +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerCRUD.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerCRUD.java new file mode 100644 index 0000000000..a2ce41eab2 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerCRUD.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import java.util.List; + +/** + * This interface defines the methods for CRUD of NB OpenStack LoadBalancer objects + * + */ + +public interface INeutronLoadBalancerCRUD { + /** + * Applications call this interface method to determine if a particular + *LoadBalancer object exists + * + * @param uuid + * UUID of the LoadBalancer object + * @return boolean + */ + + public boolean neutronLoadBalancerExists(String uuid); + + /** + * Applications call this interface method to return if a particular + * LoadBalancer object exists + * + * @param uuid + * UUID of the LoadBalancer object + * @return {@link NeutronLoadBalancer} + * OpenStackLoadBalancer class + */ + + public NeutronLoadBalancer getNeutronLoadBalancer(String uuid); + + /** + * Applications call this interface method to return all LoadBalancer objects + * + * @return List of OpenStackNetworks objects + */ + + public List getAllNeutronLoadBalancers(); + + /** + * Applications call this interface method to add a LoadBalancer object to the + * concurrent map + * + * @param input + * OpenStackNetwork object + * @return boolean on whether the object was added or not + */ + + public boolean addNeutronLoadBalancer(NeutronLoadBalancer input); + + /** + * Applications call this interface method to remove a Neutron LoadBalancer object to the + * concurrent map + * + * @param uuid + * identifier for the LoadBalancer object + * @return boolean on whether the object was removed or not + */ + + public boolean removeNeutronLoadBalancer(String uuid); + + /** + * Applications call this interface method to edit a LoadBalancer object + * + * @param uuid + * identifier of the LoadBalancer object + * @param delta + * OpenStackLoadBalancer object containing changes to apply + * @return boolean on whether the object was updated or not + */ + + public boolean updateNeutronLoadBalancer(String uuid, NeutronLoadBalancer delta); + + /** + * Applications call this interface method to see if a MAC address is in use + * + * @param uuid + * identifier of the LoadBalancer object + * @return boolean on whether the macAddress is already associated with a + * port or not + */ + + public boolean neutronLoadBalancerInUse(String uuid); + +} \ No newline at end of file diff --git 
a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorAware.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorAware.java new file mode 100644 index 0000000000..7194da32b4 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorAware.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +/** + * This interface defines the methods a service that wishes to be aware of LoadBalancerHealthMonitor Rules needs to implement + * + */ + +public interface INeutronLoadBalancerHealthMonitorAware { + + /** + * Services provide this interface method to indicate if the specified loadBalancerHealthMonitor can be created + * + * @param loadBalancerHealthMonitor + * instance of proposed new LoadBalancerHealthMonitor object + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the create operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canCreateNeutronLoadBalancerHealthMonitor(NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor); + + /** + * Services provide this interface method for taking action after a loadBalancerHealthMonitor has been created + * + * @param loadBalancerHealthMonitor + * instance of new LoadBalancerHealthMonitor object + * @return void + */ + public void neutronLoadBalancerHealthMonitorCreated(NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor); + + /** + * Services provide this interface method to indicate if the specified loadBalancerHealthMonitor can be changed using the specified + * delta + * + * @param delta + * updates to the loadBalancerHealthMonitor object using patch semantics + * @param original + * instance of the LoadBalancerHealthMonitor object to be updated + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the update operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canUpdateNeutronLoadBalancerHealthMonitor(NeutronLoadBalancerHealthMonitor delta, + NeutronLoadBalancerHealthMonitor original); + + /** + * Services provide this interface method for taking action after a loadBalancerHealthMonitor has been updated + * + * @param loadBalancerHealthMonitor + * instance of modified LoadBalancerHealthMonitor object + * @return void + */ + public void neutronLoadBalancerHealthMonitorUpdated(NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor); + + /** + * Services provide this interface method to indicate if the specified loadBalancerHealthMonitor can be deleted + * + * @param loadBalancerHealthMonitor + * instance of the LoadBalancerHealthMonitor object to be deleted + * @return integer + * the return value is understood to be a HTTP status code. 
A return value outside of 200 through 299 + * results in the delete operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canDeleteNeutronLoadBalancerHealthMonitor(NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor); + + /** + * Services provide this interface method for taking action after a loadBalancerHealthMonitor has been deleted + * + * @param loadBalancerHealthMonitor + * instance of deleted LoadBalancerHealthMonitor object + * @return void + */ + public void neutronLoadBalancerHealthMonitorDeleted(NeutronLoadBalancerHealthMonitor loadBalancerHealthMonitor); +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorCRUD.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorCRUD.java new file mode 100644 index 0000000000..78380001df --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerHealthMonitorCRUD.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import java.util.List; + +/** + * This interface defines the methods for CRUD of NB OpenStack LoadBalancerHealthMonitor objects + * + */ + +public interface INeutronLoadBalancerHealthMonitorCRUD { + /** + * Applications call this interface method to determine if a particular + *LoadBalancerHealthMonitor object exists + * + * @param uuid + * UUID of the LoadBalancerHealthMonitor object + * @return boolean + */ + + public boolean neutronLoadBalancerHealthMonitorExists(String uuid); + + /** + * Applications call this interface method to return if a particular + * LoadBalancerHealthMonitor object exists + * + * @param uuid + * UUID of the LoadBalancerHealthMonitor object + * @return {@link NeutronLoadBalancerHealthMonitor} + * OpenStackLoadBalancerHealthMonitor class + */ + + public NeutronLoadBalancerHealthMonitor getNeutronLoadBalancerHealthMonitor(String uuid); + + /** + * Applications call this interface method to return all LoadBalancerHealthMonitor objects + * + * @return List of OpenStackNetworks objects + */ + + public List getAllNeutronLoadBalancerHealthMonitors(); + + /** + * Applications call this interface method to add a LoadBalancerHealthMonitor object to the + * concurrent map + * + * @param input + * OpenStackNetwork object + * @return boolean on whether the object was added or not + */ + + public boolean addNeutronLoadBalancerHealthMonitor(NeutronLoadBalancerHealthMonitor input); + + /** + * Applications call this interface method to remove a Neutron LoadBalancerHealthMonitor object to the + * concurrent map + * + * @param uuid + * identifier for the LoadBalancerHealthMonitor object + * @return boolean on whether the object was removed or not + */ + + public boolean removeNeutronLoadBalancerHealthMonitor(String uuid); + + /** + * Applications call this interface method to edit a LoadBalancerHealthMonitor object + * + * @param uuid + * identifier of the LoadBalancerHealthMonitor object + * @param delta + * OpenStackLoadBalancerHealthMonitor object containing changes to apply + 
* @return boolean on whether the object was updated or not + */ + + public boolean updateNeutronLoadBalancerHealthMonitor(String uuid, NeutronLoadBalancerHealthMonitor delta); + + /** + * Applications call this interface method to see if a MAC address is in use + * + * @param uuid + * identifier of the LoadBalancerHealthMonitor object + * @return boolean on whether the macAddress is already associated with a + * port or not + */ + + public boolean neutronLoadBalancerHealthMonitorInUse(String uuid); + +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerAware.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerAware.java new file mode 100644 index 0000000000..417419f936 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerAware.java @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +/** + * This interface defines the methods a service that wishes to be aware of LoadBalancerListener Rules needs to implement + * + */ + +public interface INeutronLoadBalancerListenerAware { + + /** + * Services provide this interface method to indicate if the specified loadBalancerListener can be created + * + * @param loadBalancerListener + * instance of proposed new LoadBalancerListener object + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the create operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canCreateNeutronLoadBalancerListener(NeutronLoadBalancerListener loadBalancerListener); + + /** + * Services provide this interface method for taking action after a loadBalancerListener has been created + * + * @param loadBalancerListener + * instance of new LoadBalancerListener object + * @return void + */ + public void neutronLoadBalancerListenerCreated(NeutronLoadBalancerListener loadBalancerListener); + + /** + * Services provide this interface method to indicate if the specified loadBalancerListener can be changed using the specified + * delta + * + * @param delta + * updates to the loadBalancerListener object using patch semantics + * @param original + * instance of the LoadBalancerListener object to be updated + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the update operation being interrupted and the returned status value reflected in the + * HTTP response. 
+ */ + public int canUpdateNeutronLoadBalancerListener(NeutronLoadBalancerListener delta, + NeutronLoadBalancerListener original); + + /** + * Services provide this interface method for taking action after a loadBalancerListener has been updated + * + * @param loadBalancerListener + * instance of modified LoadBalancerListener object + * @return void + */ + public void neutronLoadBalancerListenerUpdated(NeutronLoadBalancerListener loadBalancerListener); + + /** + * Services provide this interface method to indicate if the specified loadBalancerListener can be deleted + * + * @param loadBalancerListener + * instance of the LoadBalancerListener object to be deleted + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the delete operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canDeleteNeutronLoadBalancerListener(NeutronLoadBalancerListener loadBalancerListener); + + /** + * Services provide this interface method for taking action after a loadBalancerListener has been deleted + * + * @param loadBalancerListener + * instance of deleted LoadBalancerListener object + * @return void + */ + public void neutronLoadBalancerListenerDeleted(NeutronLoadBalancerListener loadBalancerListener); +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerCRUD.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerCRUD.java new file mode 100644 index 0000000000..c160f8ed8b --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerListenerCRUD.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import java.util.List; + +/** + * This interface defines the methods for CRUD of NB OpenStack LoadBalancerListener objects + * + */ + +public interface INeutronLoadBalancerListenerCRUD { + /** + * Applications call this interface method to determine if a particular + *LoadBalancerListener object exists + * + * @param uuid + * UUID of the LoadBalancerListener object + * @return boolean + */ + + public boolean neutronLoadBalancerListenerExists(String uuid); + + /** + * Applications call this interface method to return if a particular + * LoadBalancerListener object exists + * + * @param uuid + * UUID of the LoadBalancerListener object + * @return {@link NeutronLoadBalancerListener} + * OpenStackLoadBalancerListener class + */ + + public NeutronLoadBalancerListener getNeutronLoadBalancerListener(String uuid); + + /** + * Applications call this interface method to return all LoadBalancerListener objects + * + * @return List of OpenStackNetworks objects + */ + + public List getAllNeutronLoadBalancerListeners(); + + /** + * Applications call this interface method to add a LoadBalancerListener object to the + * concurrent map + * + * @param input + * OpenStackNetwork object + * @return boolean on whether the object was added or not + */ + + public boolean addNeutronLoadBalancerListener(NeutronLoadBalancerListener input); + + /** + * Applications call this interface method to remove a Neutron LoadBalancerListener object to the + * concurrent map + * + * @param uuid + * identifier for the LoadBalancerListener object + * @return boolean on whether the object was removed or not + */ + + public boolean removeNeutronLoadBalancerListener(String uuid); + + /** + * Applications call this interface method to edit a LoadBalancerListener object + * + * @param uuid + * identifier of the LoadBalancerListener object + * @param delta + * OpenStackLoadBalancerListener object containing changes to apply + * @return boolean on whether the object was updated or not + */ + + public boolean updateNeutronLoadBalancerListener(String uuid, NeutronLoadBalancerListener delta); + + /** + * Applications call this interface method to see if a MAC address is in use + * + * @param uuid + * identifier of the LoadBalancerListener object + * @return boolean on whether the macAddress is already associated with a + * port or not + */ + + public boolean neutronLoadBalancerListenerInUse(String uuid); + +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolAware.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolAware.java new file mode 100644 index 0000000000..16c7d37169 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolAware.java @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +/** + * This interface defines the methods a service that wishes to be aware of LoadBalancerPool Rules needs to implement + * + */ + +public interface INeutronLoadBalancerPoolAware { + + /** + * Services provide this interface method to indicate if the specified loadBalancerPool can be created + * + * @param loadBalancerPool + * instance of proposed new LoadBalancerPool object + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the create operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canCreateNeutronLoadBalancerPool(NeutronLoadBalancerPool loadBalancerPool); + + /** + * Services provide this interface method for taking action after a loadBalancerPool has been created + * + * @param loadBalancerPool + * instance of new LoadBalancerPool object + * @return void + */ + public void neutronLoadBalancerPoolCreated(NeutronLoadBalancerPool loadBalancerPool); + + /** + * Services provide this interface method to indicate if the specified loadBalancerPool can be changed using the specified + * delta + * + * @param delta + * updates to the loadBalancerPool object using patch semantics + * @param original + * instance of the LoadBalancerPool object to be updated + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the update operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canUpdateNeutronLoadBalancerPool(NeutronLoadBalancerPool delta, NeutronLoadBalancerPool original); + + /** + * Services provide this interface method for taking action after a loadBalancerPool has been updated + * + * @param loadBalancerPool + * instance of modified LoadBalancerPool object + * @return void + */ + public void neutronLoadBalancerPoolUpdated(NeutronLoadBalancerPool loadBalancerPool); + + /** + * Services provide this interface method to indicate if the specified loadBalancerPool can be deleted + * + * @param loadBalancerPool + * instance of the LoadBalancerPool object to be deleted + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the delete operation being interrupted and the returned status value reflected in the + * HTTP response. 
+ */ + public int canDeleteNeutronLoadBalancerPool(NeutronLoadBalancerPool loadBalancerPool); + + /** + * Services provide this interface method for taking action after a loadBalancerPool has been deleted + * + * @param loadBalancerPool + * instance of deleted LoadBalancerPool object + * @return void + */ + public void neutronLoadBalancerPoolDeleted(NeutronLoadBalancerPool loadBalancerPool); +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolCRUD.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolCRUD.java new file mode 100644 index 0000000000..9614448d06 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolCRUD.java @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import java.util.List; + +/** + * This interface defines the methods for CRUD of NB OpenStack LoadBalancerPool objects + * + */ + +public interface INeutronLoadBalancerPoolCRUD { + /** + * Applications call this interface method to determine if a particular + *LoadBalancerPool object exists + * + * @param uuid + * UUID of the LoadBalancerPool object + * @return boolean + */ + + public boolean neutronLoadBalancerPoolExists(String uuid); + + /** + * Applications call this interface method to return if a particular + * LoadBalancerPool object exists + * + * @param uuid + * UUID of the LoadBalancerPool object + * @return {@link NeutronLoadBalancerPool} + * OpenStackLoadBalancerPool class + */ + + public NeutronLoadBalancerPool getNeutronLoadBalancerPool(String uuid); + + /** + * Applications call this interface method to return all LoadBalancerPool objects + * + * @return List of OpenStackNetworks objects + */ + + public List getAllNeutronLoadBalancerPools(); + + /** + * Applications call this interface method to add a LoadBalancerPool object to the + * concurrent map + * + * @param input + * OpenStackNetwork object + * @return boolean on whether the object was added or not + */ + + public boolean addNeutronLoadBalancerPool(NeutronLoadBalancerPool input); + + /** + * Applications call this interface method to remove a Neutron LoadBalancerPool object to the + * concurrent map + * + * @param uuid + * identifier for the LoadBalancerPool object + * @return boolean on whether the object was removed or not + */ + + public boolean removeNeutronLoadBalancerPool(String uuid); + + /** + * Applications call this interface method to edit a LoadBalancerPool object + * + * @param uuid + * identifier of the LoadBalancerPool object + * @param delta + * OpenStackLoadBalancerPool object containing changes to apply + * @return boolean on whether the object was updated or not + */ + + public boolean updateNeutronLoadBalancerPool(String uuid, NeutronLoadBalancerPool delta); + + /** + * Applications call this interface method to see if a MAC address is in use + * + * @param uuid + * identifier of the LoadBalancerPool object + * @return boolean on whether the macAddress is already associated with a + * port or not + */ + + public boolean neutronLoadBalancerPoolInUse(String uuid); 
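+
+    /*
+     * Minimal usage sketch (illustrative only; the local variables uuid and pool are
+     * hypothetical, and a registered implementation of this service is assumed):
+     *
+     *   INeutronLoadBalancerPoolCRUD pools =
+     *       NeutronCRUDInterfaces.getINeutronLoadBalancerPoolCRUD(this);
+     *   if (pools != null && !pools.neutronLoadBalancerPoolExists(uuid)) {
+     *       pools.addNeutronLoadBalancerPool(pool);
+     *   }
+     */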
+ +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberAware.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberAware.java new file mode 100644 index 0000000000..0a1da77604 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberAware.java @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.networkconfig.neutron; + +public interface INeutronLoadBalancerPoolMemberAware { + + + /** + * Services provide this interface method to indicate if the specified loadBalancerPoolMember can be created + * + * @param loadBalancerPoolMember + * instance of proposed new LoadBalancerPool object + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the create operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canCreateNeutronLoadBalancerPoolMember(NeutronLoadBalancerPoolMember loadBalancerPoolMember); + + /** + * Services provide this interface method for taking action after a loadBalancerPoolMember has been created + * + * @param loadBalancerPoolMember + * instance of new LoadBalancerPool object + * @return void + */ + public void neutronLoadBalancerPoolMemberCreated(NeutronLoadBalancerPoolMember loadBalancerPoolMember); + + /** + * Services provide this interface method to indicate if the specified loadBalancerPoolMember can be changed using the specified + * delta + * + * @param delta + * updates to the loadBalancerPoolMember object using patch semantics + * @param original + * instance of the LoadBalancerPool object to be updated + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the update operation being interrupted and the returned status value reflected in the + * HTTP response. + */ + public int canUpdateNeutronLoadBalancerPoolMember(NeutronLoadBalancerPoolMember delta, + NeutronLoadBalancerPoolMember original); + + /** + * Services provide this interface method for taking action after a loadBalancerPoolMember has been updated + * + * @param loadBalancerPoolMember + * instance of modified LoadBalancerPool object + * @return void + */ + public void neutronLoadBalancerPoolMemberUpdated(NeutronLoadBalancerPoolMember loadBalancerPoolMember); + + /** + * Services provide this interface method to indicate if the specified loadBalancerPoolMember can be deleted + * + * @param loadBalancerPoolMember + * instance of the LoadBalancerPool object to be deleted + * @return integer + * the return value is understood to be a HTTP status code. A return value outside of 200 through 299 + * results in the delete operation being interrupted and the returned status value reflected in the + * HTTP response. 
+ */ + public int canDeleteNeutronLoadBalancerPoolMember(NeutronLoadBalancerPoolMember loadBalancerPoolMember); + + /** + * Services provide this interface method for taking action after a loadBalancerPoolMember has been deleted + * + * @param loadBalancerPoolMember + * instance of deleted LoadBalancerPool object + * @return void + */ + public void NeutronLoadBalancerPoolMemberDeleted(NeutronLoadBalancerPoolMember loadBalancerPoolMember); +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberCRUD.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberCRUD.java new file mode 100644 index 0000000000..c1f5c7003c --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberCRUD.java @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import java.util.List; + +public interface INeutronLoadBalancerPoolMemberCRUD { + + /** + * Applications call this interface method to determine if a particular + *NeutronLoadBalancerPoolMember object exists + * + * @param uuid + * UUID of the NeutronLoadBalancerPoolMember object + * @return boolean + */ + + public boolean neutronLoadBalancerPoolMemberExists(String uuid); + + /** + * Applications call this interface method to return if a particular + * NeutronLoadBalancerPoolMember object exists + * + * @param uuid + * UUID of the NeutronLoadBalancerPoolMember object + * @return {@link org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPoolMember} + * OpenStackNeutronLoadBalancerPoolMember class + */ + + public NeutronLoadBalancerPoolMember getNeutronLoadBalancerPoolMember(String uuid); + + /** + * Applications call this interface method to return all NeutronLoadBalancerPoolMember objects + * + * @return List of OpenStackNetworks objects + */ + + public List getAllNeutronLoadBalancerPoolMembers(); + + /** + * Applications call this interface method to add a NeutronLoadBalancerPoolMember object to the + * concurrent map + * + * @param input + * OpenStackNetwork object + * @return boolean on whether the object was added or not + */ + + public boolean addNeutronLoadBalancerPoolMember(NeutronLoadBalancerPoolMember input); + + /** + * Applications call this interface method to remove a Neutron NeutronLoadBalancerPoolMember object to the + * concurrent map + * + * @param uuid + * identifier for the NeutronLoadBalancerPoolMember object + * @return boolean on whether the object was removed or not + */ + + public boolean removeNeutronLoadBalancerPoolMember(String uuid); + + /** + * Applications call this interface method to edit a NeutronLoadBalancerPoolMember object + * + * @param uuid + * identifier of the NeutronLoadBalancerPoolMember object + * @param delta + * OpenStackNeutronLoadBalancerPoolMember object containing changes to apply + * @return boolean on whether the object was updated or not + */ + + public boolean updateNeutronLoadBalancerPoolMember(String uuid, NeutronLoadBalancerPoolMember delta); + + /** + * Applications call this interface method to see if a MAC address 
is in use + * + * @param uuid + * identifier of the NeutronLoadBalancerPoolMember object + * @return boolean on whether the macAddress is already associated with a + * port or not + */ + + public boolean neutronLoadBalancerPoolMemberInUse(String uuid); + +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberRequest.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberRequest.java new file mode 100644 index 0000000000..d8c5eb9e93 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/INeutronLoadBalancerPoolMemberRequest.java @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import javax.xml.bind.annotation.XmlElement; +import java.util.List; + +public class INeutronLoadBalancerPoolMemberRequest { + + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="member") + NeutronLoadBalancerPoolMember singletonLoadBalancerPoolMember; + + @XmlElement(name="members") + List bulkRequest; + + INeutronLoadBalancerPoolMemberRequest() { + } + + public INeutronLoadBalancerPoolMemberRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancerPoolMember = null; + } + + INeutronLoadBalancerPoolMemberRequest(NeutronLoadBalancerPoolMember group) { + singletonLoadBalancerPoolMember = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancerPoolMember getSingleton() { + return singletonLoadBalancerPoolMember; + } + + public boolean isSingleton() { + return (singletonLoadBalancerPoolMember != null); + } +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronCRUDInterfaces.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronCRUDInterfaces.java index 6ce5499cdf..472debe383 100644 --- a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronCRUDInterfaces.java +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronCRUDInterfaces.java @@ -61,4 +61,29 @@ public class NeutronCRUDInterfaces { INeutronFirewallRuleCRUD answer = (INeutronFirewallRuleCRUD) ServiceHelper.getGlobalInstance(INeutronFirewallRuleCRUD.class, o); return answer; } + + public static INeutronLoadBalancerCRUD getINeutronLoadBalancerCRUD(Object o) { + INeutronLoadBalancerCRUD answer = (INeutronLoadBalancerCRUD) ServiceHelper.getGlobalInstance(INeutronLoadBalancerCRUD.class, o); + return answer; + } + + public static INeutronLoadBalancerPoolCRUD getINeutronLoadBalancerPoolCRUD(Object o) { + INeutronLoadBalancerPoolCRUD answer = (INeutronLoadBalancerPoolCRUD) ServiceHelper.getGlobalInstance(INeutronLoadBalancerPoolCRUD.class, o); + return answer; + } + + public static INeutronLoadBalancerListenerCRUD 
getINeutronLoadBalancerListenerCRUD(Object o) { + INeutronLoadBalancerListenerCRUD answer = (INeutronLoadBalancerListenerCRUD) ServiceHelper.getGlobalInstance(INeutronLoadBalancerListenerCRUD.class, o); + return answer; + } + + public static INeutronLoadBalancerHealthMonitorCRUD getINeutronLoadBalancerHealthMonitorCRUD(Object o) { + INeutronLoadBalancerHealthMonitorCRUD answer = (INeutronLoadBalancerHealthMonitorCRUD) ServiceHelper.getGlobalInstance(INeutronLoadBalancerHealthMonitorCRUD.class, o); + return answer; + } + + public static INeutronLoadBalancerPoolMemberCRUD getINeutronLoadBalancerPoolMemberCRUD(Object o) { + INeutronLoadBalancerPoolMemberCRUD answer = (INeutronLoadBalancerPoolMemberCRUD) ServiceHelper.getGlobalInstance(INeutronLoadBalancerPoolMemberCRUD.class, o); + return answer; + } } \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancer.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancer.java new file mode 100644 index 0000000000..15544f0424 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancer.java @@ -0,0 +1,161 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import org.opendaylight.controller.configuration.ConfigurationObject; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; + +/** + * OpenStack Neutron v2.0 Load Balancer as a service + * (LBaaS) bindings. 
See OpenStack Network API + * v2.0 Reference for description of the fields: + * Implemented fields are as follows: + * + * id uuid-str + * tenant_id uuid-str + * name String + * description String + * status String + * vip_address IP address + * vip_subnet uuid-str + * http://docs.openstack.org/api/openstack-network/2.0/openstack-network.pdf + */ + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancer extends ConfigurationObject implements Serializable { + private static final long serialVersionUID = 1L; + + @XmlElement(name="id") + String loadBalancerID; + + @XmlElement (name="tenant_id") + String loadBalancerTenantID; + + @XmlElement (name="name") + String loadBalancerName; + + @XmlElement (name="description") + String loadBalancerDescription; + + @XmlElement (name="status") + String loadBalancerStatus; + + @XmlElement (name="vip_address") + String loadBalancerVipAddress; + + @XmlElement (name="vip_subnet_id") + String loadBalancerVipSubnetID; + + public String getLoadBalancerID() { + return loadBalancerID; + } + + public void setLoadBalancerID(String loadBalancerID) { + this.loadBalancerID = loadBalancerID; + } + + public String getLoadBalancerTenantID() { + return loadBalancerTenantID; + } + + public void setLoadBalancerTenantID(String loadBalancerTenantID) { + this.loadBalancerTenantID = loadBalancerTenantID; + } + + public String getLoadBalancerName() { + return loadBalancerName; + } + + public void setLoadBalancerName(String loadBalancerName) { + this.loadBalancerName = loadBalancerName; + } + + public String getLoadBalancerDescription() { + return loadBalancerDescription; + } + + public void setLoadBalancerDescription(String loadBalancerDescription) { + this.loadBalancerDescription = loadBalancerDescription; + } + + public String getLoadBalancerStatus() { + return loadBalancerStatus; + } + + public void setLoadBalancerStatus(String loadBalancerStatus) { + this.loadBalancerStatus = loadBalancerStatus; + } + + public String getLoadBalancerVipAddress() { + return loadBalancerVipAddress; + } + + public void setLoadBalancerVipAddress(String loadBalancerVipAddress) { + this.loadBalancerVipAddress = loadBalancerVipAddress; + } + + public String getLoadBalancerVipSubnetID() { + return loadBalancerVipSubnetID; + } + + public void setLoadBalancerVipSubnetID(String loadBalancerVipSubnetID) { + this.loadBalancerVipSubnetID = loadBalancerVipSubnetID; + } + + public NeutronLoadBalancer extractFields(List fields) { + NeutronLoadBalancer ans = new NeutronLoadBalancer(); + Iterator i = fields.iterator(); + while (i.hasNext()) { + String s = i.next(); + if (s.equals("id")) { + ans.setLoadBalancerID(this.getLoadBalancerID()); + } + if (s.equals("tenant_id")) { + ans.setLoadBalancerTenantID(this.getLoadBalancerTenantID()); + } + if (s.equals("name")) { + ans.setLoadBalancerName(this.getLoadBalancerName()); + } + if(s.equals("description")) { + ans.setLoadBalancerDescription(this.getLoadBalancerDescription()); + } + if (s.equals("vip_address")) { + ans.setLoadBalancerVipAddress(this.getLoadBalancerVipAddress()); + } + if (s.equals("vip_subnet_id")) { + ans.setLoadBalancerVipSubnetID(this.getLoadBalancerVipSubnetID()); + } + if (s.equals("status")) { + ans.setLoadBalancerStatus(this.getLoadBalancerStatus()); + } + } + return ans; + } + + @Override public String toString() { + return "NeutronLoadBalancer{" + + "loadBalancerID='" + loadBalancerID + '\'' + + ", loadBalancerTenantID='" + loadBalancerTenantID + '\'' + + ", loadBalancerName='" + loadBalancerName + '\'' + + 
", loadBalancerDescription='" + loadBalancerDescription + '\'' + + ", loadBalancerStatus='" + loadBalancerStatus + '\'' + + ", loadBalancerVipAddress='" + loadBalancerVipAddress + '\'' + + ", loadBalancerVipSubnetID='" + loadBalancerVipSubnetID + '\'' + + '}'; + } +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerHealthMonitor.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerHealthMonitor.java new file mode 100644 index 0000000000..0e9e1af999 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerHealthMonitor.java @@ -0,0 +1,229 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import org.opendaylight.controller.configuration.ConfigurationObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; + +/** + * OpenStack Neutron v2.0 Load Balancer as a service + * (LBaaS) bindings. See OpenStack Network API + * v2.0 Reference for description of the fields: + * Implemented fields are as follows: + * + * + * id uuid-str + * tenant_id uuid-str + * type String + * delay Integer + * timeout Integer + * max_retries Integer + * http_method String + * url_path String + * expected_codes String + * admin_state_up Boolean + * status String + * http://docs.openstack.org/api/openstack-network/2.0/openstack-network.pdf + */ + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerHealthMonitor extends ConfigurationObject implements Serializable { + private static final long serialVersionUID = 1L; + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancer.class); + + @XmlElement(name="id") + String loadBalancerHealthMonitorID; + + @XmlElement (name="tenant_id") + String loadBalancerHealthMonitorTenantID; + + @XmlElement (name="type") + String loadBalancerHealthMonitorType; + + @XmlElement (name="delay") + Integer loadBalancerHealthMonitorDelay; + + @XmlElement (name="timeout") + Integer loadBalancerHealthMonitorTimeout; + + @XmlElement (name="max_retries") + Integer loadBalancerHealthMonitorMaxRetries; + + @XmlElement (name="http_method") + String loadBalancerHealthMonitorHttpMethod; + + @XmlElement (name="url_path") + String loadBalancerHealthMonitorUrlPath; + + @XmlElement (name="expected_codes") + String loadBalancerHealthMonitorExpectedCodes; + + @XmlElement (defaultValue="true", name="admin_state_up") + Boolean loadBalancerHealthMonitorAdminStateIsUp; + + @XmlElement (name="status") + String loadBalancerHealthMonitorStatus; + + public String getLoadBalancerHealthMonitorID() { + return loadBalancerHealthMonitorID; + } + + public void setLoadBalancerHealthMonitorID(String loadBalancerHealthMonitorID) { + this.loadBalancerHealthMonitorID = loadBalancerHealthMonitorID; + } + + public String 
getLoadBalancerHealthMonitorTenantID() { + return loadBalancerHealthMonitorTenantID; + } + + public void setLoadBalancerHealthMonitorTenantID(String loadBalancerHealthMonitorTenantID) { + this.loadBalancerHealthMonitorTenantID = loadBalancerHealthMonitorTenantID; + } + + public String getLoadBalancerHealthMonitorType() { + return loadBalancerHealthMonitorType; + } + + public void setLoadBalancerHealthMonitorType(String loadBalancerHealthMonitorType) { + this.loadBalancerHealthMonitorType = loadBalancerHealthMonitorType; + } + + public Integer getLoadBalancerHealthMonitorDelay() { + return loadBalancerHealthMonitorDelay; + } + + public void setLoadBalancerHealthMonitorDelay(Integer loadBalancerHealthMonitorDelay) { + this.loadBalancerHealthMonitorDelay = loadBalancerHealthMonitorDelay; + } + + public Integer getLoadBalancerHealthMonitorTimeout() { + return loadBalancerHealthMonitorTimeout; + } + + public void setLoadBalancerHealthMonitorTimeout(Integer loadBalancerHealthMonitorTimeout) { + this.loadBalancerHealthMonitorTimeout = loadBalancerHealthMonitorTimeout; + } + + public Integer getLoadBalancerHealthMonitorMaxRetries() { + return loadBalancerHealthMonitorMaxRetries; + } + + public void setLoadBalancerHealthMonitorMaxRetries(Integer loadBalancerHealthMonitorMaxRetries) { + this.loadBalancerHealthMonitorMaxRetries = loadBalancerHealthMonitorMaxRetries; + } + + public String getLoadBalancerHealthMonitorHttpMethod() { + return loadBalancerHealthMonitorHttpMethod; + } + + public void setLoadBalancerHealthMonitorHttpMethod(String loadBalancerHealthMonitorHttpMethod) { + this.loadBalancerHealthMonitorHttpMethod = loadBalancerHealthMonitorHttpMethod; + } + + public String getLoadBalancerHealthMonitorUrlPath() { + return loadBalancerHealthMonitorUrlPath; + } + + public void setLoadBalancerHealthMonitorUrlPath(String loadBalancerHealthMonitorUrlPath) { + this.loadBalancerHealthMonitorUrlPath = loadBalancerHealthMonitorUrlPath; + } + + public String getLoadBalancerHealthMonitorExpectedCodes() { + return loadBalancerHealthMonitorExpectedCodes; + } + + public void setLoadBalancerHealthMonitorExpectedCodes(String loadBalancerHealthMonitorExpectedCodes) { + this.loadBalancerHealthMonitorExpectedCodes = loadBalancerHealthMonitorExpectedCodes; + } + + public Boolean getLoadBalancerHealthMonitorAdminStateIsUp() { + return loadBalancerHealthMonitorAdminStateIsUp; + } + + public void setLoadBalancerHealthMonitorAdminStateIsUp(Boolean loadBalancerHealthMonitorAdminStateIsUp) { + this.loadBalancerHealthMonitorAdminStateIsUp = loadBalancerHealthMonitorAdminStateIsUp; + } + + public String getLoadBalancerHealthMonitorStatus() { + return loadBalancerHealthMonitorStatus; + } + + public void setLoadBalancerHealthMonitorStatus(String loadBalancerHealthMonitorStatus) { + this.loadBalancerHealthMonitorStatus = loadBalancerHealthMonitorStatus; + } + + public NeutronLoadBalancerHealthMonitor extractFields(List fields) { + NeutronLoadBalancerHealthMonitor ans = new NeutronLoadBalancerHealthMonitor(); + Iterator i = fields.iterator(); + while (i.hasNext()) { + String s = i.next(); + if (s.equals("id")) { + ans.setLoadBalancerHealthMonitorID(this.getLoadBalancerHealthMonitorID()); + } + if (s.equals("tenant_id")) { + ans.setLoadBalancerHealthMonitorTenantID(this.getLoadBalancerHealthMonitorTenantID()); + } + if (s.equals("type")) { + ans.setLoadBalancerHealthMonitorType(this.getLoadBalancerHealthMonitorType()); + } + if (s.equals("delay")) { + 
ans.setLoadBalancerHealthMonitorDelay(this.getLoadBalancerHealthMonitorDelay()); + } + if (s.equals("timeout")) { + ans.setLoadBalancerHealthMonitorTimeout(this.getLoadBalancerHealthMonitorTimeout()); + } + if (s.equals("max_retries")) { + ans.setLoadBalancerHealthMonitorMaxRetries(this.getLoadBalancerHealthMonitorMaxRetries()); + } + if (s.equals("http_method")) { + ans.setLoadBalancerHealthMonitorHttpMethod(this.getLoadBalancerHealthMonitorHttpMethod()); + } + if(s.equals("url_path")) { + ans.setLoadBalancerHealthMonitorUrlPath(this.getLoadBalancerHealthMonitorUrlPath()); + } + if (s.equals("expected_codes")) { + ans.setLoadBalancerHealthMonitorExpectedCodes(this.getLoadBalancerHealthMonitorExpectedCodes()); + } + if (s.equals("admin_state_up")) { + ans.setLoadBalancerHealthMonitorAdminStateIsUp(loadBalancerHealthMonitorAdminStateIsUp); + } + if (s.equals("status")) { + ans.setLoadBalancerHealthMonitorStatus(this.getLoadBalancerHealthMonitorStatus()); + } + } + return ans; + } + + @Override public String toString() { + return "NeutronLoadBalancerHealthMonitor{" + + "loadBalancerHealthMonitorID='" + loadBalancerHealthMonitorID + '\'' + + ", loadBalancerHealthMonitorTenantID='" + loadBalancerHealthMonitorTenantID + '\'' + + ", loadBalancerHealthMonitorType='" + loadBalancerHealthMonitorType + '\'' + + ", loadBalancerHealthMonitorDelay=" + loadBalancerHealthMonitorDelay + + ", loadBalancerHealthMonitorTimeout=" + loadBalancerHealthMonitorTimeout + + ", loadBalancerHealthMonitorMaxRetries=" + loadBalancerHealthMonitorMaxRetries + + ", loadBalancerHealthMonitorHttpMethod='" + loadBalancerHealthMonitorHttpMethod + '\'' + + ", loadBalancerHealthMonitorUrlPath='" + loadBalancerHealthMonitorUrlPath + '\'' + + ", loadBalancerHealthMonitorExpectedCodes='" + loadBalancerHealthMonitorExpectedCodes + '\'' + + ", loadBalancerHealthMonitorAdminStateIsUp=" + loadBalancerHealthMonitorAdminStateIsUp + + ", loadBalancerHealthMonitorStatus='" + loadBalancerHealthMonitorStatus + '\'' + + '}'; + } +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerListener.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerListener.java new file mode 100644 index 0000000000..39897099a2 --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerListener.java @@ -0,0 +1,227 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import org.opendaylight.controller.configuration.ConfigurationObject; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; + +/** + * OpenStack Neutron v2.0 Load Balancer as a service + * (LBaaS) bindings. 
See OpenStack Network API + * v2.0 Reference for description of the fields: + * Implemented fields are as follows: + * + * id uuid-str + * default_pool_id String + * tenant_id uuid-str + * name String + * description String + * shared Bool + * protocol String + * protocol_port String + * load_balancer_id String + * admin_state_up Boolean + * status String + * + * http://docs.openstack.org/api/openstack-network/2.0/openstack-network.pdf + */ + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerListener extends ConfigurationObject implements Serializable { + private static final long serialVersionUID = 1L; + + @XmlElement(name="id") + String loadBalancerListenerID; + + @XmlElement (name="default_pool_id") + String neutronLoadBalancerListenerDefaultPoolID; + + @XmlElement (name="tenant_id") + String loadBalancerListenerTenantID; + + @XmlElement (name="name") + String loadBalancerListenerName; + + @XmlElement (name="description") + String loadBalancerListenerDescription; + + @XmlElement (defaultValue="true", name="admin_state_up") + Boolean loadBalancerListenerAdminStateIsUp; + + @XmlElement (name="status") + String loadBalancerListenerStatus; + + @XmlElement (defaultValue="false", name="shared") + Boolean loadBalancerListenerIsShared; + + @XmlElement (name="protocol") + String neutronLoadBalancerListenerProtocol; + + @XmlElement (name="protocol_port") + String neutronLoadBalancerListenerProtocolPort; + + @XmlElement (name="load_balancer_id") + String neutronLoadBalancerListenerLoadBalancerID; + + + public String getLoadBalancerListenerID() { + return loadBalancerListenerID; + } + + public void setLoadBalancerListenerID(String loadBalancerListenerID) { + this.loadBalancerListenerID = loadBalancerListenerID; + } + + public String getLoadBalancerListenerTenantID() { + return loadBalancerListenerTenantID; + } + + public void setLoadBalancerListenerTenantID(String loadBalancerListenerTenantID) { + this.loadBalancerListenerTenantID = loadBalancerListenerTenantID; + } + + public String getLoadBalancerListenerName() { + return loadBalancerListenerName; + } + + public void setLoadBalancerListenerName(String loadBalancerListenerName) { + this.loadBalancerListenerName = loadBalancerListenerName; + } + + public String getLoadBalancerListenerDescription() { + return loadBalancerListenerDescription; + } + + public void setLoadBalancerListenerDescription(String loadBalancerListenerDescription) { + this.loadBalancerListenerDescription = loadBalancerListenerDescription; + } + + public Boolean getLoadBalancerListenerAdminStateIsUp() { + return loadBalancerListenerAdminStateIsUp; + } + + public void setLoadBalancerListenerAdminStateIsUp(Boolean loadBalancerListenerAdminStateIsUp) { + this.loadBalancerListenerAdminStateIsUp = loadBalancerListenerAdminStateIsUp; + } + + public String getLoadBalancerListenerStatus() { + return loadBalancerListenerStatus; + } + + public void setLoadBalancerListenerStatus(String loadBalancerListenerStatus) { + this.loadBalancerListenerStatus = loadBalancerListenerStatus; + } + + public Boolean getLoadBalancerListenerIsShared() { + return loadBalancerListenerIsShared; + } + + public void setLoadBalancerListenerIsShared(Boolean loadBalancerListenerIsShared) { + this.loadBalancerListenerIsShared = loadBalancerListenerIsShared; + } + + public String getNeutronLoadBalancerListenerProtocol() { + return neutronLoadBalancerListenerProtocol; + } + + public void setNeutronLoadBalancerListenerProtocol(String neutronLoadBalancerListenerProtocol) { + 
this.neutronLoadBalancerListenerProtocol = neutronLoadBalancerListenerProtocol; + } + + public String getNeutronLoadBalancerListenerProtocolPort() { + return neutronLoadBalancerListenerProtocolPort; + } + + public void setNeutronLoadBalancerListenerProtocolPort(String neutronLoadBalancerListenerProtocolPort) { + this.neutronLoadBalancerListenerProtocolPort = neutronLoadBalancerListenerProtocolPort; + } + + public String getNeutronLoadBalancerListenerDefaultPoolID() { + return neutronLoadBalancerListenerDefaultPoolID; + } + + public void setNeutronLoadBalancerListenerDefaultPoolID(String neutronLoadBalancerListenerDefaultPoolID) { + this.neutronLoadBalancerListenerDefaultPoolID = neutronLoadBalancerListenerDefaultPoolID; + } + + public String getNeutronLoadBalancerListenerLoadBalancerID() { + return neutronLoadBalancerListenerLoadBalancerID; + } + + public void setNeutronLoadBalancerListenerLoadBalancerID(String neutronLoadBalancerListenerLoadBalancerID) { + this.neutronLoadBalancerListenerLoadBalancerID = neutronLoadBalancerListenerLoadBalancerID; + } + + public NeutronLoadBalancerListener extractFields(List fields) { + NeutronLoadBalancerListener ans = new NeutronLoadBalancerListener(); + Iterator i = fields.iterator(); + while (i.hasNext()) { + String s = i.next(); + if (s.equals("id")) { + ans.setLoadBalancerListenerID(this.getLoadBalancerListenerID()); + } + if(s.equals("default_pool_id")) { + ans.setNeutronLoadBalancerListenerDefaultPoolID(this.getNeutronLoadBalancerListenerDefaultPoolID()); + } + if (s.equals("tenant_id")) { + ans.setLoadBalancerListenerTenantID(this.getLoadBalancerListenerTenantID()); + } + if (s.equals("name")) { + ans.setLoadBalancerListenerName(this.getLoadBalancerListenerName()); + } + if(s.equals("description")) { + ans.setLoadBalancerListenerDescription(this.getLoadBalancerListenerDescription()); + } + if (s.equals("shared")) { + ans.setLoadBalancerListenerIsShared(loadBalancerListenerIsShared); + } + if (s.equals("protocol")) { + ans.setNeutronLoadBalancerListenerProtocol(this.getNeutronLoadBalancerListenerProtocol()); + } + if (s.equals("protocol_port")) { + ans.setNeutronLoadBalancerListenerProtocolPort(this.getNeutronLoadBalancerListenerProtocolPort()); + } + if (s.equals("load_balancer_id")) { + ans.setNeutronLoadBalancerListenerLoadBalancerID(this.getNeutronLoadBalancerListenerLoadBalancerID()); + } + if (s.equals("admin_state_up")) { + ans.setLoadBalancerListenerAdminStateIsUp(loadBalancerListenerAdminStateIsUp); + } + if (s.equals("status")) { + ans.setLoadBalancerListenerStatus(this.getLoadBalancerListenerStatus()); + } + } + return ans; + } + + @Override public String toString() { + return "NeutronLoadBalancerListener{" + + "loadBalancerListenerID='" + loadBalancerListenerID + '\'' + + ", neutronLoadBalancerListenerDefaultPoolID='" + neutronLoadBalancerListenerDefaultPoolID + '\'' + + ", loadBalancerListenerTenantID='" + loadBalancerListenerTenantID + '\'' + + ", loadBalancerListenerName='" + loadBalancerListenerName + '\'' + + ", loadBalancerListenerDescription='" + loadBalancerListenerDescription + '\'' + + ", loadBalancerListenerAdminStateIsUp=" + loadBalancerListenerAdminStateIsUp + + ", loadBalancerListenerStatus='" + loadBalancerListenerStatus + '\'' + + ", loadBalancerListenerIsShared=" + loadBalancerListenerIsShared + + ", neutronLoadBalancerListenerProtocol='" + neutronLoadBalancerListenerProtocol + '\'' + + ", neutronLoadBalancerListenerProtocolPort='" + neutronLoadBalancerListenerProtocolPort + '\'' + + ", 
neutronLoadBalancerListenerLoadBalancerID='" + neutronLoadBalancerListenerLoadBalancerID + '\'' + + '}'; + } +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPool.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPool.java new file mode 100644 index 0000000000..12c80fe70c --- /dev/null +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPool.java @@ -0,0 +1,201 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import org.opendaylight.controller.configuration.ConfigurationObject; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.io.Serializable; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +/** + * OpenStack Neutron v2.0 Load Balancer as a service + * (LBaaS) bindings. See OpenStack Network API + * v2.0 Reference for description of the fields: + * Implemented fields are as follows: + * + * id uuid-str + * tenant_id uuid-str + * name String + * description String + * protocol String + * lb_algorithm String + * healthmonitor_id String + * admin_state_up Bool + * status String + * members List + * http://docs.openstack.org/api/openstack-network/2.0/openstack-network.pdf + */ + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerPool extends ConfigurationObject implements Serializable { + private static final long serialVersionUID = 1L; + + @XmlElement(name="id") + String loadBalancerPoolID; + + @XmlElement (name="tenant_id") + String loadBalancerPoolTenantID; + + @XmlElement (name="name") + String loadBalancerPoolName; + + @XmlElement (name="description") + String loadBalancerPoolDescription; + + @XmlElement (name="protocol") + String loadBalancerPoolProtocol; + + @XmlElement (name="lb_algorithm") + String loadBalancerPoolLbAlgorithm; + + @XmlElement (name="healthmonitor_id") + String neutronLoadBalancerPoolHealthMonitorID; + + @XmlElement (defaultValue="true", name="admin_state_up") + Boolean loadBalancerPoolAdminStateIsUp; + + @XmlElement (name="status") + String loadBalancerPoolStatus; + + @XmlElement (name="members") + List loadBalancerPoolMembers; + + HashMap member; + + public NeutronLoadBalancerPool() { + member = new HashMap(); + } + + public String getLoadBalancerPoolID() { + return loadBalancerPoolID; + } + + public void setLoadBalancerPoolID(String loadBalancerPoolID) { + this.loadBalancerPoolID = loadBalancerPoolID; + } + + public String getLoadBalancerPoolTenantID() { + return loadBalancerPoolTenantID; + } + + public void setLoadBalancerPoolTenantID(String loadBalancerPoolTenantID) { + this.loadBalancerPoolTenantID = loadBalancerPoolTenantID; + } + + public String getLoadBalancerPoolName() { + return loadBalancerPoolName; + } + + public void setLoadBalancerPoolName(String loadBalancerPoolName) { + this.loadBalancerPoolName = loadBalancerPoolName; + } + + public String 
getLoadBalancerPoolDescription() { + return loadBalancerPoolDescription; + } + + public void setLoadBalancerPoolDescription(String loadBalancerPoolDescription) { + this.loadBalancerPoolDescription = loadBalancerPoolDescription; + } + + public String getLoadBalancerPoolProtocol() { + return loadBalancerPoolProtocol; + } + + public void setLoadBalancerPoolProtocol(String loadBalancerPoolProtocol) { + this.loadBalancerPoolProtocol = loadBalancerPoolProtocol; + } + + public String getLoadBalancerPoolLbAlgorithm() { + return loadBalancerPoolLbAlgorithm; + } + + public void setLoadBalancerPoolLbAlgorithm(String loadBalancerPoolLbAlgorithm) { + this.loadBalancerPoolLbAlgorithm = loadBalancerPoolLbAlgorithm; + } + + public String getNeutronLoadBalancerPoolHealthMonitorID() { + return neutronLoadBalancerPoolHealthMonitorID; + } + + public void setNeutronLoadBalancerPoolHealthMonitorID(String neutronLoadBalancerPoolHealthMonitorID) { + this.neutronLoadBalancerPoolHealthMonitorID = neutronLoadBalancerPoolHealthMonitorID; + } + + public Boolean getLoadBalancerPoolAdminIsStateIsUp() { + return loadBalancerPoolAdminStateIsUp; + } + + public void setLoadBalancerPoolAdminStateIsUp(Boolean loadBalancerPoolAdminStateIsUp) { + this.loadBalancerPoolAdminStateIsUp = loadBalancerPoolAdminStateIsUp; + } + + public String getLoadBalancerPoolStatus() { + return loadBalancerPoolStatus; + } + + public void setLoadBalancerPoolStatus(String loadBalancerPoolStatus) { + this.loadBalancerPoolStatus = loadBalancerPoolStatus; + } + + public List getLoadBalancerPoolMembers() { + return loadBalancerPoolMembers; + } + + public void setLoadBalancerPoolMembers(List loadBalancerPoolMembers) { + this.loadBalancerPoolMembers = loadBalancerPoolMembers; + } + + public NeutronLoadBalancerPool extractFields(List fields) { + NeutronLoadBalancerPool ans = new NeutronLoadBalancerPool(); + Iterator i = fields.iterator(); + while (i.hasNext()) { + String s = i.next(); + if (s.equals("id")) { + ans.setLoadBalancerPoolID(this.getLoadBalancerPoolID()); + } + if (s.equals("tenant_id")) { + ans.setLoadBalancerPoolTenantID(this.getLoadBalancerPoolTenantID()); + } + if (s.equals("name")) { + ans.setLoadBalancerPoolName(this.getLoadBalancerPoolName()); + } + if(s.equals("description")) { + ans.setLoadBalancerPoolDescription(this.getLoadBalancerPoolDescription()); + } + if(s.equals("protocol")) { + ans.setLoadBalancerPoolProtocol(this.getLoadBalancerPoolProtocol()); + } + if(s.equals("lb_algorithm")) { + ans.setLoadBalancerPoolLbAlgorithm(this.getLoadBalancerPoolLbAlgorithm()); + } + if(s.equals("healthmonitor_id")) { + ans.setNeutronLoadBalancerPoolHealthMonitorID(this.getNeutronLoadBalancerPoolHealthMonitorID()); + } + if (s.equals("admin_state_up")) { + ans.setLoadBalancerPoolAdminStateIsUp(loadBalancerPoolAdminStateIsUp); + } + if (s.equals("status")) { + ans.setLoadBalancerPoolStatus(this.getLoadBalancerPoolStatus()); + } + if (s.equals("members")) { + ans.setLoadBalancerPoolMembers(getLoadBalancerPoolMembers()); + } + } + return ans; + } +} \ No newline at end of file diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPoolMember.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPoolMember.java new file mode 100644 index 0000000000..577c3bb528 --- /dev/null +++ 
b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronLoadBalancerPoolMember.java @@ -0,0 +1,160 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron; + +import org.opendaylight.controller.configuration.ConfigurationObject; + +import javax.xml.bind.annotation.XmlElement; +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; + +public class NeutronLoadBalancerPoolMember extends ConfigurationObject implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * TODO: Plumb into LBaaS Pool. Members are nested underneath Pool CRUD. + */ + @XmlElement (name="id") + String poolMemberID; + + @XmlElement (name="tenant_id") + String poolMemberTenantID; + + @XmlElement (name="address") + String poolMemberAddress; + + @XmlElement (name="protocol_port") + Integer poolMemberProtoPort; + + @XmlElement (name="admin_state_up") + Boolean poolMemberAdminStateIsUp; + + @XmlElement (name="weight") + Integer poolMemberWeight; + + @XmlElement (name="subnet_id") + String poolMemberSubnetID; + + @XmlElement (name="status") + String poolMemberStatus; + + public NeutronLoadBalancerPoolMember() { + } + + public String getPoolMemberID() { + return poolMemberID; + } + + public void setPoolMemberID(String poolMemberID) { + this.poolMemberID = poolMemberID; + } + + public String getPoolMemberTenantID() { + return poolMemberTenantID; + } + + public void setPoolMemberTenantID(String poolMemberTenantID) { + this.poolMemberTenantID = poolMemberTenantID; + } + + public String getPoolMemberAddress() { + return poolMemberAddress; + } + + public void setPoolMemberAddress(String poolMemberAddress) { + this.poolMemberAddress = poolMemberAddress; + } + + public Integer getPoolMemberProtoPort() { + return poolMemberProtoPort; + } + + public void setPoolMemberProtoPort(Integer poolMemberProtoPort) { + this.poolMemberProtoPort = poolMemberProtoPort; + } + + public Boolean getPoolMemberAdminStateIsUp() { + return poolMemberAdminStateIsUp; + } + + public void setPoolMemberAdminStateIsUp(Boolean poolMemberAdminStateIsUp) { + this.poolMemberAdminStateIsUp = poolMemberAdminStateIsUp; + } + + public Integer getPoolMemberWeight() { + return poolMemberWeight; + } + + public void setPoolMemberWeight(Integer poolMemberWeight) { + this.poolMemberWeight = poolMemberWeight; + } + + public String getPoolMemberSubnetID() { + return poolMemberSubnetID; + } + + public void setPoolMemberSubnetID(String poolMemberSubnetID) { + this.poolMemberSubnetID = poolMemberSubnetID; + } + + public String getPoolMemberStatus() { + return poolMemberStatus; + } + + public void setPoolMemberStatus(String poolMemberStatus) { + this.poolMemberStatus = poolMemberStatus; + } + + public NeutronLoadBalancerPoolMember extractFields(List fields) { + NeutronLoadBalancerPoolMember ans = new NeutronLoadBalancerPoolMember(); + Iterator i = fields.iterator(); + while (i.hasNext()) { + String s = i.next(); + if (s.equals("id")) { + ans.setPoolMemberID(this.getPoolMemberID()); + } + if (s.equals("tenant_id")) { + ans.setPoolMemberTenantID(this.getPoolMemberTenantID()); + } + if (s.equals("address")) { + ans.setPoolMemberAddress(this.getPoolMemberAddress()); + } + 
if(s.equals("protocol_port")) { + ans.setPoolMemberProtoPort(this.getPoolMemberProtoPort()); + } + if (s.equals("admin_state_up")) { + ans.setPoolMemberAdminStateIsUp(poolMemberAdminStateIsUp); + } + if(s.equals("weight")) { + ans.setPoolMemberWeight(this.getPoolMemberWeight()); + } + if(s.equals("subnet_id")) { + ans.setPoolMemberSubnetID(this.getPoolMemberSubnetID()); + } + if (s.equals("status")) { + ans.setPoolMemberStatus(this.getPoolMemberStatus()); + } + } + return ans; + } + @Override public String toString() { + return "NeutronLoadBalancerPoolMember{" + + "poolMemberID='" + poolMemberID + '\'' + + ", poolMemberTenantID='" + poolMemberTenantID + '\'' + + ", poolMemberAddress='" + poolMemberAddress + '\'' + + ", poolMemberProtoPort=" + poolMemberProtoPort + + ", poolMemberAdminStateIsUp=" + poolMemberAdminStateIsUp + + ", poolMemberWeight=" + poolMemberWeight + + ", poolMemberSubnetID='" + poolMemberSubnetID + '\'' + + ", poolMemberStatus='" + poolMemberStatus + '\'' + + '}'; + } +} diff --git a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronPort.java b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronPort.java index b32b01cb3f..803e5e8d41 100644 --- a/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronPort.java +++ b/opendaylight/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/NeutronPort.java @@ -235,6 +235,11 @@ public class NeutronPort extends ConfigurationObject implements Serializable, IN if (s.equals("tenant_id")) { ans.setTenantID(this.getTenantID()); } + if (s.equals("security_groups")) { + List securityGroups = new ArrayList(); + securityGroups.addAll(this.getSecurityGroups()); + ans.setSecurityGroups(securityGroups); + } } return ans; } diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/INeutronLoadBalancerPoolMemberRequest.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/INeutronLoadBalancerPoolMemberRequest.java new file mode 100644 index 0000000000..9d6616373c --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/INeutronLoadBalancerPoolMemberRequest.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPoolMember; + +import javax.xml.bind.annotation.XmlElement; +import java.util.List; + +public class INeutronLoadBalancerPoolMemberRequest { + + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="member") + NeutronLoadBalancerPoolMember singletonLoadBalancerPoolMember; + + @XmlElement(name="members") + List bulkRequest; + + INeutronLoadBalancerPoolMemberRequest() { + } + + INeutronLoadBalancerPoolMemberRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancerPoolMember = null; + } + + INeutronLoadBalancerPoolMemberRequest(NeutronLoadBalancerPoolMember group) { + singletonLoadBalancerPoolMember = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancerPoolMember getSingleton() { + return singletonLoadBalancerPoolMember; + } + + public boolean isSingleton() { + return (singletonLoadBalancerPoolMember != null); + } +} \ No newline at end of file diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorNorthbound.java new file mode 100644 index 0000000000..6cd2673ff8 --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorNorthbound.java @@ -0,0 +1,410 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + + +import org.codehaus.enunciate.jaxrs.ResponseCode; +import org.codehaus.enunciate.jaxrs.StatusCodes; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerHealthMonitorAware; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerHealthMonitorCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancer; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerHealthMonitor; +import org.opendaylight.controller.northbound.commons.RestMessages; +import org.opendaylight.controller.northbound.commons.exception.BadRequestException; +import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException; +import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; +import org.opendaylight.controller.sal.utils.ServiceHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +/** + * Neutron Northbound REST APIs for Load Balancer HealthMonitor.
+ * This class provides REST APIs for managing neutron LoadBalancerHealthMonitor
+ *
+ * Authentication scheme : HTTP Basic
+ * Authentication realm : opendaylight
+ * Transport : HTTP and HTTPS
+ *
+ * HTTPS Authentication is disabled by default. Administrator can enable it in
+ * tomcat-server.xml after adding a proper keystore / SSL certificate from a
+ * trusted authority.
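+ *
+ * As an illustration only (field values below are hypothetical, not taken from
+ * this patch), a create request POSTed to /healthmonitors carries a JSON body
+ * keyed by the XmlElement names declared on NeutronLoadBalancerHealthMonitor,
+ * wrapped in the "healthmonitor" element of NeutronLoadBalancerHealthMonitorRequest:
+ * { "healthmonitor": { "type": "HTTP", "delay": 5, "timeout": 3,
+ *   "max_retries": 3, "http_method": "GET", "url_path": "/",
+ *   "expected_codes": "200", "admin_state_up": true } }
+ *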
+ * More info : + * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration + * + */ +@Path("/healthmonitors") +public class NeutronLoadBalancerHealthMonitorNorthbound { + private static final Logger logger = LoggerFactory.getLogger(NeutronLoadBalancer.class); + + private NeutronLoadBalancerHealthMonitor extractFields(NeutronLoadBalancerHealthMonitor o, List fields) { + return o.extractFields(fields); + } + + /** + * Returns a list of all LoadBalancerHealthMonitor */ + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + + public Response listGroups( + // return fields + @QueryParam("fields") List fields, + // OpenStack LoadBalancerHealthMonitor attributes + @QueryParam("id") String queryLoadBalancerHealthMonitorID, + @QueryParam("tenant_id") String queryLoadBalancerHealthMonitorTenantID, + // TODO "type" is being a property by the JSON parser. + @QueryParam("type") String queryLoadBalancerHealthMonitorType, + @QueryParam("delay") Integer queryLoadBalancerHealthMonitorDelay, + @QueryParam("timeout") Integer queryLoadBalancerHealthMonitorTimeout, + @QueryParam("max_retries") Integer queryLoadBalancerHealthMonitorMaxRetries, + @QueryParam("http_method") String queryLoadBalancerHealthMonitorHttpMethod, + @QueryParam("url_path") String queryLoadBalancerHealthMonitorUrlPath, + @QueryParam("expected_codes") String queryLoadBalancerHealthMonitorExpectedCodes, + @QueryParam("admin_state_up") Boolean queryLoadBalancerHealthMonitorIsAdminStateUp, + @QueryParam("status") String queryLoadBalancerHealthMonitorStatus, + // pagination + @QueryParam("limit") String limit, + @QueryParam("marker") String marker, + @QueryParam("page_reverse") String pageReverse + // sorting not supported + ) { + INeutronLoadBalancerHealthMonitorCRUD loadBalancerHealthMonitorInterface = NeutronCRUDInterfaces + .getINeutronLoadBalancerHealthMonitorCRUD(this); + if (loadBalancerHealthMonitorInterface == null) { + throw new ServiceUnavailableException("LoadBalancerHealthMonitor CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + List allLoadBalancerHealthMonitors = loadBalancerHealthMonitorInterface.getAllNeutronLoadBalancerHealthMonitors(); + List ans = new ArrayList(); + Iterator i = allLoadBalancerHealthMonitors.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerHealthMonitor nsg = i.next(); + if ((queryLoadBalancerHealthMonitorID == null || + queryLoadBalancerHealthMonitorID.equals(nsg.getLoadBalancerHealthMonitorID())) && + (queryLoadBalancerHealthMonitorTenantID == null || + queryLoadBalancerHealthMonitorTenantID.equals + (nsg.getLoadBalancerHealthMonitorTenantID())) && + (queryLoadBalancerHealthMonitorType == null || + queryLoadBalancerHealthMonitorType.equals + (nsg.getLoadBalancerHealthMonitorType())) && + (queryLoadBalancerHealthMonitorDelay == null || + queryLoadBalancerHealthMonitorDelay.equals + (nsg.getLoadBalancerHealthMonitorDelay())) && + (queryLoadBalancerHealthMonitorTimeout == null || + queryLoadBalancerHealthMonitorTimeout.equals + (nsg.getLoadBalancerHealthMonitorTimeout())) && + (queryLoadBalancerHealthMonitorMaxRetries == null || + queryLoadBalancerHealthMonitorMaxRetries.equals + (nsg.getLoadBalancerHealthMonitorMaxRetries())) && + (queryLoadBalancerHealthMonitorHttpMethod == null || + queryLoadBalancerHealthMonitorHttpMethod.equals + 
(nsg.getLoadBalancerHealthMonitorHttpMethod())) && + (queryLoadBalancerHealthMonitorUrlPath == null || + queryLoadBalancerHealthMonitorUrlPath.equals + (nsg.getLoadBalancerHealthMonitorUrlPath())) && + (queryLoadBalancerHealthMonitorExpectedCodes == null || + queryLoadBalancerHealthMonitorExpectedCodes.equals + (nsg.getLoadBalancerHealthMonitorExpectedCodes())) && + (queryLoadBalancerHealthMonitorIsAdminStateUp == null || + queryLoadBalancerHealthMonitorIsAdminStateUp.equals + (nsg.getLoadBalancerHealthMonitorAdminStateIsUp())) && + (queryLoadBalancerHealthMonitorStatus == null || + queryLoadBalancerHealthMonitorStatus.equals + (nsg.getLoadBalancerHealthMonitorStatus()))) { + if (fields.size() > 0) { + ans.add(extractFields(nsg,fields)); + } else { + ans.add(nsg); + } + } + } + return Response.status(200).entity( + new NeutronLoadBalancerHealthMonitorRequest(ans)).build(); + } + + /** + * Returns a specific LoadBalancerHealthMonitor */ + + @Path("{loadBalancerHealthMonitorID}") + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response showLoadBalancerHealthMonitor(@PathParam("loadBalancerHealthMonitorID") String loadBalancerHealthMonitorID, + // return fields + @QueryParam("fields") List fields) { + INeutronLoadBalancerHealthMonitorCRUD loadBalancerHealthMonitorInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerHealthMonitorCRUD(this); + if (loadBalancerHealthMonitorInterface == null) { + throw new ServiceUnavailableException("LoadBalancerHealthMonitor CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (!loadBalancerHealthMonitorInterface.neutronLoadBalancerHealthMonitorExists(loadBalancerHealthMonitorID)) { + throw new ResourceNotFoundException("LoadBalancerHealthMonitor UUID does not exist."); + } + if (fields.size() > 0) { + NeutronLoadBalancerHealthMonitor ans = loadBalancerHealthMonitorInterface.getNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID); + return Response.status(200).entity( + new NeutronLoadBalancerHealthMonitorRequest(extractFields(ans, fields))).build(); + } else { + return Response.status(200).entity(new NeutronLoadBalancerHealthMonitorRequest(loadBalancerHealthMonitorInterface.getNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID))).build(); + } + } + + /** + * Creates new LoadBalancerHealthMonitor */ + + @POST + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 201, condition = "Created"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response createLoadBalancerHealthMonitors(final NeutronLoadBalancerHealthMonitorRequest input) { + INeutronLoadBalancerHealthMonitorCRUD loadBalancerHealthMonitorInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerHealthMonitorCRUD(this); + if (loadBalancerHealthMonitorInterface == null) { + throw new ServiceUnavailableException("LoadBalancerHealthMonitor CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (input.isSingleton()) { 
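+ // Singleton create: reject a duplicate UUID, give any registered
+ // INeutronLoadBalancerHealthMonitorAware services a chance to veto via
+ // canCreateNeutronLoadBalancerHealthMonitor, then store the object and
+ // fire the created notification.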
+ NeutronLoadBalancerHealthMonitor singleton = input.getSingleton(); + + /* + * Verify that the LoadBalancerHealthMonitor doesn't already exist. + */ + if (loadBalancerHealthMonitorInterface.neutronLoadBalancerHealthMonitorExists(singleton.getLoadBalancerHealthMonitorID())) { + throw new BadRequestException("LoadBalancerHealthMonitor UUID already exists"); + } + loadBalancerHealthMonitorInterface.addNeutronLoadBalancerHealthMonitor(singleton); + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerHealthMonitorAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + int status = service.canCreateNeutronLoadBalancerHealthMonitor(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerHealthMonitorInterface.addNeutronLoadBalancerHealthMonitor(singleton); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + service.neutronLoadBalancerHealthMonitorCreated(singleton); + } + } + } else { + List bulk = input.getBulk(); + Iterator i = bulk.iterator(); + HashMap testMap = new HashMap(); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerHealthMonitorAware.class, this, null); + while (i.hasNext()) { + NeutronLoadBalancerHealthMonitor test = i.next(); + + /* + * Verify that the firewall policy doesn't already exist + */ + + if (loadBalancerHealthMonitorInterface + .neutronLoadBalancerHealthMonitorExists(test.getLoadBalancerHealthMonitorID())) { + throw new BadRequestException("LoadBalancerHealthMonitor UUID already is already created"); + } + if (testMap.containsKey(test.getLoadBalancerHealthMonitorID())) { + throw new BadRequestException("LoadBalancerHealthMonitor UUID already exists"); + } + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + int status = service.canCreateNeutronLoadBalancerHealthMonitor(test); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + } + /* + * now, each element of the bulk request can be added to the cache + */ + i = bulk.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerHealthMonitor test = i.next(); + loadBalancerHealthMonitorInterface.addNeutronLoadBalancerHealthMonitor(test); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + service.neutronLoadBalancerHealthMonitorCreated(test); + } + } + } + } + return Response.status(201).entity(input).build(); + } + + /** + * Updates a LoadBalancerHealthMonitor Policy + */ + @Path("{loadBalancerHealthMonitorID}") + @PUT + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response updateLoadBalancerHealthMonitor( + @PathParam("loadBalancerHealthMonitorID") String loadBalancerHealthMonitorID, + 
final NeutronLoadBalancerHealthMonitorRequest input) { + INeutronLoadBalancerHealthMonitorCRUD loadBalancerHealthMonitorInterface = NeutronCRUDInterfaces + .getINeutronLoadBalancerHealthMonitorCRUD(this); + if (loadBalancerHealthMonitorInterface == null) { + throw new ServiceUnavailableException("LoadBalancerHealthMonitor CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancerHealthMonitor exists and there is only one delta provided + */ + if (!loadBalancerHealthMonitorInterface.neutronLoadBalancerHealthMonitorExists(loadBalancerHealthMonitorID)) { + throw new ResourceNotFoundException("LoadBalancerHealthMonitor UUID does not exist."); + } + if (!input.isSingleton()) { + throw new BadRequestException("Only singleton edit supported"); + } + NeutronLoadBalancerHealthMonitor delta = input.getSingleton(); + NeutronLoadBalancerHealthMonitor original = loadBalancerHealthMonitorInterface + .getNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID); + + /* + * updates restricted by Neutron + */ + if (delta.getLoadBalancerHealthMonitorID() != null || + delta.getLoadBalancerHealthMonitorTenantID() != null || + delta.getLoadBalancerHealthMonitorType() != null || + delta.getLoadBalancerHealthMonitorDelay() != null || + delta.getLoadBalancerHealthMonitorTimeout() != null || + delta.getLoadBalancerHealthMonitorMaxRetries() != null || + delta.getLoadBalancerHealthMonitorHttpMethod() != null || + delta.getLoadBalancerHealthMonitorUrlPath() != null || + delta.getLoadBalancerHealthMonitorExpectedCodes() != null || + delta.getLoadBalancerHealthMonitorAdminStateIsUp() != null || + delta.getLoadBalancerHealthMonitorStatus() != null) { + throw new BadRequestException("Attribute edit blocked by Neutron"); + } + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerHealthMonitorAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + int status = service.canUpdateNeutronLoadBalancerHealthMonitor(delta, original); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + /* + * update the object and return it + */ + loadBalancerHealthMonitorInterface.updateNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID, delta); + NeutronLoadBalancerHealthMonitor updatedLoadBalancerHealthMonitor = loadBalancerHealthMonitorInterface + .getNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + service.neutronLoadBalancerHealthMonitorUpdated(updatedLoadBalancerHealthMonitor); + } + } + return Response.status(200).entity(new NeutronLoadBalancerHealthMonitorRequest + (loadBalancerHealthMonitorInterface.getNeutronLoadBalancerHealthMonitor + (loadBalancerHealthMonitorID))).build(); + } + + + + /** + * Deletes a LoadBalancerHealthMonitor + * */ + @Path("{loadBalancerHealthMonitorID}") + @DELETE + @StatusCodes({ + @ResponseCode(code = 204, condition = "No Content"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response deleteLoadBalancerHealthMonitor( + @PathParam("loadBalancerHealthMonitorID") String 
loadBalancerHealthMonitorID) { + INeutronLoadBalancerHealthMonitorCRUD loadBalancerHealthMonitorInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerHealthMonitorCRUD(this); + if (loadBalancerHealthMonitorInterface == null) { + throw new ServiceUnavailableException("LoadBalancerHealthMonitor CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + /* + * verify the LoadBalancerHealthMonitor exists and it isn't currently in use + */ + if (!loadBalancerHealthMonitorInterface.neutronLoadBalancerHealthMonitorExists(loadBalancerHealthMonitorID)) { + throw new ResourceNotFoundException("LoadBalancerHealthMonitor UUID does not exist."); + } + if (loadBalancerHealthMonitorInterface.neutronLoadBalancerHealthMonitorInUse(loadBalancerHealthMonitorID)) { + return Response.status(409).build(); + } + NeutronLoadBalancerHealthMonitor singleton = loadBalancerHealthMonitorInterface.getNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerHealthMonitorAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + int status = service.canDeleteNeutronLoadBalancerHealthMonitor(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerHealthMonitorInterface.removeNeutronLoadBalancerHealthMonitor(loadBalancerHealthMonitorID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerHealthMonitorAware service = (INeutronLoadBalancerHealthMonitorAware) instance; + service.neutronLoadBalancerHealthMonitorDeleted(singleton); + } + } + return Response.status(204).build(); + } +} diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorRequest.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorRequest.java new file mode 100644 index 0000000000..dc4af2f31f --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerHealthMonitorRequest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerHealthMonitor; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerHealthMonitorRequest { + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="healthmonitor") + NeutronLoadBalancerHealthMonitor singletonLoadBalancerHealthMonitor; + + @XmlElement(name="healthmonitors") + List bulkRequest; + + NeutronLoadBalancerHealthMonitorRequest() { + } + + NeutronLoadBalancerHealthMonitorRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancerHealthMonitor = null; + } + + NeutronLoadBalancerHealthMonitorRequest(NeutronLoadBalancerHealthMonitor group) { + singletonLoadBalancerHealthMonitor = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancerHealthMonitor getSingleton() { + return singletonLoadBalancerHealthMonitor; + } + + public boolean isSingleton() { + return (singletonLoadBalancerHealthMonitor != null); + } +} \ No newline at end of file diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerNorthbound.java new file mode 100644 index 0000000000..f3ef39f7ba --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerNorthbound.java @@ -0,0 +1,391 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + + +import org.codehaus.enunciate.jaxrs.ResponseCode; +import org.codehaus.enunciate.jaxrs.StatusCodes; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerListenerAware; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerListenerCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerListener; +import org.opendaylight.controller.northbound.commons.RestMessages; +import org.opendaylight.controller.northbound.commons.exception.BadRequestException; +import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException; +import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; +import org.opendaylight.controller.sal.utils.ServiceHelper; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +/** + * Neutron Northbound REST APIs for LoadBalancerListener Policies.
+ * This class provides REST APIs for managing neutron LoadBalancerListener Policies
+ *
+ * Authentication scheme : HTTP Basic
+ * Authentication realm : opendaylight
+ * Transport : HTTP and HTTPS
+ *
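+ * As an illustration only (field values are hypothetical, and the "listener"
+ * wrapper element is an assumption mirroring the other *Request classes in this
+ * patch), a create request POSTed to /listeners carries a JSON body keyed by the
+ * XmlElement names declared on NeutronLoadBalancerListener:
+ * { "listener": { "name": "http-listener", "protocol": "HTTP",
+ *   "protocol_port": "80", "default_pool_id": "<pool-uuid>",
+ *   "load_balancer_id": "<lb-uuid>", "admin_state_up": true } }
+ *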
+ * More info : + * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration + * + */ +@Path("/listeners") +public class NeutronLoadBalancerListenerNorthbound { + + private NeutronLoadBalancerListener extractFields(NeutronLoadBalancerListener o, List fields) { + return o.extractFields(fields); + } + + /** + * Returns a list of all LoadBalancerListener */ + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + + public Response listGroups( + // return fields + @QueryParam("fields") List fields, + // OpenStack LoadBalancerListener attributes + @QueryParam("id") String queryLoadBalancerListenerID, + @QueryParam("default_pool_id") String queryLoadBalancerListenerDefaultPoolID, + @QueryParam("tenant_id") String queryLoadBalancerListenerTenantID, + @QueryParam("name") String queryLoadBalancerListenerName, + @QueryParam("description") String queryLoadBalancerListenerDescription, + @QueryParam("shared") String queryLoadBalancerListenerIsShared, + @QueryParam("protocol") String queryLoadBalancerListenerProtocol, + @QueryParam("protocol_port") String queryLoadBalancerListenerProtocolPort, + @QueryParam("load_balancer_id") String queryLoadBalancerListenerLoadBalancerID, + @QueryParam("admin_state_up") String queryLoadBalancerListenerAdminIsUp, + @QueryParam("status") String queryLoadBalancerListenerStatus, + // pagination + @QueryParam("limit") String limit, + @QueryParam("marker") String marker, + @QueryParam("page_reverse") String pageReverse + // sorting not supported + ) { + INeutronLoadBalancerListenerCRUD loadBalancerListenerInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerCRUD(this); + // INeutronLoadBalancerListenerRuleCRUD firewallRuleInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerRuleCRUD(this); + + if (loadBalancerListenerInterface == null) { + throw new ServiceUnavailableException("LoadBalancerListener CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + List allLoadBalancerListeners = loadBalancerListenerInterface.getAllNeutronLoadBalancerListeners(); + // List allLoadBalancerListenerRules = firewallRuleInterface.getAllNeutronLoadBalancerListenerRules(); + List ans = new ArrayList(); + // List rules = new ArrayList(); + Iterator i = allLoadBalancerListeners.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerListener nsg = i.next(); + if ((queryLoadBalancerListenerID == null || + queryLoadBalancerListenerID.equals(nsg.getLoadBalancerListenerID())) && + (queryLoadBalancerListenerDefaultPoolID == null || + queryLoadBalancerListenerDefaultPoolID.equals(nsg.getNeutronLoadBalancerListenerDefaultPoolID())) && + (queryLoadBalancerListenerTenantID == null || + queryLoadBalancerListenerTenantID.equals(nsg.getLoadBalancerListenerTenantID())) && + (queryLoadBalancerListenerName == null || + queryLoadBalancerListenerName.equals(nsg.getLoadBalancerListenerName())) && + (queryLoadBalancerListenerDescription == null || + queryLoadBalancerListenerDescription.equals(nsg.getLoadBalancerListenerDescription())) && + (queryLoadBalancerListenerIsShared == null || + queryLoadBalancerListenerIsShared.equals(nsg.getLoadBalancerListenerIsShared())) && + (queryLoadBalancerListenerProtocol == null || + queryLoadBalancerListenerProtocol.equals(nsg.getNeutronLoadBalancerListenerProtocol())) && + (queryLoadBalancerListenerProtocolPort == null || + 
queryLoadBalancerListenerProtocolPort.equals(nsg.getNeutronLoadBalancerListenerProtocolPort())) && + (queryLoadBalancerListenerLoadBalancerID == null || + queryLoadBalancerListenerLoadBalancerID.equals(nsg.getNeutronLoadBalancerListenerLoadBalancerID())) && + (queryLoadBalancerListenerAdminIsUp == null || + queryLoadBalancerListenerAdminIsUp.equals(nsg.getLoadBalancerListenerAdminStateIsUp())) && + (queryLoadBalancerListenerStatus == null || + queryLoadBalancerListenerStatus.equals(nsg.getLoadBalancerListenerStatus()))) { + if (fields.size() > 0) { + ans.add(extractFields(nsg,fields)); + } else { + ans.add(nsg); + } + } + } + return Response.status(200).entity( + new NeutronLoadBalancerListenerRequest(ans)).build(); + } + + /** + * Returns a specific LoadBalancerListener */ + + @Path("{loadBalancerListenerID}") + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response showLoadBalancerListener(@PathParam("loadBalancerListenerID") String loadBalancerListenerID, + // return fields + @QueryParam("fields") List fields) { + INeutronLoadBalancerListenerCRUD loadBalancerListenerInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerCRUD(this); + if (loadBalancerListenerInterface == null) { + throw new ServiceUnavailableException("LoadBalancerListener CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (!loadBalancerListenerInterface.neutronLoadBalancerListenerExists(loadBalancerListenerID)) { + throw new ResourceNotFoundException("LoadBalancerListener UUID does not exist."); + } + if (fields.size() > 0) { + NeutronLoadBalancerListener ans = loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID); + return Response.status(200).entity( + new NeutronLoadBalancerListenerRequest(extractFields(ans, fields))).build(); + } else { + return Response.status(200).entity(new NeutronLoadBalancerListenerRequest(loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID))).build(); + } + } + + /** + * Creates new LoadBalancerListener */ + + @POST + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 201, condition = "Created"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response createLoadBalancerListeners(final NeutronLoadBalancerListenerRequest input) { + INeutronLoadBalancerListenerCRUD loadBalancerListenerInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerCRUD(this); + if (loadBalancerListenerInterface == null) { + throw new ServiceUnavailableException("LoadBalancerListener CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (input.isSingleton()) { + NeutronLoadBalancerListener singleton = input.getSingleton(); + + /* + * Verify that the LoadBalancerListener doesn't already exist. 
+ */ + if (loadBalancerListenerInterface.neutronLoadBalancerListenerExists(singleton.getLoadBalancerListenerID())) { + throw new BadRequestException("LoadBalancerListener UUID already exists"); + } + loadBalancerListenerInterface.addNeutronLoadBalancerListener(singleton); + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerListenerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + int status = service.canCreateNeutronLoadBalancerListener(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerListenerInterface.addNeutronLoadBalancerListener(singleton); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + service.neutronLoadBalancerListenerCreated(singleton); + } + } + } else { + List bulk = input.getBulk(); + Iterator i = bulk.iterator(); + HashMap testMap = new HashMap(); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerListenerAware.class, this, null); + while (i.hasNext()) { + NeutronLoadBalancerListener test = i.next(); + + /* + * Verify that the firewall policy doesn't already exist + */ + + if (loadBalancerListenerInterface.neutronLoadBalancerListenerExists(test.getLoadBalancerListenerID())) { + throw new BadRequestException("LoadBalancerListener UUID already is already created"); + } + if (testMap.containsKey(test.getLoadBalancerListenerID())) { + throw new BadRequestException("LoadBalancerListener UUID already exists"); + } + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + int status = service.canCreateNeutronLoadBalancerListener(test); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + } + /* + * now, each element of the bulk request can be added to the cache + */ + i = bulk.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerListener test = i.next(); + loadBalancerListenerInterface.addNeutronLoadBalancerListener(test); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + service.neutronLoadBalancerListenerCreated(test); + } + } + } + } + return Response.status(201).entity(input).build(); + } + + /** + * Updates a LoadBalancerListener Policy + */ + @Path("{loadBalancerListenerID}") + @PUT + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response updateLoadBalancerListener( + @PathParam("loadBalancerListenerID") String loadBalancerListenerID, final NeutronLoadBalancerListenerRequest input) { + INeutronLoadBalancerListenerCRUD loadBalancerListenerInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerCRUD(this); + if (loadBalancerListenerInterface == null) { + throw new ServiceUnavailableException("LoadBalancerListener CRUD Interface " + + 
RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancerListener exists and there is only one delta provided + */ + if (!loadBalancerListenerInterface.neutronLoadBalancerListenerExists(loadBalancerListenerID)) { + throw new ResourceNotFoundException("LoadBalancerListener UUID does not exist."); + } + if (!input.isSingleton()) { + throw new BadRequestException("Only singleton edit supported"); + } + NeutronLoadBalancerListener delta = input.getSingleton(); + NeutronLoadBalancerListener original = loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID); + + /* + * updates restricted by Neutron + */ + if (delta.getLoadBalancerListenerID() != null || + delta.getNeutronLoadBalancerListenerDefaultPoolID() != null || + delta.getLoadBalancerListenerTenantID() != null || + delta.getLoadBalancerListenerName() != null || + delta.getLoadBalancerListenerDescription() != null || + delta.getLoadBalancerListenerIsShared() != null || + delta.getNeutronLoadBalancerListenerProtocol() != null || + delta.getNeutronLoadBalancerListenerProtocolPort() != null || + delta.getNeutronLoadBalancerListenerLoadBalancerID() != null || + delta.getLoadBalancerListenerAdminStateIsUp() != null || + delta.getLoadBalancerListenerStatus() != null) { + throw new BadRequestException("Attribute edit blocked by Neutron"); + } + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerListenerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + int status = service.canUpdateNeutronLoadBalancerListener(delta, original); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + /* + * update the object and return it + */ + loadBalancerListenerInterface.updateNeutronLoadBalancerListener(loadBalancerListenerID, delta); + NeutronLoadBalancerListener updatedLoadBalancerListener = loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + service.neutronLoadBalancerListenerUpdated(updatedLoadBalancerListener); + } + } + return Response.status(200).entity(new NeutronLoadBalancerListenerRequest(loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID))).build(); + } + + /** + * Deletes a LoadBalancerListener */ + + @Path("{loadBalancerListenerID}") + @DELETE + @StatusCodes({ + @ResponseCode(code = 204, condition = "No Content"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response deleteLoadBalancerListener( + @PathParam("loadBalancerListenerID") String loadBalancerListenerID) { + INeutronLoadBalancerListenerCRUD loadBalancerListenerInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerListenerCRUD(this); + if (loadBalancerListenerInterface == null) { + throw new ServiceUnavailableException("LoadBalancerListener CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancerListener exists and it isn't currently in use + */ + if (!loadBalancerListenerInterface.neutronLoadBalancerListenerExists(loadBalancerListenerID)) { + throw new 
ResourceNotFoundException("LoadBalancerListener UUID does not exist."); + } + if (loadBalancerListenerInterface.neutronLoadBalancerListenerInUse(loadBalancerListenerID)) { + return Response.status(409).build(); + } + NeutronLoadBalancerListener singleton = loadBalancerListenerInterface.getNeutronLoadBalancerListener(loadBalancerListenerID); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerListenerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + int status = service.canDeleteNeutronLoadBalancerListener(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + loadBalancerListenerInterface.removeNeutronLoadBalancerListener(loadBalancerListenerID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerListenerAware service = (INeutronLoadBalancerListenerAware) instance; + service.neutronLoadBalancerListenerDeleted(singleton); + } + } + return Response.status(204).build(); + } +} diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerRequest.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerRequest.java new file mode 100644 index 0000000000..ba375af926 --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerListenerRequest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerListener; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerListenerRequest { + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="listener") + NeutronLoadBalancerListener singletonLoadBalancerListener; + + @XmlElement(name="listeners") + List bulkRequest; + + NeutronLoadBalancerListenerRequest() { + } + + NeutronLoadBalancerListenerRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancerListener = null; + } + + NeutronLoadBalancerListenerRequest(NeutronLoadBalancerListener group) { + singletonLoadBalancerListener = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancerListener getSingleton() { + return singletonLoadBalancerListener; + } + + public boolean isSingleton() { + return (singletonLoadBalancerListener != null); + } +} \ No newline at end of file diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerNorthbound.java new file mode 100644 index 0000000000..748dffc8cf --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerNorthbound.java @@ -0,0 +1,384 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + + +import org.codehaus.enunciate.jaxrs.ResponseCode; +import org.codehaus.enunciate.jaxrs.StatusCodes; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerAware; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancer; +import org.opendaylight.controller.northbound.commons.RestMessages; +import org.opendaylight.controller.northbound.commons.exception.BadRequestException; +import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException; +import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; +import org.opendaylight.controller.sal.utils.ServiceHelper; + +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +/** + * Neutron Northbound REST APIs for LoadBalancer Policies.
+ * This class provides REST APIs for managing Neutron LoadBalancer Policies + * + *
+ *
+ * Authentication scheme : HTTP Basic
+ * Authentication realm : opendaylight
+ * Transport : HTTP and HTTPS
+ *
+ * HTTPS Authentication is disabled by default. Administrators can enable it in + * tomcat-server.xml after adding a proper keystore / SSL certificate from a + * trusted authority.
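As a rough illustration of the authentication scheme documented above, the sketch below lists load balancers through this northbound API using HTTP Basic credentials. The base URL, port, path prefix and the admin:admin credentials are assumptions that depend on the actual controller deployment; only the /loadbalancers path segment comes from the @Path annotation below.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Base64;

public class ListLoadBalancersExample {
    public static void main(String[] args) throws Exception {
        // Assumed base URL and credentials; adjust to the actual deployment.
        URL url = new URL("http://localhost:8080/controller/nb/v2/neutron/loadbalancers");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Accept", "application/json");
        // HTTP Basic authentication against the "opendaylight" realm.
        String credentials = Base64.getEncoder().encodeToString("admin:admin".getBytes("UTF-8"));
        conn.setRequestProperty("Authorization", "Basic " + credentials);
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}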
+ * More info : + * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration + * + */ +@Path("/loadbalancers") +public class NeutronLoadBalancerNorthbound { + + private NeutronLoadBalancer extractFields(NeutronLoadBalancer o, List fields) { + return o.extractFields(fields); + } + + /** + * Returns a list of all LoadBalancer */ + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + + public Response listGroups( + // return fields + @QueryParam("fields") List fields, + // OpenStack LoadBalancer attributes + @QueryParam("id") String queryLoadBalancerID, + @QueryParam("tenant_id") String queryLoadBalancerTenantID, + @QueryParam("name") String queryLoadBalancerName, + @QueryParam("description") String queryLoadBalancerDescription, + @QueryParam("status") String queryLoadBalancerStatus, + @QueryParam("vip_address") String queryLoadBalancerVipAddress, + @QueryParam("vip_subnet") String queryLoadBalancerVipSubnet, + // pagination + @QueryParam("limit") String limit, + @QueryParam("marker") String marker, + @QueryParam("page_reverse") String pageReverse + // sorting not supported + ) { + INeutronLoadBalancerCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerCRUD( + this); + // INeutronLoadBalancerRuleCRUD firewallRuleInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerRuleCRUD(this); + + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancer CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + List allLoadBalancers = loadBalancerPoolInterface.getAllNeutronLoadBalancers(); + // List allLoadBalancerRules = firewallRuleInterface.getAllNeutronLoadBalancerRules(); + List ans = new ArrayList(); + // List rules = new ArrayList(); + Iterator i = allLoadBalancers.iterator(); + while (i.hasNext()) { + NeutronLoadBalancer nsg = i.next(); + if ((queryLoadBalancerID == null || + queryLoadBalancerID.equals(nsg.getLoadBalancerID())) && + (queryLoadBalancerTenantID == null || + queryLoadBalancerTenantID.equals(nsg.getLoadBalancerTenantID())) && + (queryLoadBalancerName == null || + queryLoadBalancerName.equals(nsg.getLoadBalancerName())) && + (queryLoadBalancerDescription == null || + queryLoadBalancerDescription.equals(nsg.getLoadBalancerDescription())) && + (queryLoadBalancerVipAddress == null || + queryLoadBalancerVipAddress.equals(nsg.getLoadBalancerVipAddress())) && + (queryLoadBalancerVipSubnet == null || + queryLoadBalancerVipSubnet.equals(nsg.getLoadBalancerVipSubnetID()))) { + if (fields.size() > 0) { + ans.add(extractFields(nsg,fields)); + } else { + ans.add(nsg); + } + } + } + return Response.status(200).entity( + new NeutronLoadBalancerRequest(ans)).build(); + } + + /** + * Returns a specific LoadBalancer */ + + @Path("{loadBalancerPoolID}") + @GET + @Produces({ MediaType.APPLICATION_JSON }) + + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response showLoadBalancer(@PathParam("loadBalancerPoolID") String loadBalancerPoolID, + // return fields + @QueryParam("fields") List fields) { + INeutronLoadBalancerCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerCRUD( + this); 
+ if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancer CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (!loadBalancerPoolInterface.neutronLoadBalancerExists(loadBalancerPoolID)) { + throw new ResourceNotFoundException("LoadBalancer UUID does not exist."); + } + if (fields.size() > 0) { + NeutronLoadBalancer ans = loadBalancerPoolInterface.getNeutronLoadBalancer(loadBalancerPoolID); + return Response.status(200).entity( + new NeutronLoadBalancerRequest(extractFields(ans, fields))).build(); + } else { + return Response.status(200).entity(new NeutronLoadBalancerRequest(loadBalancerPoolInterface.getNeutronLoadBalancer( + loadBalancerPoolID))).build(); + } + } + + /** + * Creates new LoadBalancer */ + + @POST + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + + @StatusCodes({ + @ResponseCode(code = 201, condition = "Created"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response createLoadBalancers(final NeutronLoadBalancerRequest input) { + INeutronLoadBalancerCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerCRUD( + this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancer CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (input.isSingleton()) { + NeutronLoadBalancer singleton = input.getSingleton(); + + /* + * Verify that the LoadBalancer doesn't already exist. 
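The create, update and delete handlers in this class consult every registered INeutronLoadBalancerAware service twice: the can* callback is treated as an HTTP status code and aborts the request when it falls outside 200..299, and the matching notification callback fires after the cache has been modified. Below is a minimal sketch of such a service; the class name is hypothetical, and it assumes the interface declares exactly the callbacks invoked in this file.

import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerAware;
import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancer;

// Hypothetical implementation; shown only to illustrate the callback contract.
public class LoggingLoadBalancerAware implements INeutronLoadBalancerAware {

    public int canCreateNeutronLoadBalancer(NeutronLoadBalancer loadBalancer) {
        // Any value outside 200..299 is returned to the REST client and aborts the create.
        return 200;
    }

    public void neutronLoadBalancerCreated(NeutronLoadBalancer loadBalancer) {
        System.out.println("LoadBalancer created: " + loadBalancer.getLoadBalancerID());
    }

    public int canUpdateNeutronLoadBalancer(NeutronLoadBalancer delta, NeutronLoadBalancer original) {
        return 200;
    }

    public void neutronLoadBalancerUpdated(NeutronLoadBalancer updated) {
        System.out.println("LoadBalancer updated: " + updated.getLoadBalancerID());
    }

    public int canDeleteNeutronLoadBalancer(NeutronLoadBalancer loadBalancer) {
        return 200;
    }

    public void neutronLoadBalancerDeleted(NeutronLoadBalancer loadBalancer) {
        System.out.println("LoadBalancer deleted: " + loadBalancer.getLoadBalancerID());
    }
}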
+ */ + if (loadBalancerPoolInterface.neutronLoadBalancerExists(singleton.getLoadBalancerID())) { + throw new BadRequestException("LoadBalancer UUID already exists"); + } + loadBalancerPoolInterface.addNeutronLoadBalancer(singleton); + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + int status = service.canCreateNeutronLoadBalancer(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerPoolInterface.addNeutronLoadBalancer(singleton); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + service.neutronLoadBalancerCreated(singleton); + } + } + } else { + List bulk = input.getBulk(); + Iterator i = bulk.iterator(); + HashMap testMap = new HashMap(); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerAware.class, this, null); + while (i.hasNext()) { + NeutronLoadBalancer test = i.next(); + + /* + * Verify that the firewall policy doesn't already exist + */ + + if (loadBalancerPoolInterface.neutronLoadBalancerExists(test.getLoadBalancerID())) { + throw new BadRequestException("Load Balancer Pool UUID already is already created"); + } + if (testMap.containsKey(test.getLoadBalancerID())) { + throw new BadRequestException("Load Balancer Pool UUID already exists"); + } + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + int status = service.canCreateNeutronLoadBalancer(test); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + } + /* + * now, each element of the bulk request can be added to the cache + */ + i = bulk.iterator(); + while (i.hasNext()) { + NeutronLoadBalancer test = i.next(); + loadBalancerPoolInterface.addNeutronLoadBalancer(test); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + service.neutronLoadBalancerCreated(test); + } + } + } + } + return Response.status(201).entity(input).build(); + } + + /** + * Updates a LoadBalancer Policy + */ + @Path("{loadBalancerPoolID}") + @PUT + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response updateLoadBalancer( + @PathParam("loadBalancerPoolID") String loadBalancerPoolID, final NeutronLoadBalancerRequest input) { + INeutronLoadBalancerCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerCRUD( + this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancer CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancer exists and there is only one delta provided + */ + if (!loadBalancerPoolInterface.neutronLoadBalancerExists(loadBalancerPoolID)) { + throw new ResourceNotFoundException("LoadBalancer UUID does not exist."); + } + if 
(!input.isSingleton()) { + throw new BadRequestException("Only singleton edit supported"); + } + NeutronLoadBalancer delta = input.getSingleton(); + NeutronLoadBalancer original = loadBalancerPoolInterface.getNeutronLoadBalancer(loadBalancerPoolID); + + /* + * updates restricted by Neutron + */ + if (delta.getLoadBalancerID() != null || + delta.getLoadBalancerTenantID() != null || + delta.getLoadBalancerName() != null || + delta.getLoadBalancerDescription() != null || + delta.getLoadBalancerStatus() != null || + delta.getLoadBalancerVipAddress() != null || + delta.getLoadBalancerVipSubnetID() != null) { + throw new BadRequestException("Attribute edit blocked by Neutron"); + } + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + int status = service.canUpdateNeutronLoadBalancer(delta, original); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + /* + * update the object and return it + */ + loadBalancerPoolInterface.updateNeutronLoadBalancer(loadBalancerPoolID, delta); + NeutronLoadBalancer updatedLoadBalancer = loadBalancerPoolInterface.getNeutronLoadBalancer( + loadBalancerPoolID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + service.neutronLoadBalancerUpdated(updatedLoadBalancer); + } + } + return Response.status(200).entity(new NeutronLoadBalancerRequest(loadBalancerPoolInterface.getNeutronLoadBalancer( + loadBalancerPoolID))).build(); + } + + /** + * Deletes a LoadBalancer */ + + @Path("{loadBalancerPoolID}") + @DELETE + @StatusCodes({ + @ResponseCode(code = 204, condition = "No Content"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response deleteLoadBalancer( + @PathParam("loadBalancerPoolID") String loadBalancerPoolID) { + INeutronLoadBalancerCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerCRUD( + this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancer CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancer exists and it isn't currently in use + */ + if (!loadBalancerPoolInterface.neutronLoadBalancerExists(loadBalancerPoolID)) { + throw new ResourceNotFoundException("LoadBalancer UUID does not exist."); + } + if (loadBalancerPoolInterface.neutronLoadBalancerInUse(loadBalancerPoolID)) { + return Response.status(409).build(); + } + NeutronLoadBalancer singleton = loadBalancerPoolInterface.getNeutronLoadBalancer(loadBalancerPoolID); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + int status = service.canDeleteNeutronLoadBalancer(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + loadBalancerPoolInterface.removeNeutronLoadBalancer(loadBalancerPoolID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerAware service = (INeutronLoadBalancerAware) instance; + 
service.neutronLoadBalancerDeleted(singleton); + } + } + return Response.status(204).build(); + } +} diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolMembersNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolMembersNorthbound.java new file mode 100644 index 0000000000..ff56fa0a9d --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolMembersNorthbound.java @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.codehaus.enunciate.jaxrs.ResponseCode; +import org.codehaus.enunciate.jaxrs.StatusCodes; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolMemberAware; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolMemberCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPoolMember; +import org.opendaylight.controller.northbound.commons.RestMessages; +import org.opendaylight.controller.northbound.commons.exception.BadRequestException; +import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; +import org.opendaylight.controller.sal.utils.ServiceHelper; + +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + + +@Path("/pools/{loadBalancerPoolID}/members") +public class NeutronLoadBalancerPoolMembersNorthbound { + + private NeutronLoadBalancerPoolMember extractFields(NeutronLoadBalancerPoolMember o, List fields) { + return o.extractFields(fields); + } +/** + * Returns a list of all LoadBalancerPool + */ +@GET +@Produces({MediaType.APPLICATION_JSON}) +@StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 501, condition = "Not Implemented")}) + +public Response listMembers( + // return fields + @QueryParam("fields") List fields, + // OpenStack LoadBalancerPool attributes + @QueryParam("id") String queryLoadBalancerPoolMemberID, + @QueryParam("tenant_id") String queryLoadBalancerPoolMemberTenantID, + @QueryParam("address") String queryLoadBalancerPoolMemberAddress, + @QueryParam("protocol_port") String queryLoadBalancerPoolMemberProtoPort, + @QueryParam("admin_state_up") String queryLoadBalancerPoolMemberAdminStateUp, + @QueryParam("weight") String queryLoadBalancerPoolMemberWeight, + @QueryParam("subnet_id") String queryLoadBalancerPoolMemberSubnetID, + @QueryParam("status") String queryLoadBalancerPoolMemberStatus, + + // pagination + @QueryParam("limit") String limit, + @QueryParam("marker") String marker, + @QueryParam("page_reverse") 
String pageReverse + // sorting not supported +) { + INeutronLoadBalancerPoolMemberCRUD loadBalancerPoolMemberInterface = NeutronCRUDInterfaces + .getINeutronLoadBalancerPoolMemberCRUD(this); + if (loadBalancerPoolMemberInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPool CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + List allLoadBalancerPoolMembers = loadBalancerPoolMemberInterface + .getAllNeutronLoadBalancerPoolMembers(); + List ans = new ArrayList(); + Iterator i = allLoadBalancerPoolMembers.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerPoolMember nsg = i.next(); + if ((queryLoadBalancerPoolMemberID == null || + queryLoadBalancerPoolMemberID.equals(nsg.getPoolMemberID())) && + (queryLoadBalancerPoolMemberTenantID == null || + queryLoadBalancerPoolMemberTenantID.equals(nsg.getPoolMemberTenantID())) && + (queryLoadBalancerPoolMemberAddress == null || + queryLoadBalancerPoolMemberAddress.equals(nsg.getPoolMemberAddress())) && + (queryLoadBalancerPoolMemberAdminStateUp == null || + queryLoadBalancerPoolMemberAdminStateUp.equals(nsg.getPoolMemberAdminStateIsUp())) && + (queryLoadBalancerPoolMemberWeight == null || + queryLoadBalancerPoolMemberWeight.equals(nsg.getPoolMemberWeight())) && + (queryLoadBalancerPoolMemberSubnetID == null || + queryLoadBalancerPoolMemberSubnetID.equals(nsg.getPoolMemberSubnetID())) && + (queryLoadBalancerPoolMemberStatus == null || + queryLoadBalancerPoolMemberStatus.equals(nsg.getPoolMemberStatus()))) { + if (fields.size() > 0) { + ans.add(extractFields(nsg, fields)); + } else { + ans.add(nsg); + } + } + } + return Response.status(200).entity( + new INeutronLoadBalancerPoolMemberRequest(ans)).build(); +} + +/** + * Adds a Member to an LBaaS Pool member + */ +@Path("/pools/{loadBalancerPoolID}/members") +@PUT +@Produces({MediaType.APPLICATION_JSON}) +@Consumes({MediaType.APPLICATION_JSON}) +@StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented")}) +public Response createLoadBalancerPoolMember( INeutronLoadBalancerPoolMemberRequest input) { + + INeutronLoadBalancerPoolMemberCRUD loadBalancerPoolMemberInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerPoolMemberCRUD( + this); + if (loadBalancerPoolMemberInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPoolMember CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (input.isSingleton()) { + NeutronLoadBalancerPoolMember singleton = input.getSingleton(); + + /* + * Verify that the LoadBalancerPoolMember doesn't already exist. 
+ */ + if (loadBalancerPoolMemberInterface.neutronLoadBalancerPoolMemberExists( + singleton.getPoolMemberID())) { + throw new BadRequestException("LoadBalancerPoolMember UUID already exists"); + } + loadBalancerPoolMemberInterface.addNeutronLoadBalancerPoolMember(singleton); + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerPoolMemberAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolMemberAware service = (INeutronLoadBalancerPoolMemberAware) instance; + int status = service.canCreateNeutronLoadBalancerPoolMember(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerPoolMemberInterface.addNeutronLoadBalancerPoolMember(singleton); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolMemberAware service = (INeutronLoadBalancerPoolMemberAware) instance; + service.neutronLoadBalancerPoolMemberCreated(singleton); + } + } + } else { + List bulk = input.getBulk(); + Iterator i = bulk.iterator(); + HashMap testMap = new HashMap(); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerPoolMemberAware.class, this, null); + while (i.hasNext()) { + NeutronLoadBalancerPoolMember test = i.next(); + + /* + * Verify that the firewall doesn't already exist + */ + + if (loadBalancerPoolMemberInterface.neutronLoadBalancerPoolMemberExists( + test.getPoolMemberID())) { + throw new BadRequestException("Load Balancer PoolMember UUID already is already created"); + } + if (testMap.containsKey(test.getPoolMemberID())) { + throw new BadRequestException("Load Balancer PoolMember UUID already exists"); + } + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolMemberAware service = (INeutronLoadBalancerPoolMemberAware) instance; + int status = service.canCreateNeutronLoadBalancerPoolMember(test); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + } + /* + * now, each element of the bulk request can be added to the cache + */ + i = bulk.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerPoolMember test = i.next(); + loadBalancerPoolMemberInterface.addNeutronLoadBalancerPoolMember(test); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolMemberAware service = (INeutronLoadBalancerPoolMemberAware) instance; + service.neutronLoadBalancerPoolMemberCreated(test); + } + } + } + } + return Response.status(201).entity(input).build(); +} +} diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolNorthbound.java new file mode 100644 index 0000000000..fc5357ccb5 --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolNorthbound.java @@ -0,0 +1,331 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + + +import org.codehaus.enunciate.jaxrs.ResponseCode; +import org.codehaus.enunciate.jaxrs.StatusCodes; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolAware; +import org.opendaylight.controller.networkconfig.neutron.INeutronLoadBalancerPoolCRUD; +import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPool; +import org.opendaylight.controller.northbound.commons.RestMessages; +import org.opendaylight.controller.northbound.commons.exception.BadRequestException; +import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException; +import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; +import org.opendaylight.controller.sal.utils.ServiceHelper; + +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; + +/** + * Neutron Northbound REST APIs for LoadBalancerPool Policies.
+ * This class provides REST APIs for managing neutron LoadBalancerPool Policies + * + *
+ *
+ * Authentication scheme : HTTP Basic
+ * Authentication realm : opendaylight
+ * Transport : HTTP and HTTPS
+ *
+ * HTTPS Authentication is disabled by default. Administrator can enable it in + * tomcat-server.xml after adding a proper keystore / SSL certificate from a + * trusted authority.
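As a hedged illustration of a creation request against the /pools resource defined below: the wrapper element name "pool" comes from NeutronLoadBalancerPoolRequest later in this patch, while the attribute names are assumptions modeled on the query parameters accepted by listGroups (the accepted body attributes are ultimately defined by NeutronLoadBalancerPool's JAXB annotations, which are outside this patch). The endpoint and credentials are likewise deployment-specific.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Base64;

public class CreatePoolExample {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint, credentials and attribute names; only the "pool" wrapper
        // element is taken from NeutronLoadBalancerPoolRequest in this patch.
        String body = "{ \"pool\": { "
                + "\"tenant_id\": \"00000000-0000-0000-0000-000000000001\", "
                + "\"name\": \"web-pool\", "
                + "\"protocol\": \"HTTP\", "
                + "\"lb_algorithm\": \"ROUND_ROBIN\", "
                + "\"admin_state_up\": true } }";
        URL url = new URL("http://localhost:8080/controller/nb/v2/neutron/pools");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setRequestProperty("Authorization",
                "Basic " + Base64.getEncoder().encodeToString("admin:admin".getBytes("UTF-8")));
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes("UTF-8"));
        }
        // 201 indicates the pool was created.
        System.out.println("HTTP status: " + conn.getResponseCode());
    }
}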
+ * More info : + * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration + * + */ +@Path("/pools") +public class NeutronLoadBalancerPoolNorthbound { + + private NeutronLoadBalancerPool extractFields(NeutronLoadBalancerPool o, List fields) { + return o.extractFields(fields); + } + + /** + * Returns a list of all LoadBalancerPool + * */ + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + + public Response listGroups( + // return fields + @QueryParam("fields") List fields, + // OpenStack LoadBalancerPool attributes + @QueryParam("id") String queryLoadBalancerPoolID, + @QueryParam("tenant_id") String queryLoadBalancerPoolTenantID, + @QueryParam("name") String queryLoadBalancerPoolName, + @QueryParam("description") String queryLoadBalancerDescription, + @QueryParam("protocol") String queryLoadBalancerProtocol, + @QueryParam("lb_algorithm") String queryLoadBalancerPoolLbAlgorithm, + @QueryParam("healthmonitor_id") String queryLoadBalancerPoolHealthMonitorID, + @QueryParam("admin_state_up") String queryLoadBalancerIsAdminStateUp, + @QueryParam("status") String queryLoadBalancerPoolStatus, + @QueryParam("members") List queryLoadBalancerPoolMembers, + // pagination + @QueryParam("limit") String limit, + @QueryParam("marker") String marker, + @QueryParam("page_reverse") String pageReverse + // sorting not supported + ) { + INeutronLoadBalancerPoolCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerPoolCRUD(this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPool CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + List allLoadBalancerPools = loadBalancerPoolInterface.getAllNeutronLoadBalancerPools(); + List ans = new ArrayList(); + Iterator i = allLoadBalancerPools.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerPool nsg = i.next(); + if ((queryLoadBalancerPoolID == null || + queryLoadBalancerPoolID.equals(nsg.getLoadBalancerPoolID())) && + (queryLoadBalancerPoolTenantID == null || + queryLoadBalancerPoolTenantID.equals(nsg.getLoadBalancerPoolTenantID())) && + (queryLoadBalancerPoolName == null || + queryLoadBalancerPoolName.equals(nsg.getLoadBalancerPoolName())) && + (queryLoadBalancerDescription == null || + queryLoadBalancerDescription.equals(nsg.getLoadBalancerPoolDescription())) && + (queryLoadBalancerPoolLbAlgorithm == null || + queryLoadBalancerPoolLbAlgorithm.equals(nsg.getLoadBalancerPoolLbAlgorithm())) && + (queryLoadBalancerPoolHealthMonitorID == null || + queryLoadBalancerPoolHealthMonitorID.equals(nsg.getNeutronLoadBalancerPoolHealthMonitorID())) && + (queryLoadBalancerIsAdminStateUp == null || + queryLoadBalancerIsAdminStateUp.equals(nsg.getLoadBalancerPoolAdminIsStateIsUp())) && + (queryLoadBalancerPoolStatus == null || + queryLoadBalancerPoolStatus.equals(nsg.getLoadBalancerPoolStatus())) && + (queryLoadBalancerPoolMembers.size() == 0 || + queryLoadBalancerPoolMembers.equals(nsg.getLoadBalancerPoolMembers()))) { + if (fields.size() > 0) { + ans.add(extractFields(nsg,fields)); + } else { + ans.add(nsg); + } + } + } + return Response.status(200).entity( + new NeutronLoadBalancerPoolRequest(ans)).build(); + } + + /** + * Returns a specific LoadBalancerPool */ + + @Path("{loadBalancerPoolID}") + @GET + @Produces({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + 
@ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response showLoadBalancerPool(@PathParam("loadBalancerPoolID") String loadBalancerPoolID, + // return fields + @QueryParam("fields") List fields) { + INeutronLoadBalancerPoolCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerPoolCRUD(this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPool CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (!loadBalancerPoolInterface.neutronLoadBalancerPoolExists(loadBalancerPoolID)) { + throw new ResourceNotFoundException("LoadBalancerPool UUID does not exist."); + } + if (fields.size() > 0) { + NeutronLoadBalancerPool ans = loadBalancerPoolInterface.getNeutronLoadBalancerPool(loadBalancerPoolID); + return Response.status(200).entity( + new NeutronLoadBalancerPoolRequest(extractFields(ans, fields))).build(); + } else { + return Response.status(200).entity(new NeutronLoadBalancerPoolRequest(loadBalancerPoolInterface.getNeutronLoadBalancerPool(loadBalancerPoolID))).build(); + } + } + + /** + * Creates new LoadBalancerPool */ + + @POST + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 201, condition = "Created"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 409, condition = "Conflict"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response createLoadBalancerPools(final NeutronLoadBalancerPoolRequest input) { + INeutronLoadBalancerPoolCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerPoolCRUD(this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPool CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + if (input.isSingleton()) { + NeutronLoadBalancerPool singleton = input.getSingleton(); + + /* + * Verify that the LoadBalancerPool doesn't already exist. 
+ */ + if (loadBalancerPoolInterface.neutronLoadBalancerPoolExists(singleton.getLoadBalancerPoolID())) { + throw new BadRequestException("LoadBalancerPool UUID already exists"); + } + loadBalancerPoolInterface.addNeutronLoadBalancerPool(singleton); + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerPoolAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + int status = service.canCreateNeutronLoadBalancerPool(singleton); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + loadBalancerPoolInterface.addNeutronLoadBalancerPool(singleton); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + service.neutronLoadBalancerPoolCreated(singleton); + } + } + } else { + List bulk = input.getBulk(); + Iterator i = bulk.iterator(); + HashMap testMap = new HashMap(); + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerPoolAware.class, this, null); + while (i.hasNext()) { + NeutronLoadBalancerPool test = i.next(); + + /* + * Verify that the firewall doesn't already exist + */ + + if (loadBalancerPoolInterface.neutronLoadBalancerPoolExists(test.getLoadBalancerPoolID())) { + throw new BadRequestException("Load Balancer Pool UUID already is already created"); + } + if (testMap.containsKey(test.getLoadBalancerPoolID())) { + throw new BadRequestException("Load Balancer Pool UUID already exists"); + } + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + int status = service.canCreateNeutronLoadBalancerPool(test); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + } + /* + * now, each element of the bulk request can be added to the cache + */ + i = bulk.iterator(); + while (i.hasNext()) { + NeutronLoadBalancerPool test = i.next(); + loadBalancerPoolInterface.addNeutronLoadBalancerPool(test); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + service.neutronLoadBalancerPoolCreated(test); + } + } + } + } + return Response.status(201).entity(input).build(); + } + + /** + * Updates a LoadBalancerPool Policy + */ + @Path("{loadBalancerPoolID}") + @PUT + @Produces({ MediaType.APPLICATION_JSON }) + @Consumes({ MediaType.APPLICATION_JSON }) + @StatusCodes({ + @ResponseCode(code = 200, condition = "Operation successful"), + @ResponseCode(code = 400, condition = "Bad Request"), + @ResponseCode(code = 401, condition = "Unauthorized"), + @ResponseCode(code = 403, condition = "Forbidden"), + @ResponseCode(code = 404, condition = "Not Found"), + @ResponseCode(code = 501, condition = "Not Implemented") }) + public Response updateLoadBalancerPool( + @PathParam("loadBalancerPoolID") String loadBalancerPoolID, final NeutronLoadBalancerPoolRequest input) { + INeutronLoadBalancerPoolCRUD loadBalancerPoolInterface = NeutronCRUDInterfaces.getINeutronLoadBalancerPoolCRUD(this); + if (loadBalancerPoolInterface == null) { + throw new ServiceUnavailableException("LoadBalancerPool CRUD Interface " + + RestMessages.SERVICEUNAVAILABLE.toString()); + } + + /* + * verify the LoadBalancerPool exists and there is only one delta provided + */ + if 
(!loadBalancerPoolInterface.neutronLoadBalancerPoolExists(loadBalancerPoolID)) { + throw new ResourceNotFoundException("LoadBalancerPool UUID does not exist."); + } + if (!input.isSingleton()) { + throw new BadRequestException("Only singleton edit supported"); + } + NeutronLoadBalancerPool delta = input.getSingleton(); + NeutronLoadBalancerPool original = loadBalancerPoolInterface.getNeutronLoadBalancerPool(loadBalancerPoolID); + + /* + * updates restricted by Neutron + */ + if (delta.getLoadBalancerPoolID() != null || + delta.getLoadBalancerPoolTenantID() != null || + delta.getLoadBalancerPoolName() != null || + delta.getLoadBalancerPoolDescription() != null || + delta.getLoadBalancerPoolProtocol() != null || + delta.getLoadBalancerPoolLbAlgorithm() != null || + delta.getNeutronLoadBalancerPoolHealthMonitorID() != null || + delta.getLoadBalancerPoolAdminIsStateIsUp() != null || + delta.getLoadBalancerPoolStatus() != null || + delta.getLoadBalancerPoolMembers() != null) { + throw new BadRequestException("Attribute edit blocked by Neutron"); + } + + Object[] instances = ServiceHelper.getGlobalInstances(INeutronLoadBalancerPoolAware.class, this, null); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + int status = service.canUpdateNeutronLoadBalancerPool(delta, original); + if (status < 200 || status > 299) { + return Response.status(status).build(); + } + } + } + + /* + * update the object and return it + */ + loadBalancerPoolInterface.updateNeutronLoadBalancerPool(loadBalancerPoolID, delta); + NeutronLoadBalancerPool updatedLoadBalancerPool = loadBalancerPoolInterface.getNeutronLoadBalancerPool(loadBalancerPoolID); + if (instances != null) { + for (Object instance : instances) { + INeutronLoadBalancerPoolAware service = (INeutronLoadBalancerPoolAware) instance; + service.neutronLoadBalancerPoolUpdated(updatedLoadBalancerPool); + } + } + return Response.status(200).entity(new NeutronLoadBalancerPoolRequest(loadBalancerPoolInterface.getNeutronLoadBalancerPool(loadBalancerPoolID))).build(); + } +} diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolRequest.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolRequest.java new file mode 100644 index 0000000000..a1cdc419e1 --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerPoolRequest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancerPool; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerPoolRequest { + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="pool") + NeutronLoadBalancerPool singletonLoadBalancerPool; + + @XmlElement(name="pools") + List bulkRequest; + + NeutronLoadBalancerPoolRequest() { + } + + NeutronLoadBalancerPoolRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancerPool = null; + } + + NeutronLoadBalancerPoolRequest(NeutronLoadBalancerPool group) { + singletonLoadBalancerPool = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancerPool getSingleton() { + return singletonLoadBalancerPool; + } + + public boolean isSingleton() { + return (singletonLoadBalancerPool != null); + } +} \ No newline at end of file diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerRequest.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerRequest.java new file mode 100644 index 0000000000..1cf4e7016d --- /dev/null +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronLoadBalancerRequest.java @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2014 Red Hat, Inc. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.networkconfig.neutron.northbound; + +import org.opendaylight.controller.networkconfig.neutron.NeutronLoadBalancer; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + + +@XmlRootElement +@XmlAccessorType(XmlAccessType.NONE) + +public class NeutronLoadBalancerRequest { + /** + * See OpenStack Network API v2.0 Reference for description of + * http://docs.openstack.org/api/openstack-network/2.0/content/ + */ + + @XmlElement(name="loadbalancer") + NeutronLoadBalancer singletonLoadBalancer; + + @XmlElement(name="loadbalancers") + List bulkRequest; + + NeutronLoadBalancerRequest() { + } + + NeutronLoadBalancerRequest(List bulk) { + bulkRequest = bulk; + singletonLoadBalancer = null; + } + + NeutronLoadBalancerRequest(NeutronLoadBalancer group) { + singletonLoadBalancer = group; + } + + public List getBulk() { + return bulkRequest; + } + + public NeutronLoadBalancer getSingleton() { + return singletonLoadBalancer; + } + + public boolean isSingleton() { + return (singletonLoadBalancer != null); + } +} \ No newline at end of file diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronNorthboundRSApplication.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronNorthboundRSApplication.java index 9abcca7c53..96d72cb926 100644 --- a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronNorthboundRSApplication.java +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronNorthboundRSApplication.java @@ -38,6 +38,11 @@ public class NeutronNorthboundRSApplication extends Application { classes.add(NeutronFirewallNorthbound.class); classes.add(NeutronFirewallPolicyNorthbound.class); classes.add(NeutronFirewallRulesNorthbound.class); + classes.add(NeutronLoadBalancerNorthbound.class); + classes.add(NeutronLoadBalancerListenerNorthbound.class); + classes.add(NeutronLoadBalancerPoolNorthbound.class); + classes.add(NeutronLoadBalancerHealthMonitorNorthbound.class); + classes.add(NeutronLoadBalancerPoolMembersNorthbound.class); return classes; } diff --git a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronRoutersNorthbound.java b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronRoutersNorthbound.java index 806e853b36..0c02adad8a 100644 --- a/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronRoutersNorthbound.java +++ b/opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronRoutersNorthbound.java @@ -527,6 +527,9 @@ public class NeutronRoutersNorthbound { if (input.getPortUUID() != null && 
input.getSubnetUUID() == null) { NeutronRouter_Interface targetInterface = target.getInterfaces().get(input.getPortUUID()); + if (targetInterface == null) { + throw new ResourceNotFoundException("Router interface not found for given Port UUID"); + } input.setSubnetUUID(targetInterface.getSubnetUUID()); input.setID(target.getID()); input.setTenantID(target.getTenantID()); @@ -554,7 +557,7 @@ public class NeutronRoutersNorthbound { throw new ResourceNotFoundException("Port UUID not found"); } if (port.getFixedIPs() == null) { - throw new ResourceNotFoundException("Port UUID jas no fixed IPs"); + throw new ResourceNotFoundException("Port UUID has no fixed IPs"); } NeutronSubnet subnet = subnetInterface.getSubnet(input.getSubnetUUID()); if (subnet == null) { diff --git a/pom.xml b/pom.xml index 8bebd2aa61..e4c51b7839 100644 --- a/pom.xml +++ b/pom.xml @@ -124,6 +124,7 @@ opendaylight/commons/parent opendaylight/commons/logback_settings opendaylight/commons/filter-valve + opendaylight/commons/liblldp opendaylight/dummy-console