Merge "Fix exception when netconf device provides YIN schemas"
author    Tony Tkacik <ttkacik@cisco.com>
Wed, 20 Aug 2014 07:16:33 +0000 (07:16 +0000)
committer Gerrit Code Review <gerrit@opendaylight.org>
Wed, 20 Aug 2014 07:16:33 +0000 (07:16 +0000)
56 files changed:
.gitignore
features/config-netty/pom.xml
features/config-persister/pom.xml
features/config/pom.xml
features/config/src/main/resources/features.xml
features/flow/pom.xml
features/mdsal/pom.xml
features/protocol-framework/pom.xml
opendaylight/commons/opendaylight/pom.xml
opendaylight/distribution/opendaylight-karaf-empty/pom.xml [new file with mode: 0644]
opendaylight/distribution/opendaylight-karaf-resources/pom.xml [new file with mode: 0644]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/instance [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/instance with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/instance.bat [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/instance.bat with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/karaf [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/karaf with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/karaf.bat [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/karaf.bat with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/context.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/context.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/logback.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/tomcat-logging.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/tomcat-logging.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/tomcat-server.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/tomcat-server.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/etc/custom.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/custom.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/version.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/version.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf/pom.xml
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties [deleted file]
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties [deleted file]
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties [deleted file]
opendaylight/md-sal/model/model-flow-service/src/main/yang/packet-processing.yang
opendaylight/md-sal/sal-clustering-commons/pom.xml
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/InstanceIdentifierForXmlCodec.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/InstanceIdentifierForXmlCodec.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/RandomPrefix.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/RandomPrefix.java with 96% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlDocumentUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlDocumentUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlStreamUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlStreamUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/utils/XmlUtilsTest.java with 98% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/resources/application.conf
opendaylight/md-sal/sal-clustering-commons/src/test/resources/org/opendaylight/controller/xml/codec/rpcTest.yang [moved from opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/org/opendaylight/controller/remote/rpc/utils/rpcTest.yang with 100% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf [new file with mode: 0644]
opendaylight/md-sal/sal-distributed-datastore/pom.xml
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DistributedDataStore.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionChainProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DeleteData.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/ActorContext.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxyTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/utils/ActorContextTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/utils/MockActorContext.java
opendaylight/md-sal/sal-remoterpc-connector/pom.xml
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RemoteRpcImplementation.java
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcBroker.java
pom.xml

index 9144cda4cc6b40ede292b02156e21f99d136e13b..b304ffce87f5f06b1501a33f424225322353f64c 100644 (file)
@@ -28,4 +28,6 @@ maven-eclipse.xml
 .DS_STORE
 .metadata
 opendaylight/md-sal/sal-distributed-datastore/journal
+!opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin
+
 
index 2f4b4b1e2198bb4e85cd1c66f19fb6e009cf4626..5fbc463df32f798c757a293da391fc0bc77f76bb 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-config-netty</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
@@ -21,7 +21,6 @@
       <artifactId>features-config-persister</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>config-netty-config</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index 6dc894134546fabde2a2d7ef2bf71d0673fab5b0..3346c754d628f8fbb03add426db0026bc4af0615 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-config-persister</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <version>${yangtools.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-netconf</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.eclipse.persistence</groupId>
       <artifactId>org.eclipse.persistence.moxy</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index c69e11bed2c91e2a08d936dbffad02c601b12d7f..8c061c27361946cd3bfc62edde9252d2dd069b9b 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-config</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
@@ -22,7 +22,6 @@
       <version>${yangtools.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>config-manager</artifactId>
     </dependency>
+    <!-- test the features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index 6c0d32427da9508ccd39463e89e716e603bd2101..5027588acb2e9fce758cffffff77388400c21c68 100644 (file)
@@ -6,7 +6,7 @@
   <repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
 
   <feature name='odl-config-all' version='${project.version}'>
-    <feature version='${project.version}'>odl-mdsal-common</feature>
+      <feature version='${mdsal.version}'>odl-mdsal-common</feature>
       <feature version='${project.version}'>odl-config-api</feature>
       <feature version='${project.version}'>odl-config-netty-config-api</feature>
       <feature version='${project.version}'>odl-config-core</feature>
index 09bb6c91e6a0bde381f3902c299e59d4d4c7fa11..ac189737d937cd0fb36186847327acff27a74c18 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-flow</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
@@ -22,7 +22,6 @@
       <version>${mdsal.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller.model</groupId>
       <groupId>org.opendaylight.controller.md</groupId>
       <artifactId>forwardingrules-manager</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index 4f1ba98e5c5d4f60cc4d0a5a2fcbe7c06706d429..e7b825c48b1173b783291e3e2a1797c4cea64ebc 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-mdsal</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <artifactId>features-yangtools</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config-persister</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config-netty</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.opendaylight.controller.samples</groupId>
       <artifactId>toaster-config</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+      <version>0.6.2-SNAPSHOT</version>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index 97836be4552f88e4ad960981ebea186e664a090f..dcd24d62169ee16d8e21d24d0469af5a9ba4bbbb 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-protocol-framework</artifactId>
   <version>${protocol-framework.version}</version>
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>protocol-framework</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
index c74e7ae4108297abcb2799f0a7f81c4b23d892b6..1064afd82d53fcdcdf6714421b5702535a2d6180 100644 (file)
         <artifactId>toaster-config</artifactId>
         <version>${mdsal.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>features-test</artifactId>
+        <version>${yangtools.version}</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.opendaylight.yangtools</groupId>
         <artifactId>features-yangtools</artifactId>
diff --git a/opendaylight/distribution/opendaylight-karaf-empty/pom.xml b/opendaylight/distribution/opendaylight-karaf-empty/pom.xml
new file mode 100644 (file)
index 0000000..d3dfe19
--- /dev/null
@@ -0,0 +1,235 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../../commons/opendaylight</relativePath>
+  </parent>
+  <artifactId>opendaylight-karaf-empty</artifactId>
+  <packaging>pom</packaging>
+  <prerequisites>
+    <maven>3.0</maven>
+  </prerequisites>
+
+  <dependencies>
+    <dependency>
+      <!-- scope is compile so all features (there is only one) are installed
+            into startup.properties and the feature repo itself is not installed -->
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>framework</artifactId>
+      <version>${karaf.version}</version>
+      <type>kar</type>
+    </dependency>
+    <!-- scope is runtime so the feature repo is listed in the features
+      service config file, and features may be installed using the
+      karaf-maven-plugin configuration -->
+    <dependency>
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>standard</artifactId>
+      <version>${karaf.version}</version>
+      <classifier>features</classifier>
+      <type>xml</type>
+      <scope>runtime</scope>
+    </dependency>
+
+    <!-- ODL Branding -->
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>karaf.branding</artifactId>
+      <scope>compile</scope>
+    </dependency>
+
+    <!-- Resources needed -->
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>opendaylight-karaf-resources</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.felix</groupId>
+                    <artifactId>maven-bundle-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>cleanVersions</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>copy</goal>
+                      <goal>unpack</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.karaf.tooling</groupId>
+                    <artifactId>karaf-maven-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>commands-generate-help</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.fusesource.scalate</groupId>
+                    <artifactId>maven-scalate-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>sitegen</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.servicemix.tooling</groupId>
+                    <artifactId>depends-maven-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>generate-depends-file</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.karaf.tooling</groupId>
+        <artifactId>karaf-maven-plugin</artifactId>
+        <version>${karaf.version}</version>
+        <extensions>true</extensions>
+        <executions>
+          <execution>
+            <id>process-resources</id>
+            <goals>
+              <goal>install-kars</goal>
+            </goals>
+            <phase>process-resources</phase>
+          </execution>
+          <execution>
+            <id>package</id>
+            <goals>
+              <goal>instance-create-archive</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>${checkstyle.version}</version>
+        <configuration>
+          <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/configuration\/initial\/</excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.6</version>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <!-- here the phase you need -->
+            <phase>generate-resources</phase>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.opendaylight.controller</groupId>
+                  <artifactId>karaf.branding</artifactId>
+                  <version>${karaf.branding.version}</version>
+                  <outputDirectory>target/assembly/lib</outputDirectory>
+                  <destFileName>karaf.branding-${branding.version}.jar</destFileName>
+                </artifactItem>
+              </artifactItems>
+            </configuration>
+          </execution>
+          <execution>
+            <id>unpack-karaf-resources</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>prepare-package</phase>
+            <configuration>
+             <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+             <groupId>org.opendaylight.controller</groupId>
+             <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+             <excludes>META-INF\/**</excludes>
+             <excludeTransitive>true</excludeTransitive>
+             <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+            <execution>
+                <phase>prepare-package</phase>
+                <goals>
+                    <goal>run</goal>
+                </goals>
+                <configuration>
+                  <tasks>
+                    <chmod perm="755">
+                        <fileset dir="${project.build.directory}/assembly/bin">
+                          <include name="karaf"/>
+                          <include name="instance"/>
+                        </fileset>
+                    </chmod>
+                  </tasks>
+                </configuration>
+            </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <scm>
+    <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+    <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+    <tag>HEAD</tag>
+    <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main</url>
+  </scm>
+</project>
diff --git a/opendaylight/distribution/opendaylight-karaf-resources/pom.xml b/opendaylight/distribution/opendaylight-karaf-resources/pom.xml
new file mode 100644 (file)
index 0000000..00495a3
--- /dev/null
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+
+ This program and the accompanying materials are made available under the
+ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ and is available at http://www.eclipse.org/legal/epl-v10.html
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../../commons/opendaylight</relativePath>
+  </parent>
+  <artifactId>opendaylight-karaf-resources</artifactId>
+  <description>Resources for opendaylight-karaf</description>
+  <packaging>jar</packaging>
+</project>
index b3c3f20ba80bb5cc829c5a208669a239ec8a1981..5cbe412b2b0af28da537a95af2ace0766df8e120 100644 (file)
       <version>${karaf.version}</version>
       <type>kar</type>
     </dependency>
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>base-features</artifactId>
-      <version>${project.version}</version>
-      <type>kar</type>
-    </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>karaf.branding</artifactId>
       <scope>compile</scope>
     </dependency>
-    <!-- scope is runtime so the feature repo is listed in the features
-      service config file, and features may be installed using the
-      karaf-maven-plugin configuration -->
-    <dependency>
-      <groupId>org.apache.karaf.features</groupId>
-      <artifactId>standard</artifactId>
-      <version>${karaf.version}</version>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>base-features</artifactId>
-      <version>${project.parent.version}</version>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
-    </dependency>
-    <!-- scope is compile so all features (there is only one) are installed
-            into startup.properties and the feature repo itself is not installed -->
+
+    <!-- Resources needed -->
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
-      <artifactId>extras-features</artifactId>
+      <artifactId>opendaylight-karaf-resources</artifactId>
       <version>${project.version}</version>
-      <type>kar</type>
-      <scope>runtime</scope>
-    </dependency>
-    <!-- AD-SAL Related Features -->
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>features-adsal</artifactId>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
+
+    <!-- scope is not runtime so the feature repo is pulled into the local
+    repo on build and thus you actually run.  Failure to do so can lead
+    to very confusing errors for devs -->
     <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>features-nsf</artifactId>
-      <version>${project.version}</version>
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>standard</artifactId>
+      <version>${karaf.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
 
+    <!--
+          controller provided features:
+          Note: Nothing should go here that is not locked
+          down with testing... ie, no broken feature repos
+    -->
+
     <!-- MD-SAL Related Features -->
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-mdsal</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-flow</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
   </dependencies>
 
               </artifactItems>
             </configuration>
           </execution>
+          <execution>
+            <id>unpack-karaf-resources</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>prepare-package</phase>
+            <configuration>
+             <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+             <groupId>org.opendaylight.controller</groupId>
+             <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+             <excludes>META-INF\/**</excludes>
+             <excludeTransitive>true</excludeTransitive>
+             <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+            <execution>
+                <phase>prepare-package</phase>
+                <goals>
+                    <goal>run</goal>
+                </goals>
+                <configuration>
+                  <tasks>
+                    <chmod perm="755">
+                        <fileset dir="${project.build.directory}/assembly/bin">
+                          <include name="karaf"/>
+                          <include name="instance"/>
+                        </fileset>
+                    </chmod>
+                  </tasks>
+                </configuration>
+            </execution>
         </executions>
       </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-                <execution>
-                    <phase>prepare-package</phase>
-                    <goals>
-                        <goal>run</goal>
-                    </goals>
-                    <configuration>
-                        <tasks>
-                            <copy todir="${project.build.directory}/assembly/bin" overwrite="true">
-                                <fileset dir="${basedir}/src/main/resources/karaf/" includes="karaf,karaf.bat,instance,instance.bat"/>
-                            </copy>
-                        </tasks>
-                    </configuration>
-                </execution>
-            </executions>
-        </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties
deleted file mode 100644 (file)
index a98956e..0000000
+++ /dev/null
@@ -1,506 +0,0 @@
-################################################################################
-#
-#    Licensed to the Apache Software Foundation (ASF) under one or more
-#    contributor license agreements.  See the NOTICE file distributed with
-#    this work for additional information regarding copyright ownership.
-#    The ASF licenses this file to You under the Apache License, Version 2.0
-#    (the "License"); you may not use this file except in compliance with
-#    the License.  You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS,
-#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#    See the License for the specific language governing permissions and
-#    limitations under the License.
-#
-################################################################################
-
-#
-# Java platform package export properties.
-#
-
-# Standard package set.  Note that:
-#   - javax.transaction* is exported with a mandatory attribute
-jre-1.6= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-# Standard package set.  Note that:
-#   - javax.transaction* is exported with a mandatory attribute
-jre-1.7= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-jre-1.8= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties
deleted file mode 100644 (file)
index ca8c83c..0000000
+++ /dev/null
@@ -1,53 +0,0 @@
-#Bundles to be started on startup, with startlevel
-
-# feature: framework version: 3.0.1
-mvn\:org.ops4j.base/ops4j-base-lang/1.4.0 = 5
-mvn\:biz.aQute.bnd/bndlib/2.2.0 = 5
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-bnd/1.7.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-maven-commons/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-aether/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-wrap/1.6.0 = 5
-mvn\:javax.annotation/javax.annotation-api/1.2 = 5
-mvn\:org.ops4j.pax.logging/pax-logging-api/1.7.2 = 8
-mvn\:org.ops4j.pax.logging/pax-logging-service/1.7.2 = 8
-mvn\:org.apache.karaf.service/org.apache.karaf.service.guard/3.0.1 = 10
-mvn\:org.apache.felix/org.apache.felix.configadmin/1.6.0 = 10
-mvn\:org.apache.felix/org.apache.felix.fileinstall/3.2.8 = 11
-mvn\:org.ow2.asm/asm-all/4.1 = 12
-mvn\:org.apache.aries/org.apache.aries.util/1.1.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.api/1.0.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.impl/1.0.2 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.api/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.cm/1.0.3 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core.compatibility/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core/1.4.0 = 20
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.spring/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.blueprint/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.wrap/3.0.1 = 24
-mvn\:org.apache.karaf.region/org.apache.karaf.region.core/3.0.1 = 25
-mvn\:org.apache.karaf.features/org.apache.karaf.features.core/3.0.1 = 25
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.features/3.0.1 = 26
-mvn\:jline/jline/2.11 = 30
-mvn\:org.jledit/core/0.2.1 = 30
-mvn\:org.fusesource.jansi/jansi/1.11 = 30
-mvn\:org.ops4j.base/ops4j-base-util-property/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-xml/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-collections/1.4.0 = 30
-mvn\:org.ops4j.pax.url/pax-url-commons/1.6.0 = 30
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-property/1.7.0 = 30
-mvn\:org.ops4j.base/ops4j-base-net/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-monitors/1.4.0 = 30
-mvn\:org.apache.karaf.features/org.apache.karaf.features.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.console/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.modules/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.config/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.boot/3.0.1 = 30
-mvn\:org.apache.sshd/sshd-core/0.9.0 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.table/3.0.1 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.core/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.help/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.core/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.commands/3.0.1 = 30
-mvn\:org.apache.aries.quiesce/org.apache.aries.quiesce.api/1.0.0 = 30
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties
deleted file mode 100644 (file)
index a312d66..0000000
+++ /dev/null
@@ -1,109 +0,0 @@
-#
-# The properties defined in this file will be made available through system
-# properties at the very beginning of the Karaf's boot process.
-#
-
-# Use Equinox as default OSGi Framework Implementation
-karaf.framework=equinox
-
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
-
-
-# Log level when the pax-logging service is not available
-# This level will only be used while the pax-logging service bundle
-# is not fully available.
-# To change log levels, please refer to the org.ops4j.pax.logging.cfg file
-# instead.
-org.ops4j.pax.logging.DefaultServiceLog.level = ERROR
-
-#
-# Name of this Karaf instance.
-#
-karaf.name = root
-
-#
-# Default repository where bundles will be loaded from before using
-# other Maven repositories.  For the full Maven configuration, see
-# the org.ops4j.pax.url.mvn.cfg file.
-#
-karaf.default.repository = system
-
-#
-# Location of a shell script that will be run when starting a shell
-# session.  This script can be used to create aliases and define
-# additional commands.
-#
-karaf.shell.init.script = ${karaf.etc}/shell.init.script
-
-#
-# Sets the maximum size of the shell command history. If not set,
-# defaults to 500 entries. Setting to 0 will disable history.
-#
-# karaf.shell.history.maxSize = 0
-
-#
-# Deletes the entire karaf.data directory at every start
-#
-karaf.clean.all = false
-
-#
-# Deletes the karaf.data/cache directory at every start
-#
-karaf.clean.cache = false
-
-#
-# Roles to use when logging into a local Karaf console.
-#
-# The syntax is the following:
-#   [classname:]principal
-# where classname is the class name of the principal object
-# (defaults to org.apache.karaf.jaas.modules.RolePrincipal)
-# and principal is the name of the principal of that class
-# (defaults to instance).
-#
-karaf.local.roles = admin,manager,viewer
-
-#
-# Set this empty property to avoid errors when validating xml documents.
-#
-xml.catalog.files =
-
-#
-# Suppress the bell in the console when hitting backspace too many times
-# for example
-#
-jline.nobell = true
-
-#
-# ServiceMix specs options
-#
-org.apache.servicemix.specs.debug = false
-org.apache.servicemix.specs.timeout = 0
-
-#
-# Settings for the OSGi 4.3 Weaving
-# By default, we will not weave any classes. Change this setting to include classes
-# that you application needs to have woven.
-#
-org.apache.aries.proxy.weaving.enabled = none
-# Classes not to weave - Aries default + Xerces which is known to have issues.
-org.apache.aries.proxy.weaving.disabled = org.objectweb.asm.*,org.slf4j.*,org.apache.log4j.*,javax.*,org.apache.xerces.*
-
-#
-# By default, only Karaf shell commands are secured, but additional services can be
-# secured by expanding this filter
-#
-karaf.secured.services = (&(osgi.command.scope=*)(osgi.command.function=*))
-
-#
-# Security properties
-#
-# To enable OSGi security, uncomment the properties below,
-# install the framework-security feature and restart.
-#
-#java.security.policy=${karaf.etc}/all.policy
-#org.osgi.framework.security=osgi
-#org.osgi.framework.trust.repositories=${karaf.etc}/trustStore.ks
index 022c623afd62d598dfd3d0897db5abf21c68e056..50368e46a6a3f100725ac2fa80f3bca4eb9dcec2 100644 (file)
@@ -16,10 +16,6 @@ module packet-processing {
         description "";
     }
 
-    typedef buffer-id {
-        type uint32;
-    }
-
     typedef connection-cookie {
         type uint32;
     }
@@ -63,11 +59,6 @@ module packet-processing {
     }
 
     notification packet-received {
-
-        leaf buffer-id {
-            type buffer-id;
-        }
-
        leaf connection-cookie {
                 type connection-cookie;
         }
@@ -105,7 +96,7 @@ module packet-processing {
                        type inv:node-connector-ref;
                }
             leaf buffer-id {
-                  type buffer-id;
+                  type uint32;
             }
 
                uses raw-packet;
index 6db4d3a094f43cb80c461418cfd56d54639dd5a2..4419d19f527c9c096718496041dee08933275106 100644 (file)
       <groupId>org.opendaylight.yangtools</groupId>
       <artifactId>yang-parser-impl</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>netconf-util</artifactId>
+    </dependency>
 
     <dependency>
       <groupId>xmlunit</groupId>
           <artifactId>jsr305</artifactId>
           <version>2.0.1</version>
       </dependency>
+
+      <dependency>
+          <groupId>com.codahale.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+          <version>3.0.1</version>
+      </dependency>
   </dependencies>
 
 </project>
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java
new file mode 100644 (file)
index 0000000..6464315
--- /dev/null
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorPath;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.dispatch.BoundedMailbox;
+import akka.dispatch.MailboxType;
+import akka.dispatch.MessageQueue;
+import akka.dispatch.ProducesMessageQueue;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Preconditions;
+import com.typesafe.config.Config;
+import org.opendaylight.controller.common.reporting.MetricsReporter;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+
+public class MeteredBoundedMailbox implements MailboxType, ProducesMessageQueue<BoundedMailbox.MessageQueue> {
+
+    private MeteredMessageQueue queue;
+    private Integer capacity;
+    private FiniteDuration pushTimeOut;
+    private ActorPath actorPath;
+    private MetricsReporter reporter;
+
+    private final String QUEUE_SIZE = "queue-size";
+    private final Long DEFAULT_TIMEOUT = 10L;
+
+    public MeteredBoundedMailbox(ActorSystem.Settings settings, Config config) {
+        Preconditions.checkArgument( config.hasPath("mailbox-capacity"), "Missing configuration [mailbox-capacity]" );
+        this.capacity = config.getInt("mailbox-capacity");
+        Preconditions.checkArgument( this.capacity > 0, "mailbox-capacity must be > 0");
+
+        Long timeout = -1L;
+        if ( config.hasPath("mailbox-push-timeout-time") ){
+            timeout = config.getDuration("mailbox-push-timeout-time", TimeUnit.NANOSECONDS);
+        } else {
+            timeout = DEFAULT_TIMEOUT;
+        }
+        Preconditions.checkArgument( timeout > 0, "mailbox-push-timeout-time must be > 0");
+        this.pushTimeOut = new FiniteDuration(timeout, TimeUnit.NANOSECONDS);
+
+        reporter = MetricsReporter.getInstance();
+    }
+
+
+    @Override
+    public MessageQueue create(final scala.Option<ActorRef> owner, scala.Option<ActorSystem> system) {
+        this.queue = new MeteredMessageQueue(this.capacity, this.pushTimeOut);
+        monitorQueueSize(owner, this.queue);
+        return this.queue;
+    }
+
+    private void monitorQueueSize(scala.Option<ActorRef> owner, final MeteredMessageQueue monitoredQueue) {
+        if (owner.isEmpty()) {
+            return; //there's no actor to monitor
+        }
+        actorPath = owner.get().path();
+        MetricRegistry registry = reporter.getMetricsRegistry();
+
+        String actorName = registry.name(actorPath.toString(), QUEUE_SIZE);
+
+        if (registry.getMetrics().containsKey(actorName))
+            return; //already registered
+
+        reporter.getMetricsRegistry().register(actorName,
+                new Gauge<Integer>() {
+                    @Override
+                    public Integer getValue() {
+                        return monitoredQueue.size();
+                    }
+                });
+    }
+
+
+    public static class MeteredMessageQueue extends BoundedMailbox.MessageQueue {
+
+        public MeteredMessageQueue(int capacity, FiniteDuration pushTimeOut) {
+            super(capacity, pushTimeOut);
+        }
+    }
+
+}
+
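
Akka instantiates this mailbox type reflectively: the mailbox-type key in configuration names the class, and the remaining keys feed the (settings, config) constructor above. A minimal sketch of attaching such a mailbox to an actor, assuming an application.conf entry at path "example.bounded-mailbox" with the same keys as the test configuration added later in this change; the WorkerActor class and all names here are illustrative only.

    import akka.actor.ActorRef;
    import akka.actor.ActorSystem;
    import akka.actor.Props;
    import akka.actor.UntypedActor;

    public class BoundedMailboxExample {

        // Illustrative actor; any UntypedActor is wired up the same way.
        public static class WorkerActor extends UntypedActor {
            @Override
            public void onReceive(Object message) {
                // handle the message; once the mailbox is full, further messages go to dead letters
            }
        }

        public static void main(String[] args) {
            ActorSystem system = ActorSystem.create("example");

            // "example.bounded-mailbox" is an assumed config path containing:
            //   mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
            //   mailbox-capacity = 1000
            //   mailbox-push-timeout-time = 10ms
            ActorRef worker = system.actorOf(
                    Props.create(WorkerActor.class).withMailbox("example.bounded-mailbox"),
                    "worker");

            worker.tell("work-item", ActorRef.noSender());
            system.shutdown();
        }
    }
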
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java
new file mode 100644 (file)
index 0000000..5c3e11f
--- /dev/null
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.reporting;
+
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.MetricRegistry;
+
+/**
+ * Maintains the metrics registry that is provided to reporters.
+ * At the moment only one reporter, {@code JmxReporter}, exists.
+ * More reporters can be added.
+ * <p/>
+ * Consumers of this class are only interested in the {@code MetricRegistry}
+ * in which their metrics are stored.
+ */
+public class MetricsReporter implements AutoCloseable{
+
+    private final MetricRegistry METRICS_REGISTRY = new MetricRegistry();
+    private final String DOMAIN = "org.opendaylight.controller";
+
+    public final JmxReporter jmxReporter = JmxReporter.forRegistry(METRICS_REGISTRY).inDomain(DOMAIN).build();
+
+    private static MetricsReporter inst = new MetricsReporter();
+
+    private MetricsReporter(){
+        jmxReporter.start();
+    }
+
+    public static MetricsReporter getInstance(){
+        return inst;
+    }
+
+    public MetricRegistry getMetricsRegistry(){
+        return METRICS_REGISTRY;
+    }
+
+    @Override
+    public void close() throws Exception {
+        jmxReporter.close();
+    }
+}
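
Consumers only interact with the registry returned by getMetricsRegistry(); the JmxReporter started in the private constructor publishes everything registered there as MBeans under the org.opendaylight.controller domain. A short usage sketch against the metrics-core 3.0.1 API; the component and metric names below are made up for illustration.

    import com.codahale.metrics.Meter;
    import com.codahale.metrics.MetricRegistry;
    import org.opendaylight.controller.common.reporting.MetricsReporter;

    public class MetricsUsageExample {
        public static void main(String[] args) {
            MetricRegistry registry = MetricsReporter.getInstance().getMetricsRegistry();

            // meter() creates the metric on first use and returns the existing one afterwards;
            // it is visible over JMX immediately because the JmxReporter is already running.
            Meter received = registry.meter(
                    MetricRegistry.name("example-component", "messages-received"));
            received.mark();
        }
    }
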
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import org.opendaylight.yangtools.yang.common.QName;
 
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import com.google.common.base.Function;
 import com.google.common.base.Objects;
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import com.google.common.annotations.Beta;
 import com.google.common.base.Preconditions;
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import com.google.common.base.Optional;
 import org.opendaylight.controller.netconf.util.xml.XmlUtil;
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java
new file mode 100644 (file)
index 0000000..bfdb093
--- /dev/null
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.DeadLetter;
+import akka.actor.Props;
+import akka.actor.UntypedActor;
+import akka.japi.Creator;
+import akka.testkit.JavaTestKit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class MeteredBoundedMailboxTest {
+
+    private static ActorSystem actorSystem;
+    private final ReentrantLock lock = new ReentrantLock();
+
+    @Before
+    public void setUp() throws Exception {
+        actorSystem = ActorSystem.create("testsystem");
+    }
+
+    @After
+    public void tearDown() throws Exception {
+       if (actorSystem != null)
+           actorSystem.shutdown();
+    }
+
+    @Test
+    public void test_WhenQueueIsFull_ShouldSendMsgToDeadLetter() throws InterruptedException {
+        final JavaTestKit mockReceiver = new JavaTestKit(actorSystem);
+        actorSystem.eventStream().subscribe(mockReceiver.getRef(), DeadLetter.class);
+
+
+        final FiniteDuration ONE_SEC = new FiniteDuration(1, TimeUnit.SECONDS);
+        String boundedMailBox = actorSystem.name() + ".bounded-mailbox";
+        ActorRef pingPongActor = actorSystem.actorOf(PingPongActor.props(lock).withMailbox(boundedMailBox),
+                                                     "pingpongactor");
+
+        actorSystem.mailboxes().settings();
+        lock.lock();
+        //queue capacity = 10
+        //need to send 12 messages; 1 message is dequeued and actor waits on lock,
+        //2nd to 11th messages are put on the queue
+        //12th message is sent to dead letter.
+        for (int i=0;i<12;i++){
+            pingPongActor.tell("ping", mockReceiver.getRef());
+        }
+
+        mockReceiver.expectMsgClass(ONE_SEC, DeadLetter.class);
+
+        lock.unlock();
+
+        Object[] eleven = mockReceiver.receiveN(11, ONE_SEC);
+    }
+
+    /**
+     * For testing
+     */
+    public static class PingPongActor extends UntypedActor{
+
+        ReentrantLock lock;
+
+        private PingPongActor(ReentrantLock lock){
+            this.lock = lock;
+        }
+
+        public static Props props(final ReentrantLock lock){
+            return Props.create(new Creator<PingPongActor>(){
+                @Override
+                public PingPongActor create() throws Exception {
+                    return new PingPongActor(lock);
+                }
+            });
+        }
+
+        @Override
+        public void onReceive(Object message) throws Exception {
+            lock.lock();
+            if ("ping".equals(message))
+                getSender().tell("pong", getSelf());
+        }
+    }
+}
\ No newline at end of file
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0392dec3dd43075c2df343526bafa5cbd59e4ada 100644 (file)
@@ -0,0 +1,8 @@
+testsystem {
+
+  bounded-mailbox {
+    mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+    mailbox-capacity = 10
+    mailbox-push-timeout-time = 100ms
+  }
+}
\ No newline at end of file
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf b/opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf
new file mode 100644 (file)
index 0000000..3481bae
--- /dev/null
@@ -0,0 +1,8 @@
+testsystem {
+
+  bounded-mailbox {
+    mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+    mailbox-capacity = 1000
+    mailbox-push-timeout-time = 10ms
+  }
+}
\ No newline at end of file
index 648e8d23d06892f8964ed65e322b7cb988ee8194..9c5129d6a4eb932f764a158530e304808dbb375d 100644 (file)
         <version>1.1-SNAPSHOT</version>
     </dependency>
 
+      <dependency>
+          <groupId>com.codahale.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+          <version>3.0.1</version>
+      </dependency>
     <!-- Test Dependencies -->
     <dependency>
       <groupId>junit</groupId>
             <Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
             <Export-package></Export-package>
             <Private-Package></Private-Package>
-            <Import-Package>!*snappy;!org.jboss.*;*</Import-Package>
+            <Import-Package>!*snappy;!org.jboss.*;!com.jcraft.*;*</Import-Package>
             <Embed-Dependency>
                 sal-clustering-commons;
                 sal-akka-raft;
+                *metrics*;
                 !sal*;
                 !*config-api*;
                 !*testkit*;
index 4fa26ffb2033407efb5b1fc1c86f96d04e4e3dd7..404a4e02033ea1c89f9fada4135cfe0e4b6a935e 100644 (file)
@@ -10,9 +10,8 @@ package org.opendaylight.controller.cluster.datastore;
 
 import akka.actor.ActorRef;
 import akka.actor.ActorSystem;
+
 import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
 import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
@@ -28,8 +27,6 @@ import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransactio
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.util.PropertyUtils;
-import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
@@ -42,37 +39,12 @@ import org.slf4j.LoggerFactory;
  */
 public class DistributedDataStore implements DOMStore, SchemaContextListener, AutoCloseable {
 
-    private static final Logger
-        LOG = LoggerFactory.getLogger(DistributedDataStore.class);
-
-    private static final String EXECUTOR_MAX_POOL_SIZE_PROP =
-            "mdsal.dist-datastore-executor-pool.size";
-    private static final int DEFAULT_EXECUTOR_MAX_POOL_SIZE = 10;
-
-    private static final String EXECUTOR_MAX_QUEUE_SIZE_PROP =
-            "mdsal.dist-datastore-executor-queue.size";
-    private static final int DEFAULT_EXECUTOR_MAX_QUEUE_SIZE = 5000;
+    private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
 
     private final ActorContext actorContext;
 
     private SchemaContext schemaContext;
 
-    /**
-     * Executor used to run FutureTask's
-     *
-     * This is typically used when we need to make a request to an actor and
-     * wait for it's response and the consumer needs to be provided a Future.
-     */
-    private final ListeningExecutorService executor =
-            MoreExecutors.listeningDecorator(
-                    SpecialExecutors.newBlockingBoundedFastThreadPool(
-                            PropertyUtils.getIntSystemProperty(
-                                    EXECUTOR_MAX_POOL_SIZE_PROP,
-                                    DEFAULT_EXECUTOR_MAX_POOL_SIZE),
-                            PropertyUtils.getIntSystemProperty(
-                                    EXECUTOR_MAX_QUEUE_SIZE_PROP,
-                                    DEFAULT_EXECUTOR_MAX_QUEUE_SIZE), "DistDataStore"));
-
     public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster,
             Configuration configuration, InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
         Preconditions.checkNotNull(actorSystem, "actorSystem should not be null");
@@ -95,15 +67,16 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
     }
 
 
+    @SuppressWarnings("unchecked")
     @Override
-    public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
+    public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>>
+                                              ListenerRegistration<L> registerChangeListener(
         YangInstanceIdentifier path, L listener,
         AsyncDataBroker.DataChangeScope scope) {
 
         Preconditions.checkNotNull(path, "path should not be null");
         Preconditions.checkNotNull(listener, "listener should not be null");
 
-
         LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
 
         ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
@@ -112,10 +85,8 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
         String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
 
         Object result = actorContext.executeLocalShardOperation(shardName,
-            new RegisterChangeListener(path, dataChangeListenerActor.path(),
-                scope),
-            ActorContext.ASK_DURATION
-        );
+            new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+            ActorContext.ASK_DURATION);
 
         if (result != null) {
             RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
@@ -127,34 +98,31 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
         LOG.debug(
             "No local shard for shardName {} was found so returning a noop registration",
             shardName);
+
         return new NoOpDataChangeListenerRegistration(listener);
     }
 
-
-
-
-
     @Override
     public DOMStoreTransactionChain createTransactionChain() {
-        return new TransactionChainProxy(actorContext, executor, schemaContext);
+        return new TransactionChainProxy(actorContext, schemaContext);
     }
 
     @Override
     public DOMStoreReadTransaction newReadOnlyTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override
     public DOMStoreWriteTransaction newWriteOnlyTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.WRITE_ONLY,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override
     public DOMStoreReadWriteTransaction newReadWriteTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_WRITE,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override public void onGlobalContextUpdated(SchemaContext schemaContext) {
index 97bb196f9fc36b9635e906bb654fafc68b2538c4..49c7b7e78f52295e9b3b1fc5b1ae2ea10926def2 100644 (file)
@@ -53,7 +53,7 @@ public class ShardReadWriteTransaction extends ShardTransaction {
     } else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
       mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
     } else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
-      deleteData(transaction,DeleteData.fromSerizalizable(message));
+      deleteData(transaction,DeleteData.fromSerializable(message));
     } else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
       readyTransaction(transaction,new ReadyTransaction());
     } else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
index 91e578b46d1f6de8f2e841f7d7f459541a9c3bdb..b01fe7d4ac11a4a0eea7cfa28b16f5bea8e7aec5 100644 (file)
@@ -50,7 +50,7 @@ public class ShardWriteTransaction extends ShardTransaction {
     } else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
       mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
     } else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
-      deleteData(transaction,DeleteData.fromSerizalizable(message));
+      deleteData(transaction,DeleteData.fromSerializable(message));
     } else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
       readyTransaction(transaction,new ReadyTransaction());
     }else {
index 5b447943ea7fd798e5572e55483ff8b4fcf7e331..fc455b193e27118f6dcfcc2de93032f2676c5619 100644 (file)
@@ -10,11 +10,13 @@ package org.opendaylight.controller.cluster.datastore;
 
 import akka.actor.ActorPath;
 import akka.actor.ActorSelection;
+import akka.dispatch.Futures;
+import akka.dispatch.OnComplete;
 
+import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.SettableFuture;
 
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
@@ -28,124 +30,156 @@ import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCoh
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import scala.concurrent.Future;
+
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Callable;
 
 /**
  * ThreePhaseCommitCohortProxy represents a set of remote cohort proxies
  */
-public class ThreePhaseCommitCohortProxy implements
-    DOMStoreThreePhaseCommitCohort{
+public class ThreePhaseCommitCohortProxy implements DOMStoreThreePhaseCommitCohort{
 
-    private static final Logger
-        LOG = LoggerFactory.getLogger(DistributedDataStore.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
 
     private final ActorContext actorContext;
     private final List<ActorPath> cohortPaths;
-    private final ListeningExecutorService executor;
     private final String transactionId;
 
-
-    public ThreePhaseCommitCohortProxy(ActorContext actorContext,
-        List<ActorPath> cohortPaths,
-        String transactionId,
-        ListeningExecutorService executor) {
-
+    public ThreePhaseCommitCohortProxy(ActorContext actorContext, List<ActorPath> cohortPaths,
+            String transactionId) {
         this.actorContext = actorContext;
         this.cohortPaths = cohortPaths;
         this.transactionId = transactionId;
-        this.executor = executor;
     }
 
-    @Override public ListenableFuture<Boolean> canCommit() {
+    @Override
+    public ListenableFuture<Boolean> canCommit() {
         LOG.debug("txn {} canCommit", transactionId);
-        Callable<Boolean> call = new Callable<Boolean>() {
 
+        Future<Iterable<Object>> combinedFuture =
+                invokeCohorts(new CanCommitTransaction().toSerializable());
+
+        final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+        combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
             @Override
-            public Boolean call() throws Exception {
-                for(ActorPath actorPath : cohortPaths){
-
-                    Object message = new CanCommitTransaction().toSerializable();
-                    LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
-                    ActorSelection cohort = actorContext.actorSelection(actorPath);
-
-                    try {
-                        Object response =
-                                actorContext.executeRemoteOperation(cohort,
-                                        message,
-                                        ActorContext.ASK_DURATION);
-
-                        if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
-                            CanCommitTransactionReply reply =
-                                    CanCommitTransactionReply.fromSerializable(response);
-                            if (!reply.getCanCommit()) {
-                                return false;
-                            }
+            public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+                if(failure != null) {
+                    returnFuture.setException(failure);
+                    return;
+                }
+
+                boolean result = true;
+                for(Object response: responses) {
+                    if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
+                        CanCommitTransactionReply reply =
+                                CanCommitTransactionReply.fromSerializable(response);
+                        if (!reply.getCanCommit()) {
+                            result = false;
+                            break;
                         }
-                    } catch(RuntimeException e){
-                        // FIXME : Need to properly handle this
-                        LOG.error("Unexpected Exception", e);
-                        return false;
+                    } else {
+                        LOG.error("Unexpected response type {}", response.getClass());
+                        returnFuture.setException(new IllegalArgumentException(
+                                String.format("Unexpected response type %s", response.getClass())));
+                        return;
                     }
                 }
 
-                return true;
+                returnFuture.set(Boolean.valueOf(result));
             }
-        };
+        }, actorContext.getActorSystem().dispatcher());
+
+        return returnFuture;
+    }
+
+    private Future<Iterable<Object>> invokeCohorts(Object message) {
+        List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
+        for(ActorPath actorPath : cohortPaths) {
+
+            LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
 
-        return executor.submit(call);
+            ActorSelection cohort = actorContext.actorSelection(actorPath);
+
+            futureList.add(actorContext.executeRemoteOperationAsync(cohort, message,
+                    ActorContext.ASK_DURATION));
+        }
+
+        return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
     }
 
-    @Override public ListenableFuture<Void> preCommit() {
+    @Override
+    public ListenableFuture<Void> preCommit() {
         LOG.debug("txn {} preCommit", transactionId);
-        return voidOperation(new PreCommitTransaction().toSerializable(), PreCommitTransactionReply.SERIALIZABLE_CLASS);
+        return voidOperation(new PreCommitTransaction().toSerializable(),
+                PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
     }
 
-    @Override public ListenableFuture<Void> abort() {
+    @Override
+    public ListenableFuture<Void> abort() {
         LOG.debug("txn {} abort", transactionId);
-        return voidOperation(new AbortTransaction().toSerializable(), AbortTransactionReply.SERIALIZABLE_CLASS);
+
+        // Note - we pass false for propagateException. In the front-end data broker, this method
+        // is called when one of the 3 phases fails with an exception. We'd rather have that
+        // original exception propagated to the client. If our abort fails and we propagate the
+        // exception then that exception will supersede and suppress the original exception. But
+        // it's the original exception that is the root cause and of more interest to the client.
+
+        return voidOperation(new AbortTransaction().toSerializable(),
+                AbortTransactionReply.SERIALIZABLE_CLASS, false);
     }
 
-    @Override public ListenableFuture<Void> commit() {
+    @Override
+    public ListenableFuture<Void> commit() {
         LOG.debug("txn {} commit", transactionId);
-        return voidOperation(new CommitTransaction().toSerializable(), CommitTransactionReply.SERIALIZABLE_CLASS);
+        return voidOperation(new CommitTransaction().toSerializable(),
+                CommitTransactionReply.SERIALIZABLE_CLASS, true);
     }
 
-    private ListenableFuture<Void> voidOperation(final Object message, final Class expectedResponseClass){
-        Callable<Void> call = new Callable<Void>() {
-
-            @Override public Void call() throws Exception {
-                for(ActorPath actorPath : cohortPaths){
-                    ActorSelection cohort = actorContext.actorSelection(actorPath);
-
-                    LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
-                    try {
-                        Object response =
-                            actorContext.executeRemoteOperation(cohort,
-                                message,
-                                ActorContext.ASK_DURATION);
-
-                        if (response != null && !response.getClass()
-                            .equals(expectedResponseClass)) {
-                            throw new RuntimeException(
-                                String.format(
-                                    "did not get the expected response \n\t\t expected : %s \n\t\t actual   : %s",
-                                    expectedResponseClass.toString(),
-                                    response.getClass().toString())
-                            );
+    private ListenableFuture<Void> voidOperation(final Object message,
+            final Class<?> expectedResponseClass, final boolean propagateException) {
+
+        Future<Iterable<Object>> combinedFuture = invokeCohorts(message);
+
+        final SettableFuture<Void> returnFuture = SettableFuture.create();
+
+        combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
+            @Override
+            public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+
+                Throwable exceptionToPropagate = failure;
+                if(exceptionToPropagate == null) {
+                    for(Object response: responses) {
+                        if(!response.getClass().equals(expectedResponseClass)) {
+                            exceptionToPropagate = new IllegalArgumentException(
+                                    String.format("Unexpected response type %s",
+                                            response.getClass()));
+                            break;
                         }
-                    } catch(TimeoutException e){
-                        LOG.error(String.format("A timeout occurred when processing operation : %s", message));
                     }
                 }
-                return null;
+
+                if(exceptionToPropagate != null) {
+                    if(propagateException) {
+                        // We don't log the exception here to avoid redundant logging since we're
+                        // propagating to the caller in MD-SAL core who will log it.
+                        returnFuture.setException(exceptionToPropagate);
+                    } else {
+                        // Since the caller doesn't want us to propagate the exception we'll also
+                        // not log it normally. But it's usually not good to totally silence
+                        // exceptions so we'll log it to debug level.
+                        LOG.debug(String.format("%s failed",  message.getClass().getSimpleName()),
+                                exceptionToPropagate);
+                        returnFuture.set(null);
+                    }
+                } else {
+                    returnFuture.set(null);
+                }
             }
-        };
+        }, actorContext.getActorSystem().dispatcher());
 
-        return executor.submit(call);
+        return returnFuture;
     }
 
     public List<ActorPath> getCohortPaths() {
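
The cohort proxy above now asks every cohort asynchronously, collapses the per-cohort futures into one Future<Iterable<Object>> with akka.dispatch.Futures.sequence, and bridges the outcome into the Guava future handed back to MD-SAL through an OnComplete callback. A stripped-down sketch of that pattern; executeAsync stands in for ActorContext.executeRemoteOperationAsync and is stubbed here so the example runs on its own, and all paths and messages are illustrative.

    import akka.actor.ActorSystem;
    import akka.dispatch.Futures;
    import akka.dispatch.OnComplete;
    import com.google.common.collect.Lists;
    import com.google.common.util.concurrent.ListenableFuture;
    import com.google.common.util.concurrent.SettableFuture;
    import scala.concurrent.Future;

    import java.util.Arrays;
    import java.util.List;

    public class CombinedAskExample {

        // Stand-in for ActorContext.executeRemoteOperationAsync: would normally return the
        // ask() future for one cohort actor; stubbed with an already-completed future here.
        static Future<Object> executeAsync(Object message, String cohortPath) {
            return Futures.successful((Object) ("reply from " + cohortPath));
        }

        static ListenableFuture<Void> sendToAll(ActorSystem system, Object message,
                List<String> cohortPaths) {
            List<Future<Object>> futures = Lists.newArrayListWithCapacity(cohortPaths.size());
            for (String path : cohortPaths) {
                futures.add(executeAsync(message, path));
            }

            // One future that completes when every cohort has replied (or any ask fails).
            Future<Iterable<Object>> combined = Futures.sequence(futures, system.dispatcher());

            final SettableFuture<Void> returnFuture = SettableFuture.create();
            combined.onComplete(new OnComplete<Iterable<Object>>() {
                @Override
                public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
                    if (failure != null) {
                        returnFuture.setException(failure);   // propagate the first failure
                    } else {
                        returnFuture.set(null);               // all cohorts answered
                    }
                }
            }, system.dispatcher());

            return returnFuture;
        }

        public static void main(String[] args) throws Exception {
            ActorSystem system = ActorSystem.create("example");
            ListenableFuture<Void> done = sendToAll(system, "canCommit",
                    Arrays.asList("/user/cohort-1", "/user/cohort-2"));
            done.get();          // completes once both stubbed replies arrive
            system.shutdown();
        }
    }
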
index 5e9defa5b59c5db64033c1a92fbdb611766152a2..c4ec760b40cd57d54579144f010b5e0f7425363e 100644 (file)
@@ -15,39 +15,34 @@ import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 
-import com.google.common.util.concurrent.ListeningExecutorService;
-
 /**
  * TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard
  */
 public class TransactionChainProxy implements DOMStoreTransactionChain{
     private final ActorContext actorContext;
-    private final ListeningExecutorService transactionExecutor;
     private final SchemaContext schemaContext;
 
-    public TransactionChainProxy(ActorContext actorContext, ListeningExecutorService transactionExecutor,
-            SchemaContext schemaContext) {
+    public TransactionChainProxy(ActorContext actorContext, SchemaContext schemaContext) {
         this.actorContext = actorContext;
-        this.transactionExecutor = transactionExecutor;
         this.schemaContext = schemaContext;
     }
 
     @Override
     public DOMStoreReadTransaction newReadOnlyTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.READ_ONLY, schemaContext);
     }
 
     @Override
     public DOMStoreReadWriteTransaction newReadWriteTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.WRITE_ONLY, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
     }
 
     @Override
     public DOMStoreWriteTransaction newWriteOnlyTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.READ_WRITE, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.READ_WRITE, schemaContext);
     }
 
     @Override
index 95862ae9d93670de67dbdd8bd99074a9ba49e8b8..5b5b1296af08e42c2278b6db80ac05cbf08329c6 100644 (file)
@@ -12,13 +12,14 @@ import akka.actor.ActorPath;
 import akka.actor.ActorRef;
 import akka.actor.ActorSelection;
 import akka.actor.Props;
+import akka.dispatch.OnComplete;
+
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.CheckedFuture;
 import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import com.google.common.util.concurrent.SettableFuture;
+
 import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
 import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
@@ -44,11 +45,12 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import scala.concurrent.Future;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -80,25 +82,22 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     private final ActorContext actorContext;
     private final Map<String, TransactionContext> remoteTransactionPaths = new HashMap<>();
     private final TransactionIdentifier identifier;
-    private final ListeningExecutorService executor;
     private final SchemaContext schemaContext;
+    private boolean inReadyState;
 
-    public TransactionProxy(
-        ActorContext actorContext,
-        TransactionType transactionType,
-        ListeningExecutorService executor,
-        SchemaContext schemaContext
-    ) {
+    public TransactionProxy(ActorContext actorContext, TransactionType transactionType,
+            SchemaContext schemaContext) {
         this.actorContext = Preconditions.checkNotNull(actorContext, "actorContext should not be null");
         this.transactionType = Preconditions.checkNotNull(transactionType, "transactionType should not be null");
-        this.executor = Preconditions.checkNotNull(executor, "executor should not be null");
         this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null");
 
         String memberName = actorContext.getCurrentMemberName();
         if(memberName == null){
             memberName = "UNKNOWN-MEMBER";
         }
-        this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(counter.getAndIncrement()).build();
+
+        this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(
+                counter.getAndIncrement()).build();
 
         LOG.debug("Created txn {}", identifier);
 
@@ -108,6 +107,9 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read(
             final YangInstanceIdentifier path) {
 
+        Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+                "Read operation on write-only transaction is not allowed");
+
         LOG.debug("txn {} read {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -115,8 +117,12 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         return transactionContext(path).readData(path);
     }
 
-    @Override public CheckedFuture<Boolean, ReadFailedException> exists(
-        YangInstanceIdentifier path) {
+    @Override
+    public CheckedFuture<Boolean, ReadFailedException> exists(YangInstanceIdentifier path) {
+
+        Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+                "Exists operation on write-only transaction is not allowed");
+
         LOG.debug("txn {} exists {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -124,9 +130,18 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         return transactionContext(path).dataExists(path);
     }
 
+    private void checkModificationState() {
+        Preconditions.checkState(transactionType != TransactionType.READ_ONLY,
+                "Modification operation on read-only transaction is not allowed");
+        Preconditions.checkState(!inReadyState,
+                "Transaction is sealed - further modifications are not allowed");
+    }
+
     @Override
     public void write(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
 
+        checkModificationState();
+
         LOG.debug("txn {} write {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -137,6 +152,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     @Override
     public void merge(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
 
+        checkModificationState();
+
         LOG.debug("txn {} merge {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -147,6 +164,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     @Override
     public void delete(YangInstanceIdentifier path) {
 
+        checkModificationState();
+
         LOG.debug("txn {} delete {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -156,25 +175,36 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
 
     @Override
     public DOMStoreThreePhaseCommitCohort ready() {
+
+        checkModificationState();
+
+        inReadyState = true;
+
         List<ActorPath> cohortPaths = new ArrayList<>();
 
-        LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier, remoteTransactionPaths.size());
+        LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier,
+                remoteTransactionPaths.size());
 
         for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
 
-            LOG.debug("txn {} Readying transaction for shard {}", identifier, transactionContext.getShardName());
+            LOG.debug("txn {} Readying transaction for shard {}", identifier,
+                    transactionContext.getShardName());
 
             Object result = transactionContext.readyTransaction();
 
             if(result.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)){
-                ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(actorContext.getActorSystem(),result);
-                String resolvedCohortPath = transactionContext
-                    .getResolvedCohortPath(reply.getCohortPath().toString());
+                ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
+                        actorContext.getActorSystem(),result);
+                String resolvedCohortPath = transactionContext.getResolvedCohortPath(
+                        reply.getCohortPath().toString());
                 cohortPaths.add(actorContext.actorFor(resolvedCohortPath));
+            } else {
+                LOG.error("Was expecting {} but got {}", ReadyTransactionReply.SERIALIZABLE_CLASS,
+                        result.getClass());
             }
         }
 
-        return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString(), executor);
+        return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString());
     }
 
     @Override
@@ -213,8 +243,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             Object response = actorContext.executeShardOperation(shardName,
                 new CreateTransaction(identifier.toString(),this.transactionType.ordinal() ).toSerializable(),
                 ActorContext.ASK_DURATION);
-            if (response.getClass()
-                .equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
+            if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
                 CreateTransactionReply reply =
                     CreateTransactionReply.fromSerializable(response);
 
@@ -229,11 +258,13 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
                         transactionActor);
 
                 remoteTransactionPaths.put(shardName, transactionContext);
+            } else {
+                LOG.error("Was expecting {} but got {}", CreateTransactionReply.SERIALIZABLE_CLASS,
+                        response.getClass());
             }
-        } catch(TimeoutException | PrimaryNotFoundException e){
+        } catch(Exception e){
             LOG.error("txn {} Creating NoOpTransaction because of : {}", identifier, e.getMessage());
-            remoteTransactionPaths.put(shardName,
-                new NoOpTransactionContext(shardName));
+            remoteTransactionPaths.put(shardName, new NoOpTransactionContext(shardName, e));
         }
     }
 
@@ -272,7 +303,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             this.actor = actor;
         }
 
-        @Override public String getShardName() {
+        @Override
+        public String getShardName() {
             return shardName;
         }
 
@@ -280,96 +312,105 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             return actor;
         }
 
-        @Override public String getResolvedCohortPath(String cohortPath) {
+        @Override
+        public String getResolvedCohortPath(String cohortPath) {
             return actorContext.resolvePath(actorPath, cohortPath);
         }
 
-        @Override public void closeTransaction() {
-            getActor().tell(
-                new CloseTransaction().toSerializable(), null);
+        @Override
+        public void closeTransaction() {
+            actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
         }
 
-        @Override public Object readyTransaction() {
+        @Override
+        public Object readyTransaction() {
             return actorContext.executeRemoteOperation(getActor(),
-                new ReadyTransaction().toSerializable(),
-                ActorContext.ASK_DURATION
-            );
-
+                    new ReadyTransaction().toSerializable(), ActorContext.ASK_DURATION);
         }
 
-        @Override public void deleteData(YangInstanceIdentifier path) {
-            getActor().tell(new DeleteData(path).toSerializable(), null);
+        @Override
+        public void deleteData(YangInstanceIdentifier path) {
+            actorContext.sendRemoteOperationAsync(getActor(), new DeleteData(path).toSerializable() );
         }
 
-        @Override public void mergeData(YangInstanceIdentifier path,
-            NormalizedNode<?, ?> data) {
-            getActor()
-                .tell(new MergeData(path, data, schemaContext).toSerializable(),
-                    null);
+        @Override
+        public void mergeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+            actorContext.sendRemoteOperationAsync(getActor(),
+                    new MergeData(path, data, schemaContext).toSerializable());
         }
 
         @Override
         public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
             final YangInstanceIdentifier path) {
 
-            Callable<Optional<NormalizedNode<?, ?>>> call =
-                new Callable<Optional<NormalizedNode<?, ?>>>() {
-
-                    @Override public Optional<NormalizedNode<?, ?>> call()
-                        throws Exception {
-                        Object response = actorContext
-                            .executeRemoteOperation(getActor(),
-                                new ReadData(path).toSerializable(),
-                                ActorContext.ASK_DURATION);
-                        if (response.getClass()
-                            .equals(ReadDataReply.SERIALIZABLE_CLASS)) {
-                            ReadDataReply reply = ReadDataReply
-                                .fromSerializable(schemaContext, path,
-                                    response);
+            final SettableFuture<Optional<NormalizedNode<?, ?>>> returnFuture = SettableFuture.create();
+
+            OnComplete<Object> onComplete = new OnComplete<Object>() {
+                @Override
+                public void onComplete(Throwable failure, Object response) throws Throwable {
+                    if(failure != null) {
+                        returnFuture.setException(new ReadFailedException(
+                                "Error reading data for path " + path, failure));
+                    } else {
+                        if (response.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
+                            ReadDataReply reply = ReadDataReply.fromSerializable(schemaContext,
+                                    path, response);
                             if (reply.getNormalizedNode() == null) {
-                                return Optional.absent();
+                                returnFuture.set(Optional.<NormalizedNode<?, ?>>absent());
+                            } else {
+                                returnFuture.set(Optional.<NormalizedNode<?, ?>>of(
+                                        reply.getNormalizedNode()));
                             }
-                            return Optional.<NormalizedNode<?, ?>>of(
-                                reply.getNormalizedNode());
+                        } else {
+                            returnFuture.setException(new ReadFailedException(
+                                    "Invalid response reading data for path " + path));
                         }
-
-                        throw new ReadFailedException("Read Failed " + path);
                     }
-                };
+                }
+            };
 
-            return MappingCheckedFuture
-                .create(executor.submit(call), ReadFailedException.MAPPER);
-        }
+            Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+                    new ReadData(path).toSerializable(), ActorContext.ASK_DURATION);
+            future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
 
-        @Override public void writeData(YangInstanceIdentifier path,
-            NormalizedNode<?, ?> data) {
-            getActor()
-                .tell(new WriteData(path, data, schemaContext).toSerializable(),
-                    null);
+            return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
         }
 
-        @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
-            final YangInstanceIdentifier path) {
-
-            Callable<Boolean> call = new Callable<Boolean>() {
-
-                @Override public Boolean call() throws Exception {
-                    Object o = actorContext.executeRemoteOperation(getActor(),
-                        new DataExists(path).toSerializable(),
-                        ActorContext.ASK_DURATION
-                    );
-
+        @Override
+        public void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+            actorContext.sendRemoteOperationAsync(getActor(),
+                    new WriteData(path, data, schemaContext).toSerializable());
+        }
 
-                    if (DataExistsReply.SERIALIZABLE_CLASS
-                        .equals(o.getClass())) {
-                        return DataExistsReply.fromSerializable(o).exists();
+        @Override
+        public CheckedFuture<Boolean, ReadFailedException> dataExists(
+                final YangInstanceIdentifier path) {
+
+            final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+            OnComplete<Object> onComplete = new OnComplete<Object>() {
+                @Override
+                public void onComplete(Throwable failure, Object response) throws Throwable {
+                    if(failure != null) {
+                        returnFuture.setException(new ReadFailedException(
+                                "Error checking exists for path " + path, failure));
+                    } else {
+                        if (response.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) {
+                            returnFuture.set(Boolean.valueOf(DataExistsReply.
+                                        fromSerializable(response).exists()));
+                        } else {
+                            returnFuture.setException(new ReadFailedException(
+                                    "Invalid response checking exists for path " + path));
+                        }
                     }
-
-                    throw new ReadFailedException("Exists Failed " + path);
                 }
             };
-            return MappingCheckedFuture
-                .create(executor.submit(call), ReadFailedException.MAPPER);
+
+            Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+                    new DataExists(path).toSerializable(), ActorContext.ASK_DURATION);
+            future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
+
+            return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
         }
     }
 
@@ -379,22 +420,28 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             LOG = LoggerFactory.getLogger(NoOpTransactionContext.class);
 
         private final String shardName;
+        private final Exception failure;
 
         private ActorRef cohort;
 
-        public NoOpTransactionContext(String shardName){
+        public NoOpTransactionContext(String shardName, Exception failure){
             this.shardName = shardName;
+            this.failure = failure;
         }
-        @Override public String getShardName() {
+
+        @Override
+        public String getShardName() {
             return  shardName;
 
         }
 
-        @Override public String getResolvedCohortPath(String cohortPath) {
+        @Override
+        public String getResolvedCohortPath(String cohortPath) {
             return cohort.path().toString();
         }
 
-        @Override public void closeTransaction() {
+        @Override
+        public void closeTransaction() {
             LOG.warn("txn {} closeTransaction called", identifier);
         }
 
@@ -404,11 +451,13 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             return new ReadyTransactionReply(cohort.path()).toSerializable();
         }
 
-        @Override public void deleteData(YangInstanceIdentifier path) {
+        @Override
+        public void deleteData(YangInstanceIdentifier path) {
             LOG.warn("txn {} deleteData called path = {}", identifier, path);
         }
 
-        @Override public void mergeData(YangInstanceIdentifier path,
+        @Override
+        public void mergeData(YangInstanceIdentifier path,
             NormalizedNode<?, ?> data) {
             LOG.warn("txn {} mergeData called path = {}", identifier, path);
         }
@@ -417,8 +466,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
             YangInstanceIdentifier path) {
             LOG.warn("txn {} readData called path = {}", identifier, path);
-            return Futures.immediateCheckedFuture(
-                Optional.<NormalizedNode<?, ?>>absent());
+            return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+                    "Error reading data for path " + path, failure));
         }
 
         @Override public void writeData(YangInstanceIdentifier path,
@@ -429,10 +478,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
             YangInstanceIdentifier path) {
             LOG.warn("txn {} dataExists called path = {}", identifier, path);
-
-            // Returning false instead of an exception to keep this aligned with
-            // read
-            return Futures.immediateCheckedFuture(false);
+            return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+                    "Error checking exists for path " + path, failure));
         }
     }
 
index 17861a5a68b0e7da27f7b7996ddb2c87cdcd22ad..9ae851e76c0f4db04776d14e7cfeab1ab20fefaa 100644 (file)
@@ -31,7 +31,7 @@ public class DeleteData implements SerializableMessage {
             .setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path)).build();
     }
 
-    public static DeleteData fromSerizalizable(Object serializable){
+    public static DeleteData fromSerializable(Object serializable){
         ShardTransactionMessages.DeleteData o = (ShardTransactionMessages.DeleteData) serializable;
         return new DeleteData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
     }
index 4706c66e2594eae1384b465bf5d0b246c72d8223..e12a9663d1fca8a969c25166f38986ea19eab51a 100644 (file)
@@ -14,6 +14,7 @@ import akka.actor.ActorSelection;
 import akka.actor.ActorSystem;
 import akka.actor.PoisonPill;
 import akka.util.Timeout;
+
 import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
 import org.opendaylight.controller.cluster.datastore.Configuration;
 import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
@@ -22,9 +23,9 @@ import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
 import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
 import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
 import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import scala.concurrent.Await;
 import scala.concurrent.Future;
 import scala.concurrent.duration.Duration;
@@ -54,8 +55,6 @@ public class ActorContext {
     private final ClusterWrapper clusterWrapper;
     private final Configuration configuration;
 
-    private SchemaContext schemaContext = null;
-
     public ActorContext(ActorSystem actorSystem, ActorRef shardManager,
         ClusterWrapper clusterWrapper,
         Configuration configuration) {
@@ -174,6 +173,33 @@ public class ActorContext {
         }
     }
 
+    /**
+     * Execute an operation on a remote actor asynchronously.
+     *
+     * @param actor the ActorSelection
+     * @param message the message to send
+     * @param duration the maximum amount of time to send the message
+     * @return a Future containing the eventual result
+     */
+    public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message,
+            FiniteDuration duration) {
+
+        LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
+
+        return ask(actor, message, new Timeout(duration));
+    }
+
+    /**
+     * Sends an operation to be executed by a remote actor asynchronously without waiting for a
+     * reply (essentially fire and forget).
+     *
+     * @param actor the ActorSelection
+     * @param message the message to send
+     */
+    public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
+        actor.tell(message, ActorRef.noSender());
+    }
+
     /**
      * Execute an operation on the primary for a given shard
      * <p>
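
A brief sketch of how the two new asynchronous helpers are meant to be driven; shardPath, path, data and schemaContext are placeholders rather than part of this change:

    ActorSelection shard = actorContext.actorSelection(shardPath);

    // Tell-based: no reply is expected, so nothing blocks the caller.
    actorContext.sendRemoteOperationAsync(shard,
            new WriteData(path, data, schemaContext).toSerializable());

    // Ask-based: returns a Scala Future that completes with the reply or the failure.
    Future<Object> reply = actorContext.executeRemoteOperationAsync(shard,
            new DataExists(path).toSerializable(), ActorContext.ASK_DURATION);
    reply.onComplete(new OnComplete<Object>() {
        @Override
        public void onComplete(Throwable failure, Object response) throws Throwable {
            // Handle the failure or the reply on the dispatcher thread.
        }
    }, actorContext.getActorSystem().dispatcher());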
index 4eca5671f626543b5c9ca8052f1427945c72a302..87231f08849ed02398472bb792a40a48753a96be 100644 (file)
 package org.opendaylight.controller.cluster.datastore;
 
-import akka.actor.ActorRef;
+import akka.actor.ActorPath;
+import akka.actor.ActorSelection;
 import akka.actor.Props;
+import akka.dispatch.Futures;
 
+import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.isA;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.times;
 
-import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.stubbing.Stubber;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.SerializableMessage;
+import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
 import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
+import scala.concurrent.duration.FiniteDuration;
 
-import java.util.Arrays;
-import java.util.concurrent.Executors;
-
-import static org.junit.Assert.assertNotNull;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
 
 public class ThreePhaseCommitCohortProxyTest extends AbstractActorTest {
 
-    private ThreePhaseCommitCohortProxy proxy;
-    private Props props;
-    private ActorRef actorRef;
-    private MockActorContext actorContext;
-    private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(
-                        Executors.newSingleThreadExecutor());
+    @Mock
+    private ActorContext actorContext;
 
     @Before
-    public void setUp(){
-        props = Props.create(MessageCollectorActor.class);
-        actorRef = getSystem().actorOf(props);
-        actorContext = new MockActorContext(this.getSystem());
+    public void setUp() {
+        MockitoAnnotations.initMocks(this);
 
-        proxy =
-            new ThreePhaseCommitCohortProxy(actorContext,
-                Arrays.asList(actorRef.path()), "txn-1", executor);
+        doReturn(getSystem()).when(actorContext).getActorSystem();
+    }
 
+    private ThreePhaseCommitCohortProxy setupProxy(int nCohorts) {
+        List<ActorPath> cohorts = Lists.newArrayList();
+        for(int i = 1; i <= nCohorts; i++) {
+            ActorPath path = getSystem().actorOf(Props.create(MessageCollectorActor.class)).path();
+            cohorts.add(path);
+            doReturn(mock(ActorSelection.class)).when(actorContext).actorSelection(path);
+        }
+
+        return new ThreePhaseCommitCohortProxy(actorContext, cohorts, "txn-1");
     }
 
-    @After
-    public void tearDown() {
-        executor.shutdownNow();
+    private void setupMockActorContext(Class<?> requestType, Object... responses) {
+        Stubber stubber = doReturn(responses[0] instanceof Throwable ? Futures
+                .failed((Throwable) responses[0]) : Futures
+                .successful(((SerializableMessage) responses[0]).toSerializable()));
+
+        for(int i = 1; i < responses.length; i++) {
+            stubber = stubber.doReturn(responses[i] instanceof Throwable ? Futures
+                    .failed((Throwable) responses[i]) : Futures
+                    .successful(((SerializableMessage) responses[i]).toSerializable()));
+        }
+
+        stubber.when(actorContext).executeRemoteOperationAsync(any(ActorSelection.class),
+                isA(requestType), any(FiniteDuration.class));
+    }
+
+    private void verifyCohortInvocations(int nCohorts, Class<?> requestType) {
+        verify(actorContext, times(nCohorts)).executeRemoteOperationAsync(
+                any(ActorSelection.class), isA(requestType), any(FiniteDuration.class));
+    }
+
+    @Test
+    public void testCanCommitWithOneCohort() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+                new CanCommitTransactionReply(true));
+
+        ListenableFuture<Boolean> future = proxy.canCommit();
+
+        assertEquals("canCommit", true, future.get());
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+                new CanCommitTransactionReply(false));
+
+        future = proxy.canCommit();
+
+        assertEquals("canCommit", false, future.get());
+
+        verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
     }
 
     @Test
-    public void testCanCommit() throws Exception {
-        actorContext.setExecuteRemoteOperationResponse(new CanCommitTransactionReply(true).toSerializable());
+    public void testCanCommitWithMultipleCohorts() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+                new CanCommitTransactionReply(true), new CanCommitTransactionReply(true));
 
         ListenableFuture<Boolean> future = proxy.canCommit();
 
-        Assert.assertTrue(future.get().booleanValue());
+        assertEquals("canCommit", true, future.get());
 
+        verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
+    }
+
+    @Test
+    public void testCanCommitWithMultipleCohortsAndOneFailure() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(3);
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+                new CanCommitTransactionReply(true), new CanCommitTransactionReply(false),
+                new CanCommitTransactionReply(true));
+
+        ListenableFuture<Boolean> future = proxy.canCommit();
+
+        assertEquals("canCommit", false, future.get());
+
+        verifyCohortInvocations(3, CanCommitTransaction.SERIALIZABLE_CLASS);
+    }
+
+    @Test(expected = ExecutionException.class)
+    public void testCanCommitWithExceptionFailure() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+        proxy.canCommit().get();
+    }
+
+    @Test(expected = ExecutionException.class)
+    public void testCanCommitWithInvalidResponseType() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+        setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+                new PreCommitTransactionReply());
+
+        proxy.canCommit().get();
     }
 
     @Test
     public void testPreCommit() throws Exception {
-        actorContext.setExecuteRemoteOperationResponse(new PreCommitTransactionReply().toSerializable());
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
 
-        ListenableFuture<Void> future = proxy.preCommit();
+        setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+                new PreCommitTransactionReply());
 
-        future.get();
+        proxy.preCommit().get();
 
+        verifyCohortInvocations(1, PreCommitTransaction.SERIALIZABLE_CLASS);
+    }
+
+    @Test(expected = ExecutionException.class)
+    public void testPreCommitWithFailure() throws Exception {
+        ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+        setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+                new PreCommitTransactionReply(), new RuntimeException("mock"));
+
+        proxy.preCommit().get();
     }
 
     @Test
     public void testAbort() throws Exception {
-        actorContext.setExecuteRemoteOperationResponse(new AbortTransactionReply().toSerializable());
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
 
-        ListenableFuture<Void> future = proxy.abort();
+        setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new AbortTransactionReply());
 
-        future.get();
+        proxy.abort().get();
 
+        verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
+    }
+
+    @Test
+    public void testAbortWithFailure() throws Exception {
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+        setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+        // The exception should not get propagated.
+        proxy.abort().get();
+
+        verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
     }
 
     @Test
     public void testCommit() throws Exception {
-        actorContext.setExecuteRemoteOperationResponse(new CommitTransactionReply().toSerializable());
 
-        ListenableFuture<Void> future = proxy.commit();
+        ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+        setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+                new CommitTransactionReply());
+
+        proxy.commit().get();
+
+        verifyCohortInvocations(2, CommitTransaction.SERIALIZABLE_CLASS);
+    }
+
+    @Test(expected = ExecutionException.class)
+    public void testCommitWithFailure() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(2);
 
-        future.get();
+        setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+                new RuntimeException("mock"));
+
+        proxy.commit().get();
+    }
+
+    @Test(expected = ExecutionException.class)
+    public void testCommitWithInvalidResponseType() throws Exception {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+        setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new PreCommitTransactionReply());
+
+        proxy.commit().get();
     }
 
     @Test
-    public void testGetCohortPaths() throws Exception {
-        assertNotNull(proxy.getCohortPaths());
+    public void testGetCohortPaths() {
+
+        ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+        List<ActorPath> paths = proxy.getCohortPaths();
+        assertNotNull("getCohortPaths returned null", paths);
+        assertEquals("getCohortPaths size", 2, paths.size());
     }
 }
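
The setupMockActorContext helper above reduces to a compact Mockito idiom: every expected cohort reply is wrapped in an already-completed Akka future, so the proxy's asynchronous path is exercised without real actors. A condensed sketch with illustrative values:

    // First invocation yields a successful canCommit reply, the second a failed future.
    doReturn(Futures.successful(new CanCommitTransactionReply(true).toSerializable()))
        .doReturn(Futures.failed(new RuntimeException("mock")))
        .when(actorContext).executeRemoteOperationAsync(
                any(ActorSelection.class), isA(CanCommitTransaction.SERIALIZABLE_CLASS),
                any(FiniteDuration.class));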
index 62052f38ab89b6962dc31622332f85113492c924..14696f786e7e36888be3b5517c14ae4b9779340f 100644 (file)
@@ -1,32 +1,44 @@
 package org.opendaylight.controller.cluster.datastore;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+import akka.actor.ActorPath;
 import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
 import akka.actor.Props;
+import akka.dispatch.Futures;
 import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import junit.framework.Assert;
-import org.junit.After;
+
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
+
+import org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType;
 import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
 import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
 import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.DataExists;
 import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
 import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
 import org.opendaylight.controller.cluster.datastore.messages.MergeData;
-import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
+import org.opendaylight.controller.cluster.datastore.messages.ReadData;
 import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.DefaultShardStrategy;
 import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
 import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
 import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
-import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
-import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
 import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration;
 import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
 import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
@@ -34,377 +46,433 @@ import org.opendaylight.controller.protobuff.messages.transaction.ShardTransacti
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
 import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+import scala.concurrent.Future;
 import scala.concurrent.duration.FiniteDuration;
 
-import java.util.List;
-import java.util.concurrent.Executors;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
 
-import static junit.framework.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.argThat;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.isA;
+
+@SuppressWarnings("resource")
 public class TransactionProxyTest extends AbstractActorTest {
 
+    @SuppressWarnings("serial")
+    static class TestException extends RuntimeException {
+    }
+
+    static interface Invoker {
+        void invoke(TransactionProxy proxy) throws Exception;
+    }
+
     private final Configuration configuration = new MockConfiguration();
 
-    private final ActorContext testContext =
-        new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)), new MockClusterWrapper(), configuration );
+    @Mock
+    private ActorContext mockActorContext;
 
-    private final ListeningExecutorService transactionExecutor =
-        MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+    private SchemaContext schemaContext;
+
+    String memberName = "mock-member";
 
     @Before
     public void setUp(){
-        ShardStrategyFactory.setConfiguration(configuration);
-    }
+        MockitoAnnotations.initMocks(this);
 
-    @After
-    public void tearDown() {
-        transactionExecutor.shutdownNow();
-    }
+        schemaContext = TestModel.createTestContext();
 
-    @Test
-    public void testRead() throws Exception {
-        final Props props = Props.create(DoNothingActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
+        doReturn(getSystem()).when(mockActorContext).getActorSystem();
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+        ShardStrategyFactory.setConfiguration(configuration);
+    }
 
+    private CreateTransaction eqCreateTransaction(final String memberName,
+            final TransactionType type) {
+        ArgumentMatcher<CreateTransaction> matcher = new ArgumentMatcher<CreateTransaction>() {
+            @Override
+            public boolean matches(Object argument) {
+                CreateTransaction obj = CreateTransaction.fromSerializable(argument);
+                return obj.getTransactionId().startsWith(memberName) &&
+                       obj.getTransactionType() == type.ordinal();
+            }
+        };
+
+        return argThat(matcher);
+    }
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+    private DataExists eqDataExists() {
+        ArgumentMatcher<DataExists> matcher = new ArgumentMatcher<DataExists>() {
+            @Override
+            public boolean matches(Object argument) {
+                DataExists obj = DataExists.fromSerializable(argument);
+                return obj.getPath().equals(TestModel.TEST_PATH);
+            }
+        };
 
+        return argThat(matcher);
+    }
 
-        actorContext.setExecuteRemoteOperationResponse(
-            new ReadDataReply(TestModel.createTestContext(), null)
-                .toSerializable());
+    private ReadData eqReadData() {
+        ArgumentMatcher<ReadData> matcher = new ArgumentMatcher<ReadData>() {
+            @Override
+            public boolean matches(Object argument) {
+                ReadData obj = ReadData.fromSerializable(argument);
+                return obj.getPath().equals(TestModel.TEST_PATH);
+            }
+        };
 
-        ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
-            transactionProxy.read(TestModel.TEST_PATH);
+        return argThat(matcher);
+    }
 
-        Optional<NormalizedNode<?, ?>> normalizedNodeOptional = read.get();
+    private WriteData eqWriteData(final NormalizedNode<?, ?> nodeToWrite) {
+        ArgumentMatcher<WriteData> matcher = new ArgumentMatcher<WriteData>() {
+            @Override
+            public boolean matches(Object argument) {
+                WriteData obj = WriteData.fromSerializable(argument, schemaContext);
+                return obj.getPath().equals(TestModel.TEST_PATH) &&
+                       obj.getData().equals(nodeToWrite);
+            }
+        };
+
+        return argThat(matcher);
+    }
 
-        Assert.assertFalse(normalizedNodeOptional.isPresent());
+    private MergeData eqMergeData(final NormalizedNode<?, ?> nodeToWrite) {
+        ArgumentMatcher<MergeData> matcher = new ArgumentMatcher<MergeData>() {
+            @Override
+            public boolean matches(Object argument) {
+                MergeData obj = MergeData.fromSerializable(argument, schemaContext);
+                return obj.getPath().equals(TestModel.TEST_PATH) &&
+                       obj.getData().equals(nodeToWrite);
+            }
+        };
+
+        return argThat(matcher);
+    }
 
-        actorContext.setExecuteRemoteOperationResponse(new ReadDataReply(
-            TestModel.createTestContext(),ImmutableNodes.containerNode(TestModel.TEST_QNAME)).toSerializable());
+    private DeleteData eqDeleteData() {
+        ArgumentMatcher<DeleteData> matcher = new ArgumentMatcher<DeleteData>() {
+            @Override
+            public boolean matches(Object argument) {
+                DeleteData obj = DeleteData.fromSerializable(argument);
+                return obj.getPath().equals(TestModel.TEST_PATH);
+            }
+        };
 
-        read = transactionProxy.read(TestModel.TEST_PATH);
+        return argThat(matcher);
+    }
 
-        normalizedNodeOptional = read.get();
+    private Object readyTxReply(ActorPath path) {
+        return new ReadyTransactionReply(path).toSerializable();
+    }
 
-        Assert.assertTrue(normalizedNodeOptional.isPresent());
+    private Future<Object> readDataReply(NormalizedNode<?, ?> data) {
+        return Futures.successful(new ReadDataReply(schemaContext, data)
+                .toSerializable());
     }
 
-    @Test
-    public void testExists() throws Exception {
-        final Props props = Props.create(DoNothingActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
+    private Future<Object> dataExistsReply(boolean exists) {
+        return Futures.successful(new DataExistsReply(exists).toSerializable());
+    }
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+    private ActorSelection actorSelection(ActorRef actorRef) {
+        return getSystem().actorSelection(actorRef.path());
+    }
 
+    private FiniteDuration anyDuration() {
+        return any(FiniteDuration.class);
+    }
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+    private CreateTransactionReply createTransactionReply(ActorRef actorRef){
+        return CreateTransactionReply.newBuilder()
+            .setTransactionActorPath(actorRef.path().toString())
+            .setTransactionId("txn-1").build();
+    }
 
+    private ActorRef setupActorContextWithInitialCreateTransaction(TransactionType type) {
+        ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
+        doReturn(getSystem().actorSelection(actorRef.path())).
+                when(mockActorContext).actorSelection(actorRef.path().toString());
+        doReturn(memberName).when(mockActorContext).getCurrentMemberName();
+        doReturn(createTransactionReply(actorRef)).when(mockActorContext).
+                executeShardOperation(eq(DefaultShardStrategy.DEFAULT_SHARD),
+                        eqCreateTransaction(memberName, type), anyDuration());
+        doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(
+                anyString(), eq(actorRef.path().toString()));
+        doReturn(actorRef.path()).when(mockActorContext).actorFor(actorRef.path().toString());
+
+        return actorRef;
+    }
 
-        actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(false).toSerializable());
+    @Test
+    public void testRead() throws Exception {
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
-        CheckedFuture<Boolean, ReadFailedException> exists =
-            transactionProxy.exists(TestModel.TEST_PATH);
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        Assert.assertFalse(exists.checkedGet());
+        doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqReadData(), anyDuration());
 
-        actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(true).toSerializable());
+        Optional<NormalizedNode<?, ?>> readOptional = transactionProxy.read(
+                TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
 
-        exists = transactionProxy.exists(TestModel.TEST_PATH);
+        assertEquals("NormalizedNode isPresent", false, readOptional.isPresent());
 
-        Assert.assertTrue(exists.checkedGet());
+        NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
 
-        actorContext.setExecuteRemoteOperationResponse("bad message");
+        doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqReadData(), anyDuration());
 
-        exists = transactionProxy.exists(TestModel.TEST_PATH);
+        readOptional = transactionProxy.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
 
-        try {
-            exists.checkedGet();
-            fail();
-        } catch(ReadFailedException e){
-        }
+        assertEquals("NormalizedNode isPresent", true, readOptional.isPresent());
 
+        assertEquals("Response NormalizedNode", expectedNode, readOptional.get());
     }
 
     @Test(expected = ReadFailedException.class)
     public void testReadWhenAnInvalidMessageIsSentInReply() throws Exception {
-        final Props props = Props.create(DoNothingActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
-
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
-
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
+        doReturn(Futures.successful(new Object())).when(mockActorContext).
+                executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
 
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException>
-            read = transactionProxy.read(TestModel.TEST_PATH);
-
-        read.checkedGet();
+        transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
     }
 
-    @Test
-    public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Exception {
-        final ActorContext actorContext = mock(ActorContext.class);
-
-        when(actorContext.executeShardOperation(anyString(), any(), any(
-            FiniteDuration.class))).thenThrow(new PrimaryNotFoundException("test"));
+    @Test(expected = TestException.class)
+    public void testReadWithAsyncRemoteOperationFailure() throws Throwable {
+        setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        doThrow(new TestException()).when(mockActorContext).
+                executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
 
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
-            transactionProxy.read(TestModel.TEST_PATH);
-
-        Assert.assertFalse(read.get().isPresent());
-
+        try {
+            transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+            fail("Expected ReadFailedException");
+        } catch(ReadFailedException e) {
+            // Expected - rethrow the cause, which should be the TestException.
+            throw e.getCause();
+        }
     }
 
+    private void testExceptionOnInitialCreateTransaction(Exception exToThrow, Invoker invoker)
+            throws Throwable {
 
-    @Test
-    public void testReadWhenATimeoutExceptionIsThrown() throws Exception {
-        final ActorContext actorContext = mock(ActorContext.class);
+        doThrow(exToThrow).when(mockActorContext).executeShardOperation(
+                anyString(), any(), anyDuration());
 
-        when(actorContext.executeShardOperation(anyString(), any(), any(
-            FiniteDuration.class))).thenThrow(new TimeoutException("test", new Exception("reason")));
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        try {
+            invoker.invoke(transactionProxy);
+            fail("Expected ReadFailedException");
+        } catch(ReadFailedException e) {
+            // Expected - rethrow the cause so the test can verify the expected exception type.
+            throw e.getCause();
+        }
+    }
 
+    private void testReadWithExceptionOnInitialCreateTransaction(Exception exToThrow) throws Throwable {
+        testExceptionOnInitialCreateTransaction(exToThrow, new Invoker() {
+            @Override
+            public void invoke(TransactionProxy proxy) throws Exception {
+                proxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+            }
+        });
+    }
 
-        ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
-            transactionProxy.read(TestModel.TEST_PATH);
+    @Test(expected = PrimaryNotFoundException.class)
+    public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+        testReadWithExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"));
+    }
 
-        Assert.assertFalse(read.get().isPresent());
+    @Test(expected = TimeoutException.class)
+    public void testReadWhenATimeoutExceptionIsThrown() throws Throwable {
+        testReadWithExceptionOnInitialCreateTransaction(new TimeoutException("test",
+                new Exception("reason")));
+    }
 
+    @Test(expected = TestException.class)
+    public void testReadWhenAnyOtherExceptionIsThrown() throws Throwable {
+        testReadWithExceptionOnInitialCreateTransaction(new TestException());
     }
 
     @Test
-    public void testReadWhenAAnyOtherExceptionIsThrown() throws Exception {
-        final ActorContext actorContext = mock(ActorContext.class);
-
-        when(actorContext.executeShardOperation(anyString(), any(), any(
-            FiniteDuration.class))).thenThrow(new NullPointerException());
-
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+    public void testExists() throws Exception {
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        try {
-            ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
-                transactionProxy.read(TestModel.TEST_PATH);
-            fail("A null pointer exception was expected");
-        } catch(NullPointerException e){
+        doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
 
-        }
-    }
+        Boolean exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
 
+        assertEquals("Exists response", false, exists);
 
+        doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
 
-    @Test
-    public void testWrite() throws Exception {
-        final Props props = Props.create(MessageCollectorActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
+        exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+        assertEquals("Exists response", true, exists);
+    }
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+    @Test(expected = PrimaryNotFoundException.class)
+    public void testExistsWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+        testExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"), new Invoker() {
+            @Override
+            public void invoke(TransactionProxy proxy) throws Exception {
+                proxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+            }
+        });
+    }
 
-        transactionProxy.write(TestModel.TEST_PATH,
-            ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+    @Test(expected = ReadFailedException.class)
+    public void testExistsWhenAnInvalidMessageIsSentInReply() throws Exception {
+        setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
-        Object messages = testContext
-            .executeLocalOperation(actorRef, "messages",
-                ActorContext.ASK_DURATION);
+        doReturn(Futures.successful(new Object())).when(mockActorContext).
+                executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
 
-        Assert.assertNotNull(messages);
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-        Assert.assertTrue(messages instanceof List);
+        transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+    }
 
-        List<Object> listMessages = (List<Object>) messages;
+    @Test(expected = TestException.class)
+    public void testExistsWithAsyncRemoteOperationFailure() throws Throwable {
+        setupActorContextWithInitialCreateTransaction(READ_ONLY);
 
-        Assert.assertEquals(1, listMessages.size());
+        doThrow(new TestException()).when(mockActorContext).
+                executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
 
-        Assert.assertEquals(WriteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
-    }
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_ONLY, schemaContext);
 
-    private Object createPrimaryFound(ActorRef actorRef) {
-        return new PrimaryFound(actorRef.path().toString()).toSerializable();
+        try {
+            transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+            fail("Expected ReadFailedException");
+        } catch(ReadFailedException e) {
+            // Expected - rethrow the cause, which should be the TestException.
+            throw e.getCause();
+        }
     }
 
     @Test
-    public void testMerge() throws Exception {
-        final Props props = Props.create(MessageCollectorActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
+    public void testWrite() throws Exception {
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                WRITE_ONLY, schemaContext);
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
 
-        transactionProxy.merge(TestModel.TEST_PATH,
-            ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+        transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
 
-        Object messages = testContext
-            .executeLocalOperation(actorRef, "messages",
-                ActorContext.ASK_DURATION);
+        verify(mockActorContext).sendRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
+    }
 
-        Assert.assertNotNull(messages);
+    @Test
+    public void testMerge() throws Exception {
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
 
-        Assert.assertTrue(messages instanceof List);
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                WRITE_ONLY, schemaContext);
 
-        List<Object> listMessages = (List<Object>) messages;
+        NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
 
-        Assert.assertEquals(1, listMessages.size());
+        transactionProxy.merge(TestModel.TEST_PATH, nodeToWrite);
 
-        Assert.assertEquals(MergeData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+        verify(mockActorContext).sendRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
     }
 
     @Test
     public void testDelete() throws Exception {
-        final Props props = Props.create(MessageCollectorActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
-
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                WRITE_ONLY, schemaContext);
 
         transactionProxy.delete(TestModel.TEST_PATH);
 
-        Object messages = testContext
-            .executeLocalOperation(actorRef, "messages",
-                ActorContext.ASK_DURATION);
-
-        Assert.assertNotNull(messages);
-
-        Assert.assertTrue(messages instanceof List);
-
-        List<Object> listMessages = (List<Object>) messages;
-
-        Assert.assertEquals(1, listMessages.size());
-
-        Assert.assertEquals(DeleteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+        verify(mockActorContext).sendRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqDeleteData());
     }
 
+    @SuppressWarnings("unchecked")
     @Test
     public void testReady() throws Exception {
-        final Props props = Props.create(DoNothingActor.class);
-        final ActorRef doNothingActorRef = getSystem().actorOf(props);
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(doNothingActorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(doNothingActorRef));
-        actorContext.setExecuteRemoteOperationResponse(new ReadyTransactionReply(doNothingActorRef.path()).toSerializable());
+        doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqReadData(), anyDuration());
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperation(
+                eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS), anyDuration());
 
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_WRITE, schemaContext);
 
         transactionProxy.read(TestModel.TEST_PATH);
 
         DOMStoreThreePhaseCommitCohort ready = transactionProxy.ready();
 
-        Assert.assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
+        assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
 
         ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
 
-        Assert.assertTrue("No cohort paths returned", proxy.getCohortPaths().size() > 0);
-
+        assertEquals("getCohortPaths", Arrays.asList(actorRef.path()), proxy.getCohortPaths());
     }
 
     @Test
-    public void testGetIdentifier(){
-        final Props props = Props.create(DoNothingActor.class);
-        final ActorRef doNothingActorRef = getSystem().actorOf(props);
-
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteShardOperationResponse( createTransactionReply(doNothingActorRef) );
-
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
-
-        Assert.assertNotNull(transactionProxy.getIdentifier());
+    public void testGetIdentifier() {
+        setupActorContextWithInitialCreateTransaction(READ_ONLY);
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                TransactionProxy.TransactionType.READ_ONLY, schemaContext);
+
+        Object id = transactionProxy.getIdentifier();
+        assertNotNull("getIdentifier returned null", id);
+        assertTrue("Invalid identifier: " + id, id.toString().startsWith(memberName));
     }
 
+    @SuppressWarnings("unchecked")
     @Test
-    public void testClose(){
-        final Props props = Props.create(MessageCollectorActor.class);
-        final ActorRef actorRef = getSystem().actorOf(props);
+    public void testClose() throws Exception{
+        ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
 
-        final MockActorContext actorContext = new MockActorContext(this.getSystem());
-        actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
-        actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
-        actorContext.setExecuteRemoteOperationResponse("message");
+        doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+                eq(actorSelection(actorRef)), eqReadData(), anyDuration());
 
-        TransactionProxy transactionProxy =
-            new TransactionProxy(actorContext,
-                TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+        TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+                READ_WRITE, schemaContext);
 
         transactionProxy.read(TestModel.TEST_PATH);
 
         transactionProxy.close();
 
-        Object messages = testContext
-            .executeLocalOperation(actorRef, "messages",
-                ActorContext.ASK_DURATION);
-
-        Assert.assertNotNull(messages);
-
-        Assert.assertTrue(messages instanceof List);
-
-        List<Object> listMessages = (List<Object>) messages;
-
-        Assert.assertEquals(1, listMessages.size());
-
-        Assert.assertTrue(listMessages.get(0).getClass().equals(CloseTransaction.SERIALIZABLE_CLASS));
-    }
-
-    private CreateTransactionReply createTransactionReply(ActorRef actorRef){
-        return CreateTransactionReply.newBuilder()
-            .setTransactionActorPath(actorRef.path().toString())
-            .setTransactionId("txn-1")
-            .build();
+        verify(mockActorContext).sendRemoteOperationAsync(
+                eq(actorSelection(actorRef)), isA(CloseTransaction.SERIALIZABLE_CLASS));
     }
 }
index 5874eccda40f4f2d08f6b829757206213a743696..fda9ccdfdbbd44dda363c92b842d246e389466bb 100644 (file)
@@ -1,11 +1,14 @@
 package org.opendaylight.controller.cluster.datastore.utils;
 
+import java.util.concurrent.TimeUnit;
 import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
 import akka.actor.ActorSystem;
 import akka.actor.Props;
 import akka.actor.UntypedActor;
 import akka.japi.Creator;
 import akka.testkit.JavaTestKit;
+
 import org.junit.Test;
 import org.opendaylight.controller.cluster.datastore.AbstractActorTest;
 import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
@@ -14,6 +17,9 @@ import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
 import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
 import org.opendaylight.controller.cluster.datastore.messages.LocalShardNotFound;
 
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.mockito.Mockito.mock;
@@ -74,14 +80,23 @@ public class ActorContextTest extends AbstractActorTest{
         }
 
         private static Props props(final boolean found, final ActorRef actorRef){
-            return Props.create(new Creator<MockShardManager>() {
+            return Props.create(new MockShardManagerCreator(found, actorRef) );
+        }
 
-                @Override public MockShardManager create()
-                    throws Exception {
-                    return new MockShardManager(found,
-                        actorRef);
-                }
-            });
+        @SuppressWarnings("serial")
+        private static class MockShardManagerCreator implements Creator<MockShardManager> {
+            final boolean found;
+            final ActorRef actorRef;
+
+            MockShardManagerCreator(boolean found, ActorRef actorRef) {
+                this.found = found;
+                this.actorRef = actorRef;
+            }
+
+            @Override
+            public MockShardManager create() throws Exception {
+                return new MockShardManager(found, actorRef);
+            }
         }
     }
 
@@ -90,6 +105,7 @@ public class ActorContextTest extends AbstractActorTest{
         new JavaTestKit(getSystem()) {{
 
             new Within(duration("1 seconds")) {
+                @Override
                 protected void run() {
 
                     ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
@@ -118,6 +134,7 @@ public class ActorContextTest extends AbstractActorTest{
         new JavaTestKit(getSystem()) {{
 
             new Within(duration("1 seconds")) {
+                @Override
                 protected void run() {
 
                     ActorRef shardManagerActorRef = getSystem()
@@ -145,6 +162,7 @@ public class ActorContextTest extends AbstractActorTest{
         new JavaTestKit(getSystem()) {{
 
             new Within(duration("1 seconds")) {
+                @Override
                 protected void run() {
 
                     ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
@@ -173,6 +191,7 @@ public class ActorContextTest extends AbstractActorTest{
         new JavaTestKit(getSystem()) {{
 
             new Within(duration("1 seconds")) {
+                @Override
                 protected void run() {
 
                     ActorRef shardManagerActorRef = getSystem()
@@ -193,4 +212,68 @@ public class ActorContextTest extends AbstractActorTest{
         }};
 
     }
+
+    @Test
+    public void testExecuteRemoteOperation() {
+        new JavaTestKit(getSystem()) {{
+
+            new Within(duration("3 seconds")) {
+                @Override
+                protected void run() {
+
+                    ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+                    ActorRef shardManagerActorRef = getSystem()
+                        .actorOf(MockShardManager.props(true, shardActorRef));
+
+                    ActorContext actorContext =
+                        new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+                            mock(Configuration.class));
+
+                    ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+                    Object out = actorContext.executeRemoteOperation(actor, "hello", duration("3 seconds"));
+
+                    assertEquals("hello", out);
+
+                    expectNoMsg();
+                }
+            };
+        }};
+    }
+
+    @Test
+    public void testExecuteRemoteOperationAsync() {
+        new JavaTestKit(getSystem()) {{
+
+            new Within(duration("3 seconds")) {
+                @Override
+                protected void run() {
+
+                    ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+                    ActorRef shardManagerActorRef = getSystem()
+                        .actorOf(MockShardManager.props(true, shardActorRef));
+
+                    ActorContext actorContext =
+                        new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+                            mock(Configuration.class));
+
+                    ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+                    Future<Object> future = actorContext.executeRemoteOperationAsync(actor, "hello",
+                            Duration.create(3, TimeUnit.SECONDS));
+
+                    try {
+                        Object result = Await.result(future, Duration.create(3, TimeUnit.SECONDS));
+                        assertEquals("Result", "hello", result);
+                    } catch(Exception e) {
+                        throw new AssertionError(e);
+                    }
+
+                    expectNoMsg();
+                }
+            };
+        }};
+    }
 }
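
The two new tests above exercise ActorContext.executeRemoteOperation and executeRemoteOperationAsync against an EchoActor registered behind a MockShardManager. The EchoActor itself is not part of this hunk; a minimal sketch of what it presumably looks like (an actor that simply replies to the sender with the message it received, which is why "hello" round-trips back in the assertions) is:

import akka.actor.UntypedActor;

public class EchoActor extends UntypedActor {
    @Override
    public void onReceive(Object message) {
        // Reply to the sender with the exact message that was received,
        // so executeRemoteOperation(actor, "hello", ...) yields "hello".
        getSender().tell(message, getSelf());
    }
}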
index 5d3853f311880d791fc177c9a1082f026fb1c5de..b19fd3a5290b2c8df92917403d7a501bcf79bfcf 100644 (file)
@@ -8,7 +8,7 @@
 
 package org.opendaylight.controller.cluster.datastore.utils;
 
-
+import static org.junit.Assert.assertNotNull;
 import akka.actor.ActorRef;
 import akka.actor.ActorSelection;
 import akka.actor.ActorSystem;
@@ -16,10 +16,12 @@ import scala.concurrent.duration.FiniteDuration;
 
 public class MockActorContext extends ActorContext {
 
-    private Object executeShardOperationResponse;
-    private Object executeRemoteOperationResponse;
-    private Object executeLocalOperationResponse;
-    private Object executeLocalShardOperationResponse;
+    private volatile Object executeShardOperationResponse;
+    private volatile Object executeRemoteOperationResponse;
+    private volatile Object executeLocalOperationResponse;
+    private volatile Object executeLocalShardOperationResponse;
+    private volatile Exception executeRemoteOperationFailure;
+    private volatile Object inputMessage;
 
     public MockActorContext(ActorSystem actorSystem) {
         super(actorSystem, null, new MockClusterWrapper(), new MockConfiguration());
@@ -52,6 +54,10 @@ public class MockActorContext extends ActorContext {
         executeRemoteOperationResponse = response;
     }
 
+    public void setExecuteRemoteOperationFailure(Exception executeRemoteOperationFailure) {
+        this.executeRemoteOperationFailure = executeRemoteOperationFailure;
+    }
+
     public void setExecuteLocalOperationResponse(
         Object executeLocalOperationResponse) {
         this.executeLocalOperationResponse = executeLocalOperationResponse;
@@ -62,12 +68,20 @@ public class MockActorContext extends ActorContext {
         this.executeLocalShardOperationResponse = executeLocalShardOperationResponse;
     }
 
-    @Override public Object executeLocalOperation(ActorRef actor,
+    @SuppressWarnings("unchecked")
+    public <T> T getInputMessage(Class<T> expType) throws Exception {
+        assertNotNull("Input message was null", inputMessage);
+        return (T) expType.getMethod("fromSerializable", Object.class).invoke(null, inputMessage);
+    }
+
+    @Override
+    public Object executeLocalOperation(ActorRef actor,
         Object message, FiniteDuration duration) {
         return this.executeLocalOperationResponse;
     }
 
-    @Override public Object executeLocalShardOperation(String shardName,
+    @Override
+    public Object executeLocalShardOperation(String shardName,
         Object message, FiniteDuration duration) {
         return this.executeLocalShardOperationResponse;
     }
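
The new getInputMessage(Class) helper in MockActorContext reflectively resolves a static fromSerializable(Object) factory on the expected message type and applies it to the captured message. A hypothetical message class satisfying that contract (the class name and field are invented for illustration; the real datastore messages unpack a serialized/protobuf payload) could look like:

public final class ExampleMessage {
    private final String payload;

    private ExampleMessage(String payload) {
        this.payload = payload;
    }

    public String getPayload() {
        return payload;
    }

    // Looked up via expType.getMethod("fromSerializable", Object.class) in
    // MockActorContext.getInputMessage and invoked with the recorded message.
    public static ExampleMessage fromSerializable(Object serialized) {
        return new ExampleMessage(String.valueOf(serialized));
    }
}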
index 38ec5f5ac2afda90ac5e8ea9eb454574dd4de14d..4e007f4c5d8e4447c1ea683312f40302bc91f2d9 100644 (file)
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>sal-core-api</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>netconf-util</artifactId>
-    </dependency>
       <dependency>
-          <groupId>org.opendaylight.controller</groupId>
-          <artifactId>sal-core-spi</artifactId>
+        <groupId>org.opendaylight.controller</groupId>
+        <artifactId>sal-core-spi</artifactId>
+      </dependency>
+      <dependency>
+        <groupId>org.opendaylight.controller</groupId>
+        <artifactId>sal-common-impl</artifactId>
       </dependency>
       <dependency>
-          <groupId>org.opendaylight.controller</groupId>
-          <artifactId>sal-common-impl</artifactId>
+        <groupId>org.opendaylight.controller</groupId>
+        <artifactId>netconf-util</artifactId>
       </dependency>
+      <dependency>
+        <groupId>org.opendaylight.controller</groupId>
+        <artifactId>sal-clustering-commons</artifactId>
+      </dependency>
+
     <!-- Yang tools-->
 
     <dependency>
       <artifactId>scala-library</artifactId>
     </dependency>
 
+    <dependency>
+      <groupId>com.codahale.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+      <version>3.0.1</version>
+    </dependency>
     <!-- Test Dependencies -->
     <dependency>
       <groupId>junit</groupId>
             <Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
             <Export-package></Export-package>
             <Private-Package></Private-Package>
-            <Import-Package>!org.jboss.*;!com.jcraft.*;*</Import-Package>
+            <Import-Package>!org.iq80.*;!*snappy;!org.jboss.*;!com.jcraft.*;!org.fusesource.*;*</Import-Package>
             <Embed-Dependency>
+                sal-clustering-commons;
+                sal-akka-raft;
+                *metrics*;
                 !sal*;
                 !*config-api*;
                 !*testkit*;
index 02e2d1201518b22001da9882e41ee881624a16bc..4496bd3263f9f80c6d1594e2e51de474ecb22a46 100644 (file)
@@ -7,7 +7,7 @@ import org.opendaylight.controller.remote.rpc.messages.ErrorResponse;
 import org.opendaylight.controller.remote.rpc.messages.InvokeRpc;
 import org.opendaylight.controller.remote.rpc.messages.RpcResponse;
 import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
 import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
 import org.opendaylight.controller.sal.core.api.RpcImplementation;
 import org.opendaylight.yangtools.yang.common.QName;
index 611618f1f69b106d85a0959188646252fc89de60..4ec96c29cd2510175defa6327a235ffecb519b28 100644 (file)
@@ -20,7 +20,7 @@ import org.opendaylight.controller.remote.rpc.utils.LatestEntryRoutingLogic;
 import org.opendaylight.controller.remote.rpc.registry.RpcRegistry;
 import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
 import org.opendaylight.controller.remote.rpc.utils.RoutingLogic;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
 import org.opendaylight.controller.sal.connector.api.RpcRouter;
 import org.opendaylight.controller.sal.core.api.Broker;
 import org.opendaylight.yangtools.yang.common.RpcResult;
diff --git a/pom.xml b/pom.xml
index e4c51b7839cff2f9af808d3fc78866bd0f599d45..0bfc64f8929c8c90d66edf0a1bf87fc7a3d2d03e 100644 (file)
--- a/pom.xml
+++ b/pom.xml
     <!-- Karaf Distribution -->
     <module>opendaylight/dummy-console</module>
     <module>opendaylight/karaf-branding</module>
+    <module>opendaylight/distribution/opendaylight-karaf-empty</module>
     <module>opendaylight/distribution/opendaylight-karaf</module>
+    <module>opendaylight/distribution/opendaylight-karaf-resources</module>
     <module>features</module>
   </modules>
   <scm>