Merge "Revert "Revert "BUG-1425: Integrated new Binding to Normalized Node codec...
author Tony Tkacik <ttkacik@cisco.com>
Thu, 21 Aug 2014 09:58:22 +0000 (09:58 +0000)
committer Gerrit Code Review <gerrit@opendaylight.org>
Thu, 21 Aug 2014 09:58:22 +0000 (09:58 +0000)
149 files changed:
.gitignore
features/config-netty/pom.xml
features/config-persister/pom.xml
features/config/pom.xml
features/config/src/main/resources/features.xml
features/flow/pom.xml
features/mdsal/pom.xml
features/protocol-framework/pom.xml
opendaylight/commons/opendaylight/pom.xml
opendaylight/distribution/opendaylight-karaf-empty/pom.xml [new file with mode: 0644]
opendaylight/distribution/opendaylight-karaf-resources/pom.xml [new file with mode: 0644]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/instance [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/instance with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/instance.bat [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/instance.bat with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/karaf [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/karaf with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin/karaf.bat [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/karaf/karaf.bat with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/context.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/context.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/logback.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/logback.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/tomcat-logging.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/tomcat-logging.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/configuration/tomcat-server.xml [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/configuration/tomcat-server.xml with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/etc/custom.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/custom.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/version.properties [moved from opendaylight/distribution/opendaylight-karaf/src/main/resources/version.properties with 100% similarity]
opendaylight/distribution/opendaylight-karaf/pom.xml
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties [deleted file]
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties [deleted file]
opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties [deleted file]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/AbstractChangeListener.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/FRMActivator.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/FlowCookieProducer.java [new file with mode: 0644]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/flow/FlowChangeListener.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/flow/FlowProvider.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/flow/FlowTransactionValidator.java [deleted file]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/group/GroupChangeListener.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/group/GroupProvider.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/group/GroupTransactionValidator.java [deleted file]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/meter/MeterChangeListener.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/meter/MeterProvider.java
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/meter/MeterTransactionValidator.java [deleted file]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilListener.java [new file with mode: 0644]
opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilProvider.java [new file with mode: 0644]
opendaylight/md-sal/model/model-flow-service/src/main/yang/flow-node-inventory.yang
opendaylight/md-sal/sal-binding-api/src/main/java/org/opendaylight/controller/sal/binding/api/AbstractBrokerAwareActivator.java
opendaylight/md-sal/sal-clustering-commons/pom.xml
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/InstanceIdentifierForXmlCodec.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/InstanceIdentifierForXmlCodec.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/RandomPrefix.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/RandomPrefix.java with 96% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlDocumentUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlDocumentUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlStreamUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlStreamUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/xml/codec/XmlUtils.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/utils/XmlUtils.java with 99% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java [new file with mode: 0644]
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java [moved from opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/utils/XmlUtilsTest.java with 98% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/resources/application.conf
opendaylight/md-sal/sal-clustering-commons/src/test/resources/org/opendaylight/controller/xml/codec/rpcTest.yang [moved from opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/org/opendaylight/controller/remote/rpc/utils/rpcTest.yang with 100% similarity]
opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf [new file with mode: 0644]
opendaylight/md-sal/sal-distributed-datastore/pom.xml
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractUntypedActor.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DistributedDataStore.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreFactory.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/Shard.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardManager.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardReadWriteTransaction.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ShardWriteTransaction.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohort.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionChainProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/TransactionProxy.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/messages/DeleteData.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/utils/ActorContext.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedConfigDataStoreProviderModule.java
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedOperationalDataStoreProviderModule.java
opendaylight/md-sal/sal-distributed-datastore/src/main/yang/distributed-datastore-provider.yang
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/BasicIntegrationTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreIntegrationTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/DistributedDataStoreTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardManagerTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionFailureTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ShardTransactionTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortFailureTest.java [new file with mode: 0644]
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/ThreePhaseCommitCohortProxyTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/TransactionProxyTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/utils/ActorContextTest.java
opendaylight/md-sal/sal-distributed-datastore/src/test/java/org/opendaylight/controller/cluster/datastore/utils/MockActorContext.java
opendaylight/md-sal/sal-dom-broker/src/main/java/org/opendaylight/controller/config/yang/md/sal/dom/impl/DomInmemoryDataBrokerModule.java
opendaylight/md-sal/sal-dom-broker/src/main/yang/opendaylight-dom-broker-impl.yang
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryConfigDataStoreProviderModule.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/config/yang/inmemory_datastore_provider/InMemoryOperationalDataStoreProviderModule.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ChangeListenerNotifyTask.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/DOMImmutableDataChangeEvent.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStore.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStoreConfigProperties.java [new file with mode: 0644]
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/InMemoryDOMDataStoreFactory.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeEventsTask.java
opendaylight/md-sal/sal-inmemory-datastore/src/main/java/org/opendaylight/controller/md/sal/dom/store/impl/ResolveDataChangeState.java [new file with mode: 0644]
opendaylight/md-sal/sal-inmemory-datastore/src/main/yang/opendaylight-inmemory-datastore-provider.yang
opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/AbstractDataChangeListenerTest.java
opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/DefaultDataChangeListenerTestSuite.java
opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeBaseTest.java
opendaylight/md-sal/sal-inmemory-datastore/src/test/java/org/opendaylight/controller/md/sal/dom/store/impl/WildcardedScopeOneTest.java
opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemas.java
opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/listener/NetconfSessionCapabilities.java
opendaylight/md-sal/sal-netconf-connector/src/test/java/org/opendaylight/controller/sal/connect/netconf/NetconfStateSchemasTest.java
opendaylight/md-sal/sal-remoterpc-connector/pom.xml
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RemoteRpcImplementation.java
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/RpcBroker.java
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistry.java
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/remote/rpc/registry/gossip/BucketStore.java
opendaylight/md-sal/sal-remoterpc-connector/src/main/java/org/opendaylight/controller/utils/ConditionalProbe.java [new file with mode: 0644]
opendaylight/md-sal/sal-remoterpc-connector/src/test/java/org/opendaylight/controller/remote/rpc/registry/RpcRegistryTest.java
opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/application.conf
opendaylight/md-sal/sal-remoterpc-connector/src/test/resources/logback.xml [new file with mode: 0644]
opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/BrokerFacade.java
opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfDocumentedException.java
opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfError.java
opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java
opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/InvokeRpcMethodTest.java
opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/RestPutOperationTest.java
opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/RestconfDocumentedExceptionMapperTest.java
opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/RestconfErrorTest.java
opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/BaseYangSwaggerGenerator.java
opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/ModelGenerator.java
opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/model/builder/OperationBuilder.java
opendaylight/md-sal/sal-rest-docgen/src/test/java/org/opendaylight/controller/sal/rest/doc/impl/ApiDocGeneratorTest.java
opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster.yang
opendaylight/md-sal/sal-rest-docgen/src/test/resources/yang/toaster_short.yang
opendaylight/md-sal/samples/toaster-provider/src/main/java/org/opendaylight/controller/config/yang/config/toaster_provider/impl/ToasterProviderModule.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/FlowComparator.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/FlowStatsTracker.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/NodeConnectorStatsTracker.java
opendaylight/netconf/netconf-client/src/main/java/org/opendaylight/controller/netconf/client/NetconfClientSession.java
opendaylight/netconf/netconf-client/src/main/java/org/opendaylight/controller/netconf/client/NetconfClientSessionNegotiator.java
opendaylight/netconf/netconf-it/src/test/java/org/opendaylight/controller/netconf/it/NetconfITSecureTest.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/SshClientAdapter.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/SshSession.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/authentication/AuthProvider.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/authentication/AuthProviderImpl.java [new file with mode: 0644]
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/osgi/NetconfSSHActivator.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/threads/Handshaker.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/SSHTest.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/ssh/authentication/SSHServerTest.java
opendaylight/netconf/netconf-testtool/pom.xml [new file with mode: 0644]
opendaylight/netconf/netconf-testtool/src/main/java/org/opendaylight/controller/netconf/test/tool/AcceptingAuthProvider.java [new file with mode: 0644]
opendaylight/netconf/netconf-testtool/src/main/java/org/opendaylight/controller/netconf/test/tool/Main.java [new file with mode: 0644]
opendaylight/netconf/netconf-testtool/src/main/java/org/opendaylight/controller/netconf/test/tool/ModuleBuilderCapability.java [new file with mode: 0644]
opendaylight/netconf/netconf-testtool/src/main/java/org/opendaylight/controller/netconf/test/tool/NetconfDeviceSimulator.java [new file with mode: 0644]
opendaylight/netconf/netconf-testtool/src/main/java/org/opendaylight/controller/netconf/test/tool/SimulatedGet.java [new file with mode: 0644]
opendaylight/netconf/netconf-util/src/main/java/org/opendaylight/controller/netconf/util/messages/SendErrorExceptionUtil.java
opendaylight/netconf/pom.xml
pom.xml

diff --git a/.gitignore b/.gitignore
index 9144cda..b304ffc 100644 (file)
@@ -28,4 +28,6 @@ maven-eclipse.xml
 .DS_STORE
 .metadata
 opendaylight/md-sal/sal-distributed-datastore/journal
+!opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin
+
 
diff --git a/features/config-netty/pom.xml b/features/config-netty/pom.xml
index 2f4b4b1..bf03697 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-config-netty</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
@@ -21,7 +21,6 @@
       <artifactId>features-config-persister</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>config-netty-config</artifactId>
+      <!--
+        note, the reason the type and classifier
+        are here instead of in opendaylight/commons/opendaylight/pom.xml
+        is because they are used as jars in distribution.
+      -->
+      <version>${config.version}</version>
+      <type>xml</type>
+      <classifier>config</classifier>
+    </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
     </dependency>
   </dependencies>
 
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/features/config-persister/pom.xml b/features/config-persister/pom.xml
index 6dc8941..3346c75 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-config-persister</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <version>${yangtools.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-netconf</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.eclipse.persistence</groupId>
       <artifactId>org.eclipse.persistence.moxy</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/features/config/pom.xml b/features/config/pom.xml
index c69e11b..20feceb 100644 (file)
@@ -9,20 +9,26 @@
   </parent>
   <artifactId>features-config</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
   </properties>
 
   <dependencies>
+    <!-- dependency for opendaylight-karaf-empty for use by testing -->
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>opendaylight-karaf-empty</artifactId>
+      <version>1.4.2-SNAPSHOT</version>
+      <type>zip</type>
+    </dependency>
     <dependency>
       <groupId>org.opendaylight.yangtools</groupId>
       <artifactId>features-yangtools</artifactId>
       <version>${yangtools.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>config-manager</artifactId>
     </dependency>
+    <!-- test the features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/features/config/src/main/resources/features.xml b/features/config/src/main/resources/features.xml
index 6c0d324..5027588 100644 (file)
@@ -6,7 +6,7 @@
   <repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
 
   <feature name='odl-config-all' version='${project.version}'>
-    <feature version='${project.version}'>odl-mdsal-common</feature>
+      <feature version='${mdsal.version}'>odl-mdsal-common</feature>
       <feature version='${project.version}'>odl-config-api</feature>
       <feature version='${project.version}'>odl-config-netty-config-api</feature>
       <feature version='${project.version}'>odl-config-core</feature>
diff --git a/features/flow/pom.xml b/features/flow/pom.xml
index 09bb6c9..ac18973 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-flow</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
@@ -22,7 +22,6 @@
       <version>${mdsal.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller.model</groupId>
       <groupId>org.opendaylight.controller.md</groupId>
       <artifactId>forwardingrules-manager</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/features/mdsal/pom.xml b/features/mdsal/pom.xml
index 4f1ba98..ac6b82b 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-mdsal</artifactId>
 
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <artifactId>features-yangtools</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config-persister</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-config-netty</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
@@ -87,6 +83,9 @@
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>md-sal-config</artifactId>
+      <version>${mdsal.version}</version>
+      <type>xml</type>
+      <classifier>config</classifier>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>netconf-connector-config</artifactId>
+      <version>${netconf.version}</version>
+      <type>xml</type>
+      <classifier>config</classifier>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>sal-rest-connector-config</artifactId>
+      <version>${mdsal.version}</version>
+      <type>xml</type>
+      <classifier>config</classifier>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller.samples</groupId>
     <dependency>
       <groupId>org.opendaylight.controller.samples</groupId>
       <artifactId>toaster-config</artifactId>
+      <version>${mdsal.version}</version>
+      <type>xml</type>
+      <classifier>config</classifier>
+    </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+      <version>0.6.2-SNAPSHOT</version>
     </dependency>
   </dependencies>
 
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/features/protocol-framework/pom.xml b/features/protocol-framework/pom.xml
index 97836be..dcd24d6 100644 (file)
@@ -9,7 +9,7 @@
   </parent>
   <artifactId>features-protocol-framework</artifactId>
   <version>${protocol-framework.version}</version>
-  <packaging>pom</packaging>
+  <packaging>jar</packaging>
 
   <properties>
     <features.file>features.xml</features.file>
       <artifactId>features-config</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>protocol-framework</artifactId>
     </dependency>
+    <!-- test to validate features.xml -->
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>features-test</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+            <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+            <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+          </systemPropertyVariables>
+          <dependenciesToScan>
+           <dependency>org.opendaylight.yangtools:features-test</dependency>
+          </dependenciesToScan>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/opendaylight/commons/opendaylight/pom.xml b/opendaylight/commons/opendaylight/pom.xml
index c74e7ae..1064afd 100644 (file)
         <artifactId>toaster-config</artifactId>
         <version>${mdsal.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>features-test</artifactId>
+        <version>${yangtools.version}</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.opendaylight.yangtools</groupId>
         <artifactId>features-yangtools</artifactId>
diff --git a/opendaylight/distribution/opendaylight-karaf-empty/pom.xml b/opendaylight/distribution/opendaylight-karaf-empty/pom.xml
new file mode 100644 (file)
index 0000000..d3dfe19
--- /dev/null
@@ -0,0 +1,235 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../../commons/opendaylight</relativePath>
+  </parent>
+  <artifactId>opendaylight-karaf-empty</artifactId>
+  <packaging>pom</packaging>
+  <prerequisites>
+    <maven>3.0</maven>
+  </prerequisites>
+
+  <dependencies>
+    <dependency>
+      <!-- scope is compile so all features (there is only one) are installed
+            into startup.properties and the feature repo itself is not installed -->
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>framework</artifactId>
+      <version>${karaf.version}</version>
+      <type>kar</type>
+    </dependency>
+    <!-- scope is runtime so the feature repo is listed in the features
+      service config file, and features may be installed using the
+      karaf-maven-plugin configuration -->
+    <dependency>
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>standard</artifactId>
+      <version>${karaf.version}</version>
+      <classifier>features</classifier>
+      <type>xml</type>
+      <scope>runtime</scope>
+    </dependency>
+
+    <!-- ODL Branding -->
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>karaf.branding</artifactId>
+      <scope>compile</scope>
+    </dependency>
+
+    <!-- Resources needed -->
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>opendaylight-karaf-resources</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.felix</groupId>
+                    <artifactId>maven-bundle-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>cleanVersions</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>copy</goal>
+                      <goal>unpack</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.karaf.tooling</groupId>
+                    <artifactId>karaf-maven-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>commands-generate-help</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.fusesource.scalate</groupId>
+                    <artifactId>maven-scalate-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>sitegen</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.servicemix.tooling</groupId>
+                    <artifactId>depends-maven-plugin</artifactId>
+                    <versionRange>[0,)</versionRange>
+                    <goals>
+                      <goal>generate-depends-file</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore></ignore>
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.karaf.tooling</groupId>
+        <artifactId>karaf-maven-plugin</artifactId>
+        <version>${karaf.version}</version>
+        <extensions>true</extensions>
+        <executions>
+          <execution>
+            <id>process-resources</id>
+            <goals>
+              <goal>install-kars</goal>
+            </goals>
+            <phase>process-resources</phase>
+          </execution>
+          <execution>
+            <id>package</id>
+            <goals>
+              <goal>instance-create-archive</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>${checkstyle.version}</version>
+        <configuration>
+          <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/configuration\/initial\/</excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.6</version>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <!-- here the phase you need -->
+            <phase>generate-resources</phase>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.opendaylight.controller</groupId>
+                  <artifactId>karaf.branding</artifactId>
+                  <version>${karaf.branding.version}</version>
+                  <outputDirectory>target/assembly/lib</outputDirectory>
+                  <destFileName>karaf.branding-${branding.version}.jar</destFileName>
+                </artifactItem>
+              </artifactItems>
+            </configuration>
+          </execution>
+          <execution>
+            <id>unpack-karaf-resources</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>prepare-package</phase>
+            <configuration>
+             <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+             <groupId>org.opendaylight.controller</groupId>
+             <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+             <excludes>META-INF\/**</excludes>
+             <excludeTransitive>true</excludeTransitive>
+             <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+            <execution>
+                <phase>prepare-package</phase>
+                <goals>
+                    <goal>run</goal>
+                </goals>
+                <configuration>
+                  <tasks>
+                    <chmod perm="755">
+                        <fileset dir="${project.build.directory}/assembly/bin">
+                          <include name="karaf"/>
+                          <include name="instance"/>
+                        </fileset>
+                    </chmod>
+                  </tasks>
+                </configuration>
+            </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <scm>
+    <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+    <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+    <tag>HEAD</tag>
+    <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main</url>
+  </scm>
+</project>
diff --git a/opendaylight/distribution/opendaylight-karaf-resources/pom.xml b/opendaylight/distribution/opendaylight-karaf-resources/pom.xml
new file mode 100644 (file)
index 0000000..00495a3
--- /dev/null
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+
+ This program and the accompanying materials are made available under the
+ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ and is available at http://www.eclipse.org/legal/epl-v10.html
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../../commons/opendaylight</relativePath>
+  </parent>
+  <artifactId>opendaylight-karaf-resources</artifactId>
+  <description>Resources for opendaylight-karaf</description>
+  <packaging>jar</packaging>
+</project>
diff --git a/opendaylight/distribution/opendaylight-karaf/pom.xml b/opendaylight/distribution/opendaylight-karaf/pom.xml
index b3c3f20..5cbe412 100644 (file)
       <version>${karaf.version}</version>
       <type>kar</type>
     </dependency>
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>base-features</artifactId>
-      <version>${project.version}</version>
-      <type>kar</type>
-    </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>karaf.branding</artifactId>
       <scope>compile</scope>
     </dependency>
-    <!-- scope is runtime so the feature repo is listed in the features
-      service config file, and features may be installed using the
-      karaf-maven-plugin configuration -->
-    <dependency>
-      <groupId>org.apache.karaf.features</groupId>
-      <artifactId>standard</artifactId>
-      <version>${karaf.version}</version>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>base-features</artifactId>
-      <version>${project.parent.version}</version>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
-    </dependency>
-    <!-- scope is compile so all features (there is only one) are installed
-            into startup.properties and the feature repo itself is not installed -->
+
+    <!-- Resources needed -->
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
-      <artifactId>extras-features</artifactId>
+      <artifactId>opendaylight-karaf-resources</artifactId>
       <version>${project.version}</version>
-      <type>kar</type>
-      <scope>runtime</scope>
-    </dependency>
-    <!-- AD-SAL Related Features -->
-    <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>features-adsal</artifactId>
-      <classifier>features</classifier>
-      <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
+
+    <!-- scope is not runtime so the feature repo is pulled into the local
+    repo on build and thus you actually run.  Failure to do so can lead
+    to very confusing errors for devs -->
     <dependency>
-      <groupId>org.opendaylight.controller</groupId>
-      <artifactId>features-nsf</artifactId>
-      <version>${project.version}</version>
+      <groupId>org.apache.karaf.features</groupId>
+      <artifactId>standard</artifactId>
+      <version>${karaf.version}</version>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
 
+    <!--
+          controller provided features:
+          Note: Nothing should go here that is not locked
+          down with testing... ie, no broken feature repos
+    -->
+
     <!-- MD-SAL Related Features -->
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-mdsal</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
     <dependency>
       <groupId>org.opendaylight.controller</groupId>
       <artifactId>features-flow</artifactId>
       <classifier>features</classifier>
       <type>xml</type>
-      <scope>runtime</scope>
     </dependency>
   </dependencies>
 
               </artifactItems>
             </configuration>
           </execution>
+          <execution>
+            <id>unpack-karaf-resources</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>prepare-package</phase>
+            <configuration>
+             <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+             <groupId>org.opendaylight.controller</groupId>
+             <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+             <excludes>META-INF\/**</excludes>
+             <excludeTransitive>true</excludeTransitive>
+             <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+            <execution>
+                <phase>prepare-package</phase>
+                <goals>
+                    <goal>run</goal>
+                </goals>
+                <configuration>
+                  <tasks>
+                    <chmod perm="755">
+                        <fileset dir="${project.build.directory}/assembly/bin">
+                          <include name="karaf"/>
+                          <include name="instance"/>
+                        </fileset>
+                    </chmod>
+                  </tasks>
+                </configuration>
+            </execution>
         </executions>
       </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-                <execution>
-                    <phase>prepare-package</phase>
-                    <goals>
-                        <goal>run</goal>
-                    </goals>
-                    <configuration>
-                        <tasks>
-                            <copy todir="${project.build.directory}/assembly/bin" overwrite="true">
-                                <fileset dir="${basedir}/src/main/resources/karaf/" includes="karaf,karaf.bat,instance,instance.bat"/>
-                            </copy>
-                        </tasks>
-                    </configuration>
-                </execution>
-            </executions>
-        </plugin>
     </plugins>
   </build>
   <scm>
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/jre.properties
deleted file mode 100644 (file)
index a98956e..0000000
+++ /dev/null
@@ -1,506 +0,0 @@
-################################################################################
-#
-#    Licensed to the Apache Software Foundation (ASF) under one or more
-#    contributor license agreements.  See the NOTICE file distributed with
-#    this work for additional information regarding copyright ownership.
-#    The ASF licenses this file to You under the Apache License, Version 2.0
-#    (the "License"); you may not use this file except in compliance with
-#    the License.  You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS,
-#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#    See the License for the specific language governing permissions and
-#    limitations under the License.
-#
-################################################################################
-
-#
-# Java platform package export properties.
-#
-
-# Standard package set.  Note that:
-#   - javax.transaction* is exported with a mandatory attribute
-jre-1.6= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-# Standard package set.  Note that:
-#   - javax.transaction* is exported with a mandatory attribute
-jre-1.7= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-jre-1.8= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/startup.properties
deleted file mode 100644 (file)
index ca8c83c..0000000
+++ /dev/null
@@ -1,53 +0,0 @@
-#Bundles to be started on startup, with startlevel
-
-# feature: framework version: 3.0.1
-mvn\:org.ops4j.base/ops4j-base-lang/1.4.0 = 5
-mvn\:biz.aQute.bnd/bndlib/2.2.0 = 5
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-bnd/1.7.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-maven-commons/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-aether/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-wrap/1.6.0 = 5
-mvn\:javax.annotation/javax.annotation-api/1.2 = 5
-mvn\:org.ops4j.pax.logging/pax-logging-api/1.7.2 = 8
-mvn\:org.ops4j.pax.logging/pax-logging-service/1.7.2 = 8
-mvn\:org.apache.karaf.service/org.apache.karaf.service.guard/3.0.1 = 10
-mvn\:org.apache.felix/org.apache.felix.configadmin/1.6.0 = 10
-mvn\:org.apache.felix/org.apache.felix.fileinstall/3.2.8 = 11
-mvn\:org.ow2.asm/asm-all/4.1 = 12
-mvn\:org.apache.aries/org.apache.aries.util/1.1.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.api/1.0.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.impl/1.0.2 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.api/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.cm/1.0.3 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core.compatibility/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core/1.4.0 = 20
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.spring/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.blueprint/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.wrap/3.0.1 = 24
-mvn\:org.apache.karaf.region/org.apache.karaf.region.core/3.0.1 = 25
-mvn\:org.apache.karaf.features/org.apache.karaf.features.core/3.0.1 = 25
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.features/3.0.1 = 26
-mvn\:jline/jline/2.11 = 30
-mvn\:org.jledit/core/0.2.1 = 30
-mvn\:org.fusesource.jansi/jansi/1.11 = 30
-mvn\:org.ops4j.base/ops4j-base-util-property/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-xml/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-collections/1.4.0 = 30
-mvn\:org.ops4j.pax.url/pax-url-commons/1.6.0 = 30
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-property/1.7.0 = 30
-mvn\:org.ops4j.base/ops4j-base-net/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-monitors/1.4.0 = 30
-mvn\:org.apache.karaf.features/org.apache.karaf.features.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.console/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.modules/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.config/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.boot/3.0.1 = 30
-mvn\:org.apache.sshd/sshd-core/0.9.0 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.table/3.0.1 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.core/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.help/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.core/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.commands/3.0.1 = 30
-mvn\:org.apache.aries.quiesce/org.apache.aries.quiesce.api/1.0.0 = 30
diff --git a/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties b/opendaylight/distribution/opendaylight-karaf/src/main/resources/etc/system.properties
deleted file mode 100644 (file)
index a312d66..0000000
+++ /dev/null
@@ -1,109 +0,0 @@
-#
-# The properties defined in this file will be made available through system
-# properties at the very beginning of the Karaf's boot process.
-#
-
-# Use Equinox as default OSGi Framework Implementation
-karaf.framework=equinox
-
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
-
-
-# Log level when the pax-logging service is not available
-# This level will only be used while the pax-logging service bundle
-# is not fully available.
-# To change log levels, please refer to the org.ops4j.pax.logging.cfg file
-# instead.
-org.ops4j.pax.logging.DefaultServiceLog.level = ERROR
-
-#
-# Name of this Karaf instance.
-#
-karaf.name = root
-
-#
-# Default repository where bundles will be loaded from before using
-# other Maven repositories.  For the full Maven configuration, see
-# the org.ops4j.pax.url.mvn.cfg file.
-#
-karaf.default.repository = system
-
-#
-# Location of a shell script that will be run when starting a shell
-# session.  This script can be used to create aliases and define
-# additional commands.
-#
-karaf.shell.init.script = ${karaf.etc}/shell.init.script
-
-#
-# Sets the maximum size of the shell command history. If not set,
-# defaults to 500 entries. Setting to 0 will disable history.
-#
-# karaf.shell.history.maxSize = 0
-
-#
-# Deletes the entire karaf.data directory at every start
-#
-karaf.clean.all = false
-
-#
-# Deletes the karaf.data/cache directory at every start
-#
-karaf.clean.cache = false
-
-#
-# Roles to use when logging into a local Karaf console.
-#
-# The syntax is the following:
-#   [classname:]principal
-# where classname is the class name of the principal object
-# (defaults to org.apache.karaf.jaas.modules.RolePrincipal)
-# and principal is the name of the principal of that class
-# (defaults to instance).
-#
-karaf.local.roles = admin,manager,viewer
-
-#
-# Set this empty property to avoid errors when validating xml documents.
-#
-xml.catalog.files =
-
-#
-# Suppress the bell in the console when hitting backspace too many times
-# for example
-#
-jline.nobell = true
-
-#
-# ServiceMix specs options
-#
-org.apache.servicemix.specs.debug = false
-org.apache.servicemix.specs.timeout = 0
-
-#
-# Settings for the OSGi 4.3 Weaving
-# By default, we will not weave any classes. Change this setting to include classes
-# that you application needs to have woven.
-#
-org.apache.aries.proxy.weaving.enabled = none
-# Classes not to weave - Aries default + Xerces which is known to have issues.
-org.apache.aries.proxy.weaving.disabled = org.objectweb.asm.*,org.slf4j.*,org.apache.log4j.*,javax.*,org.apache.xerces.*
-
-#
-# By default, only Karaf shell commands are secured, but additional services can be
-# secured by expanding this filter
-#
-karaf.secured.services = (&(osgi.command.scope=*)(osgi.command.function=*))
-
-#
-# Security properties
-#
-# To enable OSGi security, uncomment the properties below,
-# install the framework-security feature and restart.
-#
-#java.security.policy=${karaf.etc}/all.policy
-#org.osgi.framework.security=osgi
-#org.osgi.framework.trust.repositories=${karaf.etc}/trustStore.ks
index c8a7f01..130c096 100644 (file)
@@ -9,67 +9,89 @@ package org.opendaylight.controller.frm;
 
 import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
 import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
 import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
+ * AbstractChangeListener implements basic {@link AsyncDataChangeEvent} processing for
+ * flow node sub-DataObjects (flows, groups and meters).
  *
  * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
  *
  */
 public abstract class AbstractChangeListener implements DataChangeListener {
 
+    private final static Logger LOG = LoggerFactory.getLogger(AbstractChangeListener.class);
+
     private final AtomicLong txNum = new AtomicLong();
     private String transactionId;
 
     @Override
-    public void onDataChanged(DataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+    public void onDataChanged(final AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
         this.transactionId = this.newTransactionIdentifier().toString();
-
+        /* All DataObjects for create */
         final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
-                changeEvent.getCreatedConfigurationData().entrySet();
-        final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries =
-                new HashSet<Entry<InstanceIdentifier<? extends DataObject>, DataObject>>();
-
+                changeEvent.getCreatedData().entrySet();
+        /* All DataObjects for updates - init HashSet */
+        final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries = new HashSet<>();
+        /* Filtered DataObject for update processing only */
         Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updateConfigEntrySet =
-                changeEvent.getUpdatedConfigurationData().entrySet();
+                changeEvent.getUpdatedData().entrySet();
         updatedEntries.addAll(updateConfigEntrySet);
         updatedEntries.removeAll(createdEntries);
-
+        /* All DataObjects for remove */
         final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
-                changeEvent.getRemovedConfigurationData();
-
+                changeEvent.getRemovedPaths();
+        /* Create DataObject processing (send to device) */
         for (final Entry<InstanceIdentifier<? extends DataObject>, DataObject> createdEntry : createdEntries) {
-            InstanceIdentifier<? extends DataObject> c_key = createdEntry.getKey();
-            DataObject c_value = createdEntry.getValue();
-            this.add(c_key, c_value);
+            InstanceIdentifier<? extends DataObject> entryKey = createdEntry.getKey();
+            DataObject entryValue = createdEntry.getValue();
+            if (preconditionForChange(entryKey, entryValue, null)) {
+                this.add(entryKey, entryValue);
+            }
         }
 
         for (final Entry<InstanceIdentifier<?>, DataObject> updatedEntrie : updatedEntries) {
             Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
-                    changeEvent.getOriginalConfigurationData();
-
-            InstanceIdentifier<? extends Object> u_key = updatedEntrie.getKey();
-            final DataObject originalFlow = origConfigData.get(u_key);
-            final DataObject updatedFlow = updatedEntrie.getValue();
-            this.update(u_key, originalFlow, updatedFlow);
+                    changeEvent.getOriginalData();
+
+            InstanceIdentifier<? extends Object> entryKey = updatedEntrie.getKey();
+            final DataObject original = origConfigData.get(entryKey);
+            final DataObject updated = updatedEntrie.getValue();
+            if (preconditionForChange(entryKey, original, updated)) {
+                this.update(entryKey, original, updated);
+            }
         }
 
         for (final InstanceIdentifier<?> instanceId : removeEntriesInstanceIdentifiers) {
             Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
-                    changeEvent.getOriginalConfigurationData();
+                    changeEvent.getOriginalData();
 
             final DataObject removeValue = origConfigData.get(instanceId);
-            this.remove(instanceId, removeValue);
+            if (preconditionForChange(instanceId, removeValue, null)) {
+                this.remove(instanceId, removeValue);
+            }
         }
     }
 
+    /**
+     * Method returns the generated transaction ID, which is unique for
+     * every transaction. The ID is composed of a prefix ("DOM") and a unique number.
+     *
+     * @return String transactionID
+     */
     public String getTransactionId() {
         return this.transactionId;
     }
@@ -78,17 +100,74 @@ public abstract class AbstractChangeListener implements DataChangeListener {
         return "DOM-" + txNum.getAndIncrement();
     }
 
-    protected abstract void validate() throws IllegalStateException;
-
-    protected abstract void remove(
+    /**
+     * Method checks all local preconditions before applying the relevant changes.
+     *
+     * @param InstanceIdentifier identifier - the whole path to DataObject
+     * @param DataObject original - original DataObject (for update)
+     *                              or relevant DataObject (add/delete operations)
+     * @param DataObject update - changed DataObject (contain updates)
+     *                              or should be null for (add/delete operations)
+     *
+     * @return boolean - applicable
+     */
+    protected abstract boolean preconditionForChange(
             final InstanceIdentifier<? extends DataObject> identifier,
+            final DataObject original, final DataObject update);
+
+    /**
+     * Method checks the node data path in DataStore/OPERATIONAL, because
+     * without the Node identifier in DataStore/OPERATIONAL the device
+     * is not connected and only device pre-configuration is allowed.
+     *
+     * @param InstanceIdentifier identifier - could be whole path to DataObject,
+     *            but only the parent Node.class InstanceIdentifier is used for the check
+     *
+     * @return boolean - is the Node available in DataStore/OPERATIONAL (is connected)
+     */
+    protected boolean isNodeAvailable(final InstanceIdentifier<? extends DataObject> identifier,
+            final ReadOnlyTransaction readTrans) {
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+        try {
+            return readTrans.read(LogicalDatastoreType.OPERATIONAL, nodeInstanceId).get().isPresent();
+        }
+        catch (InterruptedException | ExecutionException e) {
+            LOG.error("Unexpected exception by reading Node ".concat(nodeInstanceId.toString()), e);
+            return false;
+        }
+        finally {
+            readTrans.close();
+        }
+    }
+
+    /**
+     * Method removes the DataObject identified by the InstanceIdentifier
+     * from the device.
+     *
+     * @param InstanceIdentifier identifier - the whole path to DataObject
+     * @param DataObject remove - DataObject for removing
+     */
+    protected abstract void remove(final InstanceIdentifier<? extends DataObject> identifier,
             final DataObject remove);
 
-    protected abstract void update(
-            final InstanceIdentifier<? extends DataObject> identifier,
+    /**
+     * Method updates the original DataObject to the updated DataObject
+     * on the device. Both are identified by the same InstanceIdentifier.
+     *
+     * @param InstanceIdentifier identifier - the whole path to DataObject
+     * @param DataObject original - original DataObject (for update)
+     * @param DataObject update - changed DataObject (contain updates)
+     */
+    protected abstract void update(final InstanceIdentifier<? extends DataObject> identifier,
             final DataObject original, final DataObject update);
 
-    protected abstract void add(
-            final InstanceIdentifier<? extends DataObject> identifier,
+    /**
+     * Method adds the DataObject identified by the InstanceIdentifier
+     * to the device.
+     *
+     * @param InstanceIdentifier identifier - the whole path to new DataObject
+     * @param DataObject add - new DataObject
+     */
+    protected abstract void add(final InstanceIdentifier<? extends DataObject> identifier,
             final DataObject add);
 }
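
The listener above is a template: onDataChanged() partitions the AsyncDataChangeEvent into created, updated and removed entries, and every entry is gated by preconditionForChange() before add/update/remove is invoked. As a minimal, hypothetical sketch (not part of this patch; class and field names are illustrative, and the real implementations are the concrete Flow and Group listeners later in this diff), a subclass could look like this, assuming a DataBroker is handed in by its provider:

    import org.opendaylight.controller.md.sal.binding.api.DataBroker;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
    import org.opendaylight.yangtools.yang.binding.DataObject;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    public class FlowChangeListenerSketch extends AbstractChangeListener {

        private final DataBroker dataBroker; // assumed to be injected by a provider

        public FlowChangeListenerSketch(final DataBroker dataBroker) {
            this.dataBroker = dataBroker;
        }

        @Override
        protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
                final DataObject original, final DataObject update) {
            // Gate every change: the payload must be a Flow and the parent Node must be
            // present in DataStore/OPERATIONAL (i.e. the device is connected).
            final boolean isFlow = update == null
                    ? original instanceof Flow
                    : original instanceof Flow && update instanceof Flow;
            // isNodeAvailable() closes the transaction it is given; due to short-circuiting
            // no transaction is opened when the type check already fails.
            return isFlow && isNodeAvailable(identifier, dataBroker.newReadOnlyTransaction());
        }

        @Override
        protected void add(final InstanceIdentifier<? extends DataObject> identifier, final DataObject add) {
            // a real listener translates this into an RPC call (see FlowChangeListener below)
        }

        @Override
        protected void update(final InstanceIdentifier<? extends DataObject> identifier,
                final DataObject original, final DataObject update) {
            // no-op in this sketch
        }

        @Override
        protected void remove(final InstanceIdentifier<? extends DataObject> identifier, final DataObject remove) {
            // no-op in this sketch
        }
    }
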
index 2f986ea..c75c644 100644 (file)
@@ -10,52 +10,79 @@ package org.opendaylight.controller.frm;
 import org.opendaylight.controller.frm.flow.FlowProvider;
 import org.opendaylight.controller.frm.group.GroupProvider;
 import org.opendaylight.controller.frm.meter.MeterProvider;
+import org.opendaylight.controller.frm.reconil.FlowNodeReconcilProvider;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.controller.sal.binding.api.AbstractBindingAwareProvider;
 import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ProviderContext;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
 import org.osgi.framework.BundleContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Forwarding Rules Manager Activator
+ *
+ * Activator manages all Providers ({@link FlowProvider}, {@link GroupProvider},
+ * {@link MeterProvider} and the {@link FlowNodeReconcilProvider}).
+ * It registers all listeners (DataChangeEvent, ReconcilNotification)
+ * in the Session Initialization phase.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
 public class FRMActivator extends AbstractBindingAwareProvider {
 
     private final static Logger LOG = LoggerFactory.getLogger(FRMActivator.class);
 
-    private static FlowProvider flowProvider = new FlowProvider();
-    private static GroupProvider groupProvider = new GroupProvider();
-    private static MeterProvider meterProvider = new MeterProvider();
+    private final FlowProvider flowProvider;
+    private final GroupProvider groupProvider;
+    private final MeterProvider meterProvider;
+    private final FlowNodeReconcilProvider flowNodeReconcilProvider;
+
+    public FRMActivator() {
+        this.flowProvider = new FlowProvider();
+        this.groupProvider = new GroupProvider();
+        this.meterProvider = new MeterProvider();
+        this.flowNodeReconcilProvider = new FlowNodeReconcilProvider();
+    }
 
     @Override
     public void onSessionInitiated(final ProviderContext session) {
-        DataProviderService flowSalService = session.<DataProviderService>getSALService(DataProviderService.class);
-        FRMActivator.flowProvider.setDataService(flowSalService);
-        SalFlowService rpcFlowSalService = session.<SalFlowService>getRpcService(SalFlowService.class);
-        FRMActivator.flowProvider.setSalFlowService(rpcFlowSalService);
-        FRMActivator.flowProvider.start();
-        DataProviderService groupSalService = session.<DataProviderService>getSALService(DataProviderService.class);
-        FRMActivator.groupProvider.setDataService(groupSalService);
-        SalGroupService rpcGroupSalService = session.<SalGroupService>getRpcService(SalGroupService.class);
-        FRMActivator.groupProvider.setSalGroupService(rpcGroupSalService);
-        FRMActivator.groupProvider.start();
-        DataProviderService meterSalService = session.<DataProviderService>getSALService(DataProviderService.class);
-        FRMActivator.meterProvider.setDataService(meterSalService);
-        SalMeterService rpcMeterSalService = session.<SalMeterService>getRpcService(SalMeterService.class);
-        FRMActivator.meterProvider.setSalMeterService(rpcMeterSalService);
-        FRMActivator.meterProvider.start();
+        LOG.info("FRMActivator initialization.");
+        /* Flow */
+        try {
+            final DataBroker flowSalService = session.getSALService(DataBroker.class);
+            this.flowProvider.init(flowSalService);
+            this.flowProvider.start(session);
+            /* Group */
+            final DataBroker groupSalService = session.getSALService(DataBroker.class);
+            this.groupProvider.init(groupSalService);
+            this.groupProvider.start(session);
+            /* Meter */
+            final DataBroker meterSalService = session.getSALService(DataBroker.class);
+            this.meterProvider.init(meterSalService);
+            this.meterProvider.start(session);
+            /* FlowNode Reconciliation */
+            final DataBroker dbs = session.getSALService(DataBroker.class);
+            this.flowNodeReconcilProvider.init(dbs);
+            this.flowNodeReconcilProvider.start(session);
+
+            LOG.info("FRMActivator started successfully");
+        } catch (Exception e) {
+            String errMsg = "Unexpected error by starting FRMActivator";
+            LOG.error(errMsg, e);
+            throw new IllegalStateException(errMsg, e);
+        }
     }
 
     @Override
     protected void stopImpl(final BundleContext context) {
         try {
-            FRMActivator.flowProvider.close();
-            FRMActivator.groupProvider.close();
-            FRMActivator.meterProvider.close();
-        } catch (Throwable e) {
+            this.flowProvider.close();
+            this.groupProvider.close();
+            this.meterProvider.close();
+            this.flowNodeReconcilProvider.close();
+        } catch (Exception e) {
             LOG.error("Unexpected error by stopping FRMActivator", e);
-            throw new RuntimeException(e);
         }
     }
   }
\ No newline at end of file
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/FlowCookieProducer.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/FlowCookieProducer.java
new file mode 100644 (file)
index 0000000..d7b54e8
--- /dev/null
@@ -0,0 +1,79 @@
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm;
+
+import java.math.BigInteger;
+
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.AtomicLongMap;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * Singleton FlowCookieProducer contains a FlowCookie generator which generates a unique
+ * flowCookie identifier for every flow in the same Table. That helps with quick
+ * identification of flow statistics, because DataStore/CONFIGURATION could contain
+ * a lot of flows with the same flowCookie. So we replace the original flowCookie
+ * with a unique one and build the final FlowCookieMap in DataStore/OPERATIONAL.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public enum FlowCookieProducer {
+
+    INSTANCE;
+
+    /* Flow_Cookie_Key and Flow_Ids MapHolder */
+    private static final AtomicLongMap<InstanceIdentifier<Table>> cookieKeys = AtomicLongMap.create();
+
+    /**
+     * Method returns the unique cookie for a node table.
+     * The Flow Cookie Key marks a List<FlowId> for correct flow statistic identification
+     * in DataStore/OPERATIONAL.
+     * We need a List<FlowId> because the system doesn't guarantee a unique mapping
+     * from flow_cookie to flow_id. REST operations don't use FRM yet, so a
+     * cookie from user input could map to a user-supplied flow ID as well as an alien,
+     * system-generated ID.
+     *
+     * @param InstanceIdentifier<Table> tableIdentifier
+     * @return unique BigInteger flowCookie for a node table
+     */
+    public BigInteger getNewCookie(final InstanceIdentifier<Table> tableIdentifier) {
+        FlowCookieProducer.validationTableIdentifier(tableIdentifier);
+        if ( cookieKeys.containsKey(tableIdentifier)) {
+            /* new identifier always starts from ONE because
+             * ZERO is reserved for the NO_COOKIES flows */
+            return BigInteger.valueOf(cookieKeys.addAndGet(tableIdentifier, 1L));
+        } else {
+            return BigInteger.valueOf(cookieKeys.incrementAndGet(tableIdentifier));
+        }
+    }
+
+    /**
+     * Method cleans the node table flow_cookie_key for the disconnected Node.
+     *
+     * @param InstanceIdentifier<Table> tableIdentifier
+     */
+    public void clean(final InstanceIdentifier<Table> tableIdentifier) {
+        FlowCookieProducer.validationTableIdentifier(tableIdentifier);
+        cookieKeys.remove(tableIdentifier);
+    }
+
+    /*
+     * Helper method for TableIdentifier input validation
+     */
+    private static void validationTableIdentifier(final InstanceIdentifier<Table> tableIdent) {
+        Preconditions.checkArgument(tableIdent != null, "Input validation exception: TableIdentifier can not be null !");
+    }
+}
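
In practice the producer is consulted once per added flow (see FlowChangeListener below, which stores the generated value via builder.setCookie(...)). A short, hypothetical usage sketch, assuming a node "openflow:1" and table 0 purely for illustration:

    import java.math.BigInteger;

    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    public final class FlowCookieProducerDemo {
        public static void main(final String[] args) {
            // Hypothetical table path: node "openflow:1", table 0.
            final InstanceIdentifier<Table> tableIdent = InstanceIdentifier.create(Nodes.class)
                    .child(Node.class, new NodeKey(new NodeId("openflow:1")))
                    .augmentation(FlowCapableNode.class)
                    .child(Table.class, new TableKey((short) 0));

            // Cookies for the same table are strictly increasing and start at 1
            // (0 is reserved for NO_COOKIES flows).
            final BigInteger first = FlowCookieProducer.INSTANCE.getNewCookie(tableIdent);   // 1
            final BigInteger second = FlowCookieProducer.INSTANCE.getNewCookie(tableIdent);  // 2
            System.out.println(first + ", " + second);

            // When the node disconnects, drop its counter so a later reconnect starts at 1 again.
            FlowCookieProducer.INSTANCE.clean(tableIdent);
        }
    }
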
index b604245..c10b0da 100644 (file)
@@ -7,22 +7,21 @@
  */
 package org.opendaylight.controller.frm.flow;
 
+import java.math.BigInteger;
+
 import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.frm.FlowCookieProducer;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInputBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.FlowTableRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.RemoveFlowInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.RemoveFlowInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.UpdateFlowInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.UpdateFlowInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.OriginalFlow;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.OriginalFlowBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.UpdatedFlow;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.UpdatedFlowBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.Flow;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowRef;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
@@ -31,90 +30,97 @@ import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
+ * Flow Change Listener
+ * handles add, update and remove {@link Flow} processing from an {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
  *
  * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
  *
  */
 public class FlowChangeListener extends AbstractChangeListener {
 
-    private final static Logger LOG = LoggerFactory.getLogger(FlowChangeListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FlowChangeListener.class);
 
-    private final SalFlowService salFlowService;
-
-    public SalFlowService getSalFlowService() {
-        return this.salFlowService;
-    }
+    private final FlowProvider provider;
 
-    public FlowChangeListener(final SalFlowService manager) {
-        this.salFlowService = manager;
+    public FlowChangeListener (final FlowProvider provider) {
+        this.provider = Preconditions.checkNotNull(provider, "FlowProvider can not be null !");
     }
 
     @Override
-    protected void validate() throws IllegalStateException {
-        FlowTransactionValidator.validate(this);
-    }
+    protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject removeDataObj) {
 
-    @Override
-    protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
-        if ((removeDataObj instanceof Flow)) {
-
-            final Flow flow = ((Flow) removeDataObj);
-            final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final RemoveFlowInputBuilder builder = new RemoveFlowInputBuilder(flow);
-
-            builder.setFlowRef(new FlowRef(identifier));
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salFlowService.removeFlow((RemoveFlowInput) builder.build());
-            LOG.debug("Transaction {} - Removed Flow has removed flow: {}", new Object[]{uri, removeDataObj});
-        }
+        final Flow flow = ((Flow) removeDataObj);
+        final InstanceIdentifier<Table> tableIdent = identifier.firstIdentifierOf(Table.class);
+        final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+        final RemoveFlowInputBuilder builder = new RemoveFlowInputBuilder(flow);
+
+        // use empty cookie mask in order to delete flow even with generated cookie
+        builder.setCookieMask(new FlowCookie(BigInteger.ZERO));
+
+        builder.setFlowRef(new FlowRef(identifier));
+        builder.setNode(new NodeRef(nodeIdent));
+        builder.setFlowTable(new FlowTableRef(tableIdent));
+
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalFlowService().removeFlow(builder.build());
+        LOG.debug("Transaction {} - Removed Flow has removed flow: {}", new Object[]{uri, removeDataObj});
     }
 
     @Override
-    protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
-        if (original instanceof Flow && update instanceof Flow) {
+    protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject original, final DataObject update) {
+
+        final Flow originalFlow = ((Flow) original);
+        final Flow updatedFlow = ((Flow) update);
+        final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+        final UpdateFlowInputBuilder builder = new UpdateFlowInputBuilder();
 
-            final Flow originalFlow = ((Flow) original);
-            final Flow updatedFlow = ((Flow) update);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node>firstIdentifierOf(Node.class);
-            final UpdateFlowInputBuilder builder = new UpdateFlowInputBuilder();
+        builder.setNode(new NodeRef(nodeIdent));
+        builder.setFlowRef(new FlowRef(identifier));
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setFlowRef(new FlowRef(identifier));
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
+        builder.setUpdatedFlow((new UpdatedFlowBuilder(updatedFlow)).build());
+        builder.setOriginalFlow((new OriginalFlowBuilder(originalFlow)).build());
 
-            builder.setUpdatedFlow((UpdatedFlow) (new UpdatedFlowBuilder(updatedFlow)).build());
-            builder.setOriginalFlow((OriginalFlow) (new OriginalFlowBuilder(originalFlow)).build());
+        this.provider.getSalFlowService().updateFlow(builder.build());
+        LOG.debug("Transaction {} - Update Flow has updated flow {} with {}", new Object[]{uri, original, update});
+    }
 
-            this.salFlowService.updateFlow((UpdateFlowInput) builder.build());
-            LOG.debug("Transaction {} - Update Flow has updated flow {} with {}", new Object[]{uri, original, update});
-      }
+    @Override
+    protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+                       final DataObject addDataObj) {
+
+        final Flow flow = ((Flow) addDataObj);
+        final InstanceIdentifier<Table> tableIdent = identifier.firstIdentifierOf(Table.class);
+        final NodeRef nodeRef = new NodeRef(identifier.firstIdentifierOf(Node.class));
+        final FlowCookie flowCookie = new FlowCookie(FlowCookieProducer.INSTANCE.getNewCookie(tableIdent));
+        final AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
+
+        builder.setNode(nodeRef);
+        builder.setFlowRef(new FlowRef(identifier));
+        builder.setFlowTable(new FlowTableRef(tableIdent));
+        builder.setCookie( flowCookie );
+
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalFlowService().addFlow(builder.build());
+        LOG.debug("Transaction {} - Add Flow has added flow: {}", new Object[]{uri, addDataObj});
     }
 
     @Override
-    protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
-        if ((addDataObj instanceof Flow)) {
-
-            final Flow flow = ((Flow) addDataObj);
-            final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
-
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setFlowRef(new FlowRef(identifier));
-            builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salFlowService.addFlow((AddFlowInput) builder.build());
-            LOG.debug("Transaction {} - Add Flow has added flow: {}", new Object[]{uri, addDataObj});
-        }
+    protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+            final DataObject dataObj, final DataObject update) {
+
+        final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+        return update != null
+                ? (dataObj instanceof Flow && update instanceof Flow && isNodeAvailable(identifier, trans))
+                : (dataObj instanceof Flow && isNodeAvailable(identifier, trans));
     }
 }
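
The remove() path above sets an all-zero cookie mask so that the cookie generated in add() does not prevent deletion. The OpenFlow cookie-mask rule this relies on can be illustrated with a small sketch (illustrative only, not part of the patch):

    import java.math.BigInteger;

    public final class CookieMaskDemo {
        public static void main(final String[] args) {
            // A delete request matches a flow when (flowCookie & mask) == (requestCookie & mask);
            // with mask == 0 both sides are 0, so every flow matches regardless of its cookie.
            final BigInteger flowCookie = BigInteger.valueOf(42);   // e.g. a cookie from FlowCookieProducer
            final BigInteger requestCookie = BigInteger.ZERO;
            final BigInteger mask = BigInteger.ZERO;
            final boolean matches = flowCookie.and(mask).equals(requestCookie.and(mask));
            System.out.println(matches); // true
        }
    }
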
index 33db529..8c248fa 100644 (file)
@@ -7,9 +7,11 @@
  */
 package org.opendaylight.controller.frm.flow;
 
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
@@ -17,54 +19,89 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalF
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
+/**
+ * Flow Provider registers the {@link FlowChangeListener} and holds all the
+ * services needed by the {@link FlowChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
 public class FlowProvider implements AutoCloseable {
 
-    private final static Logger LOG = LoggerFactory.getLogger(FlowProvider.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FlowProvider.class);
 
     private SalFlowService salFlowService;
-    private DataProviderService dataService;
+    private DataBroker dataService;
 
     /* DataChangeListener */
-    private FlowChangeListener flowDataChangeListener;
-    ListenerRegistration<DataChangeListener> flowDataChangeListenerRegistration;
+    private DataChangeListener flowDataChangeListener;
+    private ListenerRegistration<DataChangeListener> flowDataChangeListenerRegistration;
+
+    /**
+     * Provider Initialization Phase.
+     *
+     * @param DataBroker dataService
+     */
+    public void init (final DataBroker dataService) {
+        LOG.info("FRM Flow Config Provider initialization.");
+        this.dataService = Preconditions.checkNotNull(dataService, "DataProviderService can not be null !");
+    }
+
+    /**
+     * Listener Registration Phase
+     *
+     * @param RpcConsumerRegistry rpcRegistry
+     */
+    public void start(final RpcConsumerRegistry rpcRegistry) {
+        Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry can not be null !");
+
+        this.salFlowService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalFlowService.class),
+                "RPC SalFlowService not found.");
 
-    public void start() {
         /* Build Path */
-        InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
-        InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
-        InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
-        InstanceIdentifierBuilder<Table> tableChild = augmentFlowCapNode.<Table> child(Table.class);
-        InstanceIdentifierBuilder<Flow> flowChild = tableChild.<Flow> child(Flow.class);
-        final InstanceIdentifier<? extends DataObject> flowDataObjectPath = flowChild.toInstance();
+        InstanceIdentifier<Flow> flowIdentifier = InstanceIdentifier.create(Nodes.class)
+                .child(Node.class).augmentation(FlowCapableNode.class).child(Table.class).child(Flow.class);
 
         /* DataChangeListener registration */
-        this.flowDataChangeListener = new FlowChangeListener(this.salFlowService);
-        this.flowDataChangeListenerRegistration = this.dataService.registerDataChangeListener(flowDataObjectPath, flowDataChangeListener);
-        LOG.info("Flow Config Provider started.");
-    }
+        this.flowDataChangeListener = new FlowChangeListener(FlowProvider.this);
+        this.flowDataChangeListenerRegistration =
+                this.dataService.registerDataChangeListener(LogicalDatastoreType.CONFIGURATION,
+                        flowIdentifier, flowDataChangeListener, DataChangeScope.SUBTREE);
 
-    protected DataModificationTransaction startChange() {
-        return this.dataService.beginTransaction();
+        LOG.info("FRM Flow Config Provider started.");
     }
 
     @Override
-    public void close() throws Exception {
-        if(flowDataChangeListenerRegistration != null){
-            flowDataChangeListenerRegistration.close();
+    public void close() {
+        LOG.info("FRM Flow Config Provider stopped.");
+        if (flowDataChangeListenerRegistration != null) {
+            try {
+                flowDataChangeListenerRegistration.close();
+            } catch (Exception e) {
+                String errMsg = "Error by stop FRM Flow Config Provider.";
+                LOG.error(errMsg, e);
+                throw new IllegalStateException(errMsg, e);
+            } finally {
+                flowDataChangeListenerRegistration = null;
+            }
         }
     }
 
-    public void setDataService(final DataProviderService dataService) {
-        this.dataService = dataService;
+    public DataChangeListener getFlowDataChangeListener() {
+        return flowDataChangeListener;
+    }
+
+    public SalFlowService getSalFlowService() {
+        return salFlowService;
     }
 
-    public void setSalFlowService(final SalFlowService salFlowService) {
-        this.salFlowService = salFlowService;
+    public DataBroker getDataService() {
+        return dataService;
     }
 }
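
The provider wires FlowChangeListener to the flow subtree of DataStore/CONFIGURATION with SUBTREE scope, so the FRM pipeline is driven simply by writing flows into the config datastore. A hypothetical end-to-end sketch, assuming the asynchronous submit() API of this MD-SAL version; the node/table/flow identifiers and the helper class are illustrative, not part of this patch:

    import org.opendaylight.controller.md.sal.binding.api.DataBroker;
    import org.opendaylight.controller.md.sal.binding.api.WriteTransaction;
    import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowBuilder;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowKey;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowId;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    public final class FlowConfigWriterSketch {

        /** Writes a minimal flow into DataStore/CONFIGURATION for node "openflow:1", table 0. */
        public static void writeDemoFlow(final DataBroker dataBroker) {
            final FlowId flowId = new FlowId("demo-flow");
            final InstanceIdentifier<Flow> flowPath = InstanceIdentifier.create(Nodes.class)
                    .child(Node.class, new NodeKey(new NodeId("openflow:1")))
                    .augmentation(FlowCapableNode.class)
                    .child(Table.class, new TableKey((short) 0))
                    .child(Flow.class, new FlowKey(flowId));

            final Flow flow = new FlowBuilder()
                    .setId(flowId)
                    .setTableId((short) 0)
                    .setPriority(2)
                    .build();

            final WriteTransaction tx = dataBroker.newWriteOnlyTransaction();
            tx.put(LogicalDatastoreType.CONFIGURATION, flowPath, flow);
            // On commit, the SUBTREE listener registered above fires; if the node is present
            // in DataStore/OPERATIONAL, FlowChangeListener.add(...) sends an AddFlow RPC.
            tx.submit();
        }
    }
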
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/flow/FlowTransactionValidator.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/flow/FlowTransactionValidator.java
deleted file mode 100644 (file)
index 9cd4246..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.flow;
-
-public class FlowTransactionValidator {
-
-    public static void validate(FlowChangeListener transaction) throws IllegalStateException {
-        // NOOP
-    }
-}
index 54f12bf..9b03eaa 100644 (file)
@@ -8,17 +8,12 @@
 package org.opendaylight.controller.frm.group;
 
 import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.RemoveGroupInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.RemoveGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.UpdateGroupInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.UpdateGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.OriginalGroup;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.OriginalGroupBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.UpdatedGroup;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.UpdatedGroupBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.GroupRef;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
@@ -29,85 +24,88 @@ import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
+ * Group Change Listener
+ * handles add, update and remove {@link Group} processing from an {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
  *
  * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
  *
  */
 public class GroupChangeListener extends AbstractChangeListener {
 
-    private final static Logger LOG = LoggerFactory.getLogger(GroupChangeListener.class);
-
-    private final SalGroupService salGroupService;
+    private static final Logger LOG = LoggerFactory.getLogger(GroupChangeListener.class);
 
-    public SalGroupService getSalGroupService() {
-        return this.salGroupService;
-    }
+    private final GroupProvider provider;
 
-    public GroupChangeListener(final SalGroupService manager) {
-        this.salGroupService = manager;
+    public GroupChangeListener(final GroupProvider provider) {
+        this.provider = Preconditions.checkNotNull(provider, "GroupProvider can not be null !");
     }
 
     @Override
-    protected void validate() throws IllegalStateException {
-        GroupTransactionValidator.validate(this);
+    protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject removeDataObj) {
+
+        final Group group = ((Group) removeDataObj);
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+        final RemoveGroupInputBuilder builder = new RemoveGroupInputBuilder(group);
+
+        builder.setNode(new NodeRef(nodeInstanceId));
+        builder.setGroupRef(new GroupRef(identifier));
+
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalGroupService().removeGroup(builder.build());
+        LOG.debug("Transaction {} - Remove Group has removed group: {}", new Object[]{uri, removeDataObj});
     }
 
     @Override
-    protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
-        if ((removeDataObj instanceof Group)) {
+    protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject original, final DataObject update) {
 
-            final Group group = ((Group) removeDataObj);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final RemoveGroupInputBuilder builder = new RemoveGroupInputBuilder(group);
+        final Group originalGroup = ((Group) original);
+        final Group updatedGroup = ((Group) update);
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+        final UpdateGroupInputBuilder builder = new UpdateGroupInputBuilder();
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setGroupRef(new GroupRef(identifier));
+        builder.setNode(new NodeRef(nodeInstanceId));
+        builder.setGroupRef(new GroupRef(identifier));
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salGroupService.removeGroup((RemoveGroupInput) builder.build());
-            LOG.debug("Transaction {} - Remove Group has removed group: {}", new Object[]{uri, removeDataObj});
-        }
-    }
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
 
-    @Override
-    protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
-        if (original instanceof Group && update instanceof Group) {
+        builder.setUpdatedGroup((new UpdatedGroupBuilder(updatedGroup)).build());
+        builder.setOriginalGroup((new OriginalGroupBuilder(originalGroup)).build());
 
-            final Group originalGroup = ((Group) original);
-            final Group updatedGroup = ((Group) update);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final UpdateGroupInputBuilder builder = new UpdateGroupInputBuilder();
+        this.provider.getSalGroupService().updateGroup(builder.build());
+        LOG.debug("Transaction {} - Update Group has updated group {} with group {}", new Object[]{uri, original, update});
+    }
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setGroupRef(new GroupRef(identifier));
+    @Override
+    protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+                       final DataObject addDataObj) {
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
+        final Group group = ((Group) addDataObj);
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+        final AddGroupInputBuilder builder = new AddGroupInputBuilder(group);
 
-            builder.setUpdatedGroup((UpdatedGroup) (new UpdatedGroupBuilder(updatedGroup)).build());
-            builder.setOriginalGroup((OriginalGroup) (new OriginalGroupBuilder(originalGroup)).build());
+        builder.setNode(new NodeRef(nodeInstanceId));
+        builder.setGroupRef(new GroupRef(identifier));
 
-            this.salGroupService.updateGroup((UpdateGroupInput) builder.build());
-            LOG.debug("Transaction {} - Update Group has updated group {} with group {}", new Object[]{uri, original, update});
-        }
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalGroupService().addGroup(builder.build());
+        LOG.debug("Transaction {} - Add Group has added group: {}", new Object[]{uri, addDataObj});
     }
 
     @Override
-    protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
-        if ((addDataObj instanceof Group)) {
-            final Group group = ((Group) addDataObj);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final AddGroupInputBuilder builder = new AddGroupInputBuilder(group);
-
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setGroupRef(new GroupRef(identifier));
-
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salGroupService.addGroup((AddGroupInput) builder.build());
-            LOG.debug("Transaction {} - Add Group has added group: {}", new Object[]{uri, addDataObj});
-        }
+    protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+            final DataObject dataObj, final DataObject update) {
+
+        final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+        return update != null
+                ? (dataObj instanceof Group && update instanceof Group && isNodeAvailable(identifier, trans))
+                : (dataObj instanceof Group && isNodeAvailable(identifier, trans));
     }
 }
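
Note: isNodeAvailable(identifier, trans) used in preconditionForChange above is inherited from AbstractChangeListener and is not shown in this hunk. As a rough, hypothetical sketch of what such a helper could look like — assuming it checks the parent Node in the OPERATIONAL datastore through the supplied read transaction and closes it afterwards (imports as already used by the listener above):

    // Hypothetical sketch only; the real helper lives in AbstractChangeListener.
    protected boolean isNodeAvailable(final InstanceIdentifier<? extends DataObject> identifier,
                                      final ReadOnlyTransaction trans) {
        final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
        try {
            return trans.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).get().isPresent();
        } catch (InterruptedException | ExecutionException e) {
            return false;
        } finally {
            trans.close();
        }
    }
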
index 9f2806e..a999242 100644 (file)
@@ -7,61 +7,99 @@
  */
 package org.opendaylight.controller.frm.group;
 
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
+/**
+ * Group Provider registers the {@link GroupChangeListener} and holds all services
+ * needed by the {@link GroupChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
 public class GroupProvider implements AutoCloseable {
 
-    private final static Logger LOG = LoggerFactory.getLogger(GroupProvider.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GroupProvider.class);
 
     private SalGroupService salGroupService;
-    private DataProviderService dataService;
+    private DataBroker dataService;
 
     /* DataChangeListener */
-    private GroupChangeListener groupDataChangeListener;
-    ListenerRegistration<DataChangeListener> groupDataChangeListenerRegistration;
+    private DataChangeListener groupDataChangeListener;
+    private ListenerRegistration<DataChangeListener> groupDataChangeListenerRegistration;
+
+    /**
+     * Provider Initialization Phase.
+     *
+     * @param dataService the {@link DataBroker} used to register the data change listener
+     */
+    public void init(final DataBroker dataService) {
+        LOG.info("FRM Group Config Provider initialization.");
+        this.dataService = Preconditions.checkNotNull(dataService, "DataBroker cannot be null!");
+    }
+
+    /**
+     * Listener Registration Phase.
+     *
+     * @param rpcRegistry the {@link RpcConsumerRegistry} used to look up the SalGroupService RPC
+     */
+    public void start(final RpcConsumerRegistry rpcRegistry) {
+        Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry cannot be null!");
+
+        this.salGroupService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalGroupService.class),
+                "RPC SalGroupService not found.");
 
-    public void start() {
         /* Build Path */
-        InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
-        InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
-        InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
-        InstanceIdentifierBuilder<Group> groupChild = augmentFlowCapNode.<Group> child(Group.class);
-        final InstanceIdentifier<? extends DataObject> groupDataObjectPath = groupChild.toInstance();
+        InstanceIdentifier<Group> groupIdentifier = InstanceIdentifier.create(Nodes.class)
+                .child(Node.class).augmentation(FlowCapableNode.class).child(Group.class);
 
         /* DataChangeListener registration */
-        this.groupDataChangeListener = new GroupChangeListener(this.salGroupService);
-        this.groupDataChangeListenerRegistration = this.dataService.registerDataChangeListener(groupDataObjectPath, groupDataChangeListener);
-        LOG.info("Group Config Provider started.");
-    }
+        this.groupDataChangeListener = new GroupChangeListener(GroupProvider.this);
+        this.groupDataChangeListenerRegistration = this.dataService.registerDataChangeListener(
+                LogicalDatastoreType.CONFIGURATION, groupIdentifier, groupDataChangeListener, DataChangeScope.SUBTREE);
 
-    protected DataModificationTransaction startChange() {
-        return this.dataService.beginTransaction();
+        LOG.info("FRM Group Config Provider started.");
     }
 
-    public void close() throws Exception {
-        if(groupDataChangeListenerRegistration != null){
-            groupDataChangeListenerRegistration.close();
+    @Override
+    public void close() {
+        LOG.info("FRM Group Config Provider stopped.");
+        if (groupDataChangeListenerRegistration != null) {
+            try {
+                groupDataChangeListenerRegistration.close();
+            } catch (Exception e) {
+                String errMsg = "Error by stop FRM Group Config Provider.";
+                LOG.error(errMsg, e);
+                throw new IllegalStateException(errMsg, e);
+            } finally {
+                groupDataChangeListenerRegistration = null;
+            }
         }
     }
 
-    public void setDataService(final DataProviderService dataService) {
-        this.dataService = dataService;
+    public DataChangeListener getGroupDataChangeListener() {
+        return groupDataChangeListener;
+    }
+
+    public SalGroupService getSalGroupService() {
+        return salGroupService;
     }
 
-    public void setSalGroupService(final SalGroupService salGroupService) {
-        this.salGroupService = salGroupService;
+    public DataBroker getDataService() {
+        return dataService;
     }
 }
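
The provider is now driven through an explicit init/start/close lifecycle instead of setter injection. A minimal wiring sketch from a hypothetical caller — dataBroker and rpcRegistry stand in for the session services obtained by the activator elsewhere in this change; the names are illustrative only:

    // Hypothetical caller; the real wiring lives in the bundle activator, not shown here.
    GroupProvider groupProvider = new GroupProvider();
    groupProvider.init(dataBroker);      // supply the DataBroker
    groupProvider.start(rpcRegistry);    // look up SalGroupService and register the listener
    // ... later, on shutdown:
    groupProvider.close();               // unregister the listener
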
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/group/GroupTransactionValidator.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/group/GroupTransactionValidator.java
deleted file mode 100644 (file)
index 88eea0d..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.group;
-
-public class GroupTransactionValidator {
-
-    public static void validate(GroupChangeListener transaction) throws IllegalStateException {
-        // NOOP
-    }
-}
index 48d5257..a2def84 100644 (file)
@@ -8,19 +8,14 @@
 package org.opendaylight.controller.frm.meter;
 
 import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.RemoveMeterInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.RemoveMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.UpdateMeterInput;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.UpdateMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.OriginalMeter;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.OriginalMeterBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.UpdatedMeter;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.UpdatedMeterBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.Meter;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.MeterRef;
@@ -29,86 +24,89 @@ import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
+ * Meter Change Listener
+ * Handles add, update and remove of {@link Meter} objects delivered by {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
  *
  * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
  *
  */
 public class MeterChangeListener extends AbstractChangeListener {
 
-    private final static Logger LOG = LoggerFactory.getLogger(MeterChangeListener.class);
-
-    private final SalMeterService salMeterService;
-
-    public SalMeterService getSalMeterService() {
-        return this.salMeterService;
-    }
+    private static final Logger LOG = LoggerFactory.getLogger(MeterChangeListener.class);
 
-    public MeterChangeListener(final SalMeterService manager) {
-        this.salMeterService = manager;
-    }
+    private final MeterProvider provider;
 
-    @Override
-    protected void validate() throws IllegalStateException {
-        MeterTransactionValidator.validate(this);
+    public MeterChangeListener(final MeterProvider provider) {
+        this.provider = Preconditions.checkNotNull(provider, "MeterProvider cannot be null!");
     }
 
     @Override
-    protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
-        if ((removeDataObj instanceof Meter)) {
+    protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject removeDataObj) {
 
-            final Meter meter = ((Meter) removeDataObj);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final RemoveMeterInputBuilder builder = new RemoveMeterInputBuilder(meter);
+        final Meter meter = ((Meter) removeDataObj);
+        final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+        final RemoveMeterInputBuilder builder = new RemoveMeterInputBuilder(meter);
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setMeterRef(new MeterRef(identifier));
+        builder.setNode(new NodeRef(nodeIdent));
+        builder.setMeterRef(new MeterRef(identifier));
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salMeterService.removeMeter((RemoveMeterInput) builder.build());
-            LOG.debug("Transaction {} - Remove Meter has removed meter: {}", new Object[]{uri, removeDataObj});
-        }
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalMeterService().removeMeter(builder.build());
+        LOG.debug("Transaction {} - Remove Meter has removed meter: {}", new Object[]{uri, removeDataObj});
     }
 
     @Override
-    protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
-        if (original instanceof Meter && update instanceof Meter) {
+    protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject original, final DataObject update) {
 
-            final Meter originalMeter = ((Meter) original);
-            final Meter updatedMeter = ((Meter) update);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final UpdateMeterInputBuilder builder = new UpdateMeterInputBuilder();
+        final Meter originalMeter = ((Meter) original);
+        final Meter updatedMeter = ((Meter) update);
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+        final UpdateMeterInputBuilder builder = new UpdateMeterInputBuilder();
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setMeterRef(new MeterRef(identifier));
+        builder.setNode(new NodeRef(nodeInstanceId));
+        builder.setMeterRef(new MeterRef(identifier));
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
 
-            builder.setUpdatedMeter((UpdatedMeter) (new UpdatedMeterBuilder(updatedMeter)).build());
-            builder.setOriginalMeter((OriginalMeter) (new OriginalMeterBuilder(originalMeter)).build());
+        builder.setUpdatedMeter((new UpdatedMeterBuilder(updatedMeter)).build());
+        builder.setOriginalMeter((new OriginalMeterBuilder(originalMeter)).build());
+
+        this.provider.getSalMeterService().updateMeter(builder.build());
+        LOG.debug("Transaction {} - Update Meter has updated meter {} with {}", new Object[]{uri, original, update});
 
-            this.salMeterService.updateMeter((UpdateMeterInput) builder.build());
-            LOG.debug("Transaction {} - Update Meter has updated meter {} with {}", new Object[]{uri, original, update});
-        }
     }
 
     @Override
-    protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
-        if ((addDataObj instanceof Meter)) {
+    protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+                       final DataObject addDataObj) {
+
+        final Meter meter = ((Meter) addDataObj);
+        final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+        final AddMeterInputBuilder builder = new AddMeterInputBuilder(meter);
 
-            final Meter meter = ((Meter) addDataObj);
-            final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
-            final AddMeterInputBuilder builder = new AddMeterInputBuilder(meter);
+        builder.setNode(new NodeRef(nodeInstanceId));
+        builder.setMeterRef(new MeterRef(identifier));
 
-            builder.setNode(new NodeRef(nodeInstanceId));
-            builder.setMeterRef(new MeterRef(identifier));
+        Uri uri = new Uri(this.getTransactionId());
+        builder.setTransactionUri(uri);
+        this.provider.getSalMeterService().addMeter(builder.build());
+        LOG.debug("Transaction {} - Add Meter has added meter: {}", new Object[]{uri, addDataObj});
+    }
+
+    @Override
+    protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+            final DataObject dataObj, final DataObject update) {
 
-            Uri uri = new Uri(this.getTransactionId());
-            builder.setTransactionUri(uri);
-            this.salMeterService.addMeter((AddMeterInput) builder.build());
-            LOG.debug("Transaction {} - Add Meter has added meter: {}", new Object[]{uri, addDataObj});
-        }
+        final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+        return update != null
+                ? (dataObj instanceof Meter && update instanceof Meter && isNodeAvailable(identifier, trans))
+                : (dataObj instanceof Meter && isNodeAvailable(identifier, trans));
     }
-}
\ No newline at end of file
+}
index 8596c3f..44de7af 100644 (file)
@@ -7,61 +7,99 @@
  */
 package org.opendaylight.controller.frm.meter;
 
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.meters.Meter;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
+/**
+ * Meter Provider registers the {@link MeterChangeListener} and holds all services
+ * needed by the {@link MeterChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
 public class MeterProvider implements AutoCloseable {
 
-    private final static Logger LOG = LoggerFactory.getLogger(MeterProvider.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MeterProvider.class);
 
-    private DataProviderService dataService;
     private SalMeterService salMeterService;
+    private DataBroker dataService;
 
     /* DataChangeListener */
-    private MeterChangeListener meterDataChangeListener;
-    ListenerRegistration<DataChangeListener> meterDataChangeListenerRegistration;
+    private DataChangeListener meterDataChangeListener;
+    private ListenerRegistration<DataChangeListener> meterDataChangeListenerRegistration;
+
+    /**
+     * Provider Initialization Phase.
+     *
+     * @param dataService the {@link DataBroker} used to register the data change listener
+     */
+    public void init(final DataBroker dataService) {
+        LOG.info("FRM Meter Config Provider initialization.");
+        this.dataService = Preconditions.checkNotNull(dataService, "DataProviderService can not be null !");
+    }
+
+    /**
+     * Listener Registration Phase.
+     *
+     * @param rpcRegistry the {@link RpcConsumerRegistry} used to look up the SalMeterService RPC
+     */
+    public void start(final RpcConsumerRegistry rpcRegistry) {
+        Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry cannot be null!");
+        this.salMeterService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalMeterService.class),
+                "RPC SalMeterService not found.");
 
-    public void start() {
         /* Build Path */
-        InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
-        InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
-        InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
-        InstanceIdentifierBuilder<Meter> meterChild = augmentFlowCapNode.<Meter> child(Meter.class);
-        final InstanceIdentifier<? extends DataObject> meterDataObjectPath = meterChild.toInstance();
+        InstanceIdentifier<Meter> meterIdentifier = InstanceIdentifier.create(Nodes.class)
+                .child(Node.class).augmentation(FlowCapableNode.class).child(Meter.class);
 
         /* DataChangeListener registration */
-        this.meterDataChangeListener = new MeterChangeListener(this.salMeterService);
-        this.meterDataChangeListenerRegistration = this.dataService.registerDataChangeListener(meterDataObjectPath, meterDataChangeListener);
-        LOG.info("Meter Config Provider started.");
-    }
+        this.meterDataChangeListener = new MeterChangeListener(MeterProvider.this);
+        this.meterDataChangeListenerRegistration =
+                this.dataService.registerDataChangeListener(LogicalDatastoreType.CONFIGURATION,
+                        meterIdentifier, meterDataChangeListener, DataChangeScope.SUBTREE);
 
-    protected DataModificationTransaction startChange() {
-        return this.dataService.beginTransaction();
+        LOG.info("FRM Meter Config Provider started.");
     }
 
-    public void close() throws Exception {
-        if(meterDataChangeListenerRegistration != null){
-            meterDataChangeListenerRegistration.close();
+    @Override
+    public void close() {
+        LOG.info("FRM Meter Config Provider stopped.");
+        if (meterDataChangeListenerRegistration != null) {
+            try {
+                meterDataChangeListenerRegistration.close();
+            } catch (Exception e) {
+                String errMsg = "Error by stop FRM Meter Config Provider.";
+                LOG.error(errMsg, e);
+                throw new IllegalStateException(errMsg, e);
+            } finally {
+                meterDataChangeListenerRegistration = null;
+            }
         }
     }
 
-    public void setDataService(final DataProviderService dataService) {
-        this.dataService = dataService;
+    public DataChangeListener getMeterDataChangeListener() {
+        return meterDataChangeListener;
+    }
+
+    public DataBroker getDataService() {
+        return dataService;
     }
 
-    public void setSalMeterService(final SalMeterService salMeterService) {
-        this.salMeterService = salMeterService;
+    public SalMeterService getSalMeterService() {
+        return salMeterService;
     }
-}
\ No newline at end of file
+}
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/meter/MeterTransactionValidator.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/meter/MeterTransactionValidator.java
deleted file mode 100644 (file)
index c8fba23..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.meter;
-
-public class MeterTransactionValidator {
-
-    public static void validate(MeterChangeListener transaction) throws IllegalStateException {
-        // NOOP
-    }
-}
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilListener.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilListener.java
new file mode 100644 (file)
index 0000000..eb5ae4a
--- /dev/null
@@ -0,0 +1,185 @@
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm.reconil;
+
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+
+import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.frm.FlowCookieProducer;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.meters.Meter;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.FlowTableRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.GroupRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.MeterRef;
+import org.opendaylight.yangtools.yang.binding.DataObject;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * FlowNode Reconciliation Listener
+ * Reconciliation for a new FlowNode
+ * Remove CookieMapKey for removed FlowNode
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public class FlowNodeReconcilListener extends AbstractChangeListener {
+
+    private static final Logger LOG = LoggerFactory.getLogger(FlowNodeReconcilListener.class);
+
+    private final FlowNodeReconcilProvider provider;
+
+    public FlowNodeReconcilListener(final FlowNodeReconcilProvider provider) {
+        this.provider = Preconditions.checkNotNull(provider, "Flow Node Reconcil Provider can not be null!");
+    }
+
+    @Override
+    public void onDataChanged(AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+        /* FlowCapableNode DataObjects for reconciliation */
+        final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
+                changeEvent.getCreatedData().entrySet();
+        /* FlowCapableNode DataObjects for clean FlowCookieHolder */
+        final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
+                changeEvent.getRemovedPaths();
+        for (final Entry<InstanceIdentifier<? extends DataObject>, DataObject> createdEntry : createdEntries) {
+            InstanceIdentifier<? extends DataObject> entryKey = createdEntry.getKey();
+            DataObject entryValue = createdEntry.getValue();
+            if (preconditionForChange(entryKey, entryValue, null)) {
+                this.add(entryKey, entryValue);
+            }
+        }
+        for (final InstanceIdentifier<?> instanceId : removeEntriesInstanceIdentifiers) {
+            Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
+                    changeEvent.getOriginalData();
+            final DataObject removeValue = origConfigData.get(instanceId);
+            if (preconditionForChange(instanceId, removeValue, null)) {
+                this.remove(instanceId, removeValue);
+            }
+        }
+    }
+
+    @Override
+    /* Cleaning FlowCookieManager holder for all node tables */
+    protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject removeDataObj) {
+
+        final InstanceIdentifier<FlowCapableNode> flowNodeIdent =
+                identifier.firstIdentifierOf(FlowCapableNode.class);
+        final FlowCapableNode flowNode = ((FlowCapableNode) removeDataObj);
+
+        for (Table flowTable : flowNode.getTable()) {
+            final InstanceIdentifier<Table> tableIdent =
+                    flowNodeIdent.child(Table.class, flowTable.getKey());
+            FlowCookieProducer.INSTANCE.clean(tableIdent);
+        }
+    }
+
+    @Override
+    /* Reconciliation by connect new FlowCapableNode */
+    protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+                       final DataObject addDataObj) {
+
+        final InstanceIdentifier<FlowCapableNode> flowNodeIdent =
+                identifier.firstIdentifierOf(FlowCapableNode.class);
+        final Optional<FlowCapableNode> flowCapNode = this.readFlowCapableNode(flowNodeIdent);
+
+        if (flowCapNode.isPresent()) {
+            final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+            final NodeRef nodeRef = new NodeRef(nodeIdent);
+            /* Groups - have to be first */
+            for (Group group : flowCapNode.get().getGroup()) {
+                final GroupRef groupRef = new GroupRef(flowNodeIdent.child(Group.class, group.getKey()));
+                final AddGroupInputBuilder groupBuilder = new AddGroupInputBuilder(group);
+                groupBuilder.setGroupRef(groupRef);
+                groupBuilder.setNode(nodeRef);
+                this.provider.getSalGroupService().addGroup(groupBuilder.build());
+            }
+            /* Meters */
+            for (Meter meter : flowCapNode.get().getMeter()) {
+                final MeterRef meterRef = new MeterRef(flowNodeIdent.child(Meter.class, meter.getKey()));
+                final AddMeterInputBuilder meterBuilder = new AddMeterInputBuilder(meter);
+                meterBuilder.setMeterRef(meterRef);
+                meterBuilder.setNode(nodeRef);
+                this.provider.getSalMeterService().addMeter(meterBuilder.build());
+            }
+            /* Flows */
+            for (Table flowTable : flowCapNode.get().getTable()) {
+                final InstanceIdentifier<Table> tableIdent = flowNodeIdent.child(Table.class, flowTable.getKey());
+                for (Flow flow : flowTable.getFlow()) {
+                    final FlowCookie flowCookie = new FlowCookie(FlowCookieProducer.INSTANCE.getNewCookie(tableIdent));
+                    final FlowRef flowRef = new FlowRef(tableIdent.child(Flow.class, flow.getKey()));
+                    final FlowTableRef flowTableRef = new FlowTableRef(tableIdent);
+                    final AddFlowInputBuilder flowBuilder = new AddFlowInputBuilder(flow);
+                    flowBuilder.setCookie(flowCookie);
+                    flowBuilder.setNode(nodeRef);
+                    flowBuilder.setFlowTable(flowTableRef);
+                    flowBuilder.setFlowRef(flowRef);
+                    this.provider.getSalFlowService().addFlow(flowBuilder.build());
+                }
+            }
+        }
+    }
+
+    @Override
+    protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+                          final DataObject original, DataObject update) {
+        // NOOP - Listener is registered for DataChangeScope.BASE only
+    }
+
+    @Override
+    protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+                                            final DataObject dataObj, final DataObject update) {
+        return (dataObj instanceof FlowCapableNode);
+    }
+
+    private Optional<FlowCapableNode> readFlowCapableNode(final InstanceIdentifier<FlowCapableNode> flowNodeIdent) {
+        ReadOnlyTransaction readTrans = this.provider.getDataService().newReadOnlyTransaction();
+        try {
+            ListenableFuture<Optional<FlowCapableNode>> confFlowNode =
+                    readTrans.read(LogicalDatastoreType.CONFIGURATION, flowNodeIdent);
+            if (confFlowNode.get().isPresent()) {
+                return Optional.<FlowCapableNode> of(confFlowNode.get().get());
+            } else {
+                return Optional.absent();
+            }
+        }
+        catch (InterruptedException | ExecutionException e) {
+            LOG.error("Unexpected exception by reading flow ".concat(flowNodeIdent.toString()), e);
+            return Optional.absent();
+        }
+        finally {
+            readTrans.close();
+        }
+    }
+}
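
FlowCookieProducer.INSTANCE is added elsewhere in this change; only its getNewCookie and clean calls are visible here. Purely to illustrate the contract those calls imply — per-table cookies keyed by the Table path — a hypothetical allocator could look like the following; the class name and internals are assumptions, not the actual implementation:

    // Hypothetical sketch; NOT the actual FlowCookieProducer from this change.
    public enum ExampleCookieAllocator {
        INSTANCE;

        private final ConcurrentMap<InstanceIdentifier<Table>, AtomicLong> counters =
                new ConcurrentHashMap<>();

        public BigInteger getNewCookie(final InstanceIdentifier<Table> tableIdent) {
            counters.putIfAbsent(tableIdent, new AtomicLong(0));
            return BigInteger.valueOf(counters.get(tableIdent).incrementAndGet());
        }

        public void clean(final InstanceIdentifier<Table> tableIdent) {
            counters.remove(tableIdent);
        }
    }
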
diff --git a/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilProvider.java b/opendaylight/md-sal/forwardingrules-manager/src/main/java/org/opendaylight/controller/frm/reconil/FlowNodeReconcilProvider.java
new file mode 100644 (file)
index 0000000..ad970d6
--- /dev/null
@@ -0,0 +1,115 @@
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm.reconil;
+
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
+import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * FlowNode Reconciliation Provider registers the FlowNodeReconcilListener
+ * and holds all services needed by the FlowNodeReconcilListener.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public class FlowNodeReconcilProvider implements AutoCloseable {
+
+    private static final Logger LOG = LoggerFactory.getLogger(FlowNodeReconcilProvider.class);
+
+    private SalFlowService salFlowService;
+    private SalMeterService salMeterService;
+    private SalGroupService salGroupService;
+    private DataBroker dataService;
+
+    /* DataChangeListener */
+    private DataChangeListener flowNodeReconcilListener;
+    private ListenerRegistration<DataChangeListener> flowNodeReconcilListenerRegistration;
+
+    public void init(final DataBroker dataService) {
+        LOG.info("FRM Flow Node Config Reconcil Provider initialization.");
+
+        this.dataService = Preconditions.checkNotNull(dataService, "DataBroker cannot be null!");
+    }
+
+    public void start(final RpcConsumerRegistry rpcRegistry) {
+        Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry cannot be null!");
+
+        this.salFlowService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalFlowService.class),
+                "RPC SalFlowService not found.");
+        this.salMeterService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalMeterService.class),
+                "RPC SalMeterService not found.");
+        this.salGroupService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalGroupService.class),
+                "RPC SalGroupService not found.");
+
+        /* Build Path */
+        InstanceIdentifier<FlowCapableNode> flowCapableNodeIdent =
+                InstanceIdentifier.create(Nodes.class).child(Node.class).augmentation(FlowCapableNode.class);
+
+        /* ReconcilNotificationListener registration */
+        this.flowNodeReconcilListener = new FlowNodeReconcilListener(FlowNodeReconcilProvider.this);
+        this.flowNodeReconcilListenerRegistration = this.dataService.registerDataChangeListener(
+                LogicalDatastoreType.OPERATIONAL, flowCapableNodeIdent, flowNodeReconcilListener, DataChangeScope.BASE);
+        LOG.info("FRM Flow Node Config Reconcil Provider started.");
+    }
+
+    @Override
+    public void close() {
+        LOG.info("FRM Flow Node Config Reconcil Provider stopped.");
+        if (flowNodeReconcilListenerRegistration != null) {
+            try {
+                flowNodeReconcilListenerRegistration.close();
+            } catch (Exception e) {
+                String errMsg = "Error by stop FRM Flow Node Config Reconcil Provider.";
+                LOG.error(errMsg, e);
+                throw new IllegalStateException(errMsg, e);
+            } finally {
+                flowNodeReconcilListenerRegistration = null;
+            }
+        }
+    }
+
+    public DataChangeListener getFlowNodeReconcilListener() {
+        return flowNodeReconcilListener;
+    }
+
+    public DataBroker getDataService() {
+        return dataService;
+    }
+
+    public SalFlowService getSalFlowService() {
+        return salFlowService;
+    }
+
+    public SalMeterService getSalMeterService() {
+        return salMeterService;
+    }
+
+    public SalGroupService getSalGroupService() {
+        return salGroupService;
+    }
+}
index 65362a1..605cb90 100644 (file)
@@ -229,4 +229,16 @@ module flow-node-inventory {
         uses flow-node-connector;
     }
 
+    augment "/inv:nodes/inv:node/table" {
+        ext:augment-identifier "flow-cookie-mapping";
+        list flow-cookie-map {
+            key "cookie";
+            leaf cookie {
+                type flow:flow-cookie;
+            }
+            leaf-list flow-ids {
+                type flow-id;
+            }
+        }
+    }
 }
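
Assuming the binding generator maps ext:augment-identifier "flow-cookie-mapping" to a FlowCookieMapping augmentation of Table (the usual yangtools convention; the generated class name is an assumption here, as are the node and table keys), the new flow-cookie-map list would be addressed from Java roughly like this:

    // Hypothetical path; FlowCookieMapping and the keys below are illustrative only.
    InstanceIdentifier<FlowCookieMapping> cookieMapPath = InstanceIdentifier.create(Nodes.class)
            .child(Node.class, new NodeKey(new NodeId("openflow:1")))
            .augmentation(FlowCapableNode.class)
            .child(Table.class, new TableKey((short) 0))
            .augmentation(FlowCookieMapping.class);
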
index b62e452..bd78c58 100644 (file)
@@ -39,14 +39,20 @@ public abstract class AbstractBrokerAwareActivator implements BundleActivator {
 
         @Override
         public void modifiedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
-            // TODO Auto-generated method stub
-
+            removedService(reference, service);
+            addingService(reference);
         }
 
         @Override
         public void removedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
-            // TODO Auto-generated method stub
+            broker = context.getService(reference);
+            mdActivationPool.execute(new Runnable() {
 
+                @Override
+                public void run() {
+                    onBrokerRemoved(broker, context);
+                }
+            });
         }
 
     };
@@ -117,6 +123,6 @@ public abstract class AbstractBrokerAwareActivator implements BundleActivator {
     protected abstract void onBrokerAvailable(BindingAwareBroker broker, BundleContext context);
 
     protected void onBrokerRemoved(BindingAwareBroker broker, BundleContext context) {
-
+        stopImpl(context);
     }
 }
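
With these changes onBrokerRemoved now defaults to stopImpl(context), so a concrete activator only has to supply onBrokerAvailable. A hypothetical skeleton:

    // Hypothetical subclass skeleton; not part of this change.
    public class ExampleActivator extends AbstractBrokerAwareActivator {
        @Override
        protected void onBrokerAvailable(BindingAwareBroker broker, BundleContext context) {
            // obtain consumer/provider contexts from the broker and start the bundle's services here
        }
    }
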
index 6db4d3a..4419d19 100644 (file)
       <groupId>org.opendaylight.yangtools</groupId>
       <artifactId>yang-parser-impl</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.opendaylight.controller</groupId>
+      <artifactId>netconf-util</artifactId>
+    </dependency>
 
     <dependency>
       <groupId>xmlunit</groupId>
           <artifactId>jsr305</artifactId>
           <version>2.0.1</version>
       </dependency>
+
+      <dependency>
+          <groupId>com.codahale.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+          <version>3.0.1</version>
+      </dependency>
   </dependencies>
 
 </project>
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/actor/MeteredBoundedMailbox.java
new file mode 100644 (file)
index 0000000..6464315
--- /dev/null
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorPath;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.dispatch.BoundedMailbox;
+import akka.dispatch.MailboxType;
+import akka.dispatch.MessageQueue;
+import akka.dispatch.ProducesMessageQueue;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Preconditions;
+import com.typesafe.config.Config;
+import org.opendaylight.controller.common.reporting.MetricsReporter;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+
+public class MeteredBoundedMailbox implements MailboxType, ProducesMessageQueue<BoundedMailbox.MessageQueue> {
+
+    private MeteredMessageQueue queue;
+    private Integer capacity;
+    private FiniteDuration pushTimeOut;
+    private ActorPath actorPath;
+    private MetricsReporter reporter;
+
+    private final String QUEUE_SIZE = "queue-size";
+    private final Long DEFAULT_TIMEOUT = 10L;
+
+    public MeteredBoundedMailbox(ActorSystem.Settings settings, Config config) {
+        Preconditions.checkArgument( config.hasPath("mailbox-capacity"), "Missing configuration [mailbox-capacity]" );
+        this.capacity = config.getInt("mailbox-capacity");
+        Preconditions.checkArgument( this.capacity > 0, "mailbox-capacity must be > 0");
+
+        Long timeout = -1L;
+        if ( config.hasPath("mailbox-push-timeout-time") ){
+            timeout = config.getDuration("mailbox-push-timeout-time", TimeUnit.NANOSECONDS);
+        } else {
+            timeout = DEFAULT_TIMEOUT;
+        }
+        Preconditions.checkArgument( timeout > 0, "mailbox-push-timeout-time must be > 0");
+        this.pushTimeOut = new FiniteDuration(timeout, TimeUnit.NANOSECONDS);
+
+        reporter = MetricsReporter.getInstance();
+    }
+
+
+    @Override
+    public MessageQueue create(final scala.Option<ActorRef> owner, scala.Option<ActorSystem> system) {
+        this.queue = new MeteredMessageQueue(this.capacity, this.pushTimeOut);
+        monitorQueueSize(owner, this.queue);
+        return this.queue;
+    }
+
+    private void monitorQueueSize(scala.Option<ActorRef> owner, final MeteredMessageQueue monitoredQueue) {
+        if (owner.isEmpty()) {
+            return; //there's no actor to monitor
+        }
+        actorPath = owner.get().path();
+        MetricRegistry registry = reporter.getMetricsRegistry();
+
+        String actorName = registry.name(actorPath.toString(), QUEUE_SIZE);
+
+        if (registry.getMetrics().containsKey(actorName)) {
+            return; // already registered
+        }
+
+        reporter.getMetricsRegistry().register(actorName,
+                new Gauge<Integer>() {
+                    @Override
+                    public Integer getValue() {
+                        return monitoredQueue.size();
+                    }
+                });
+    }
+
+
+    public static class MeteredMessageQueue extends BoundedMailbox.MessageQueue {
+
+        public MeteredMessageQueue(int capacity, FiniteDuration pushTimeOut) {
+            super(capacity, pushTimeOut);
+        }
+    }
+
+}
+
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/common/reporting/MetricsReporter.java
new file mode 100644 (file)
index 0000000..5c3e11f
--- /dev/null
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.reporting;
+
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.MetricRegistry;
+
+/**
+ * Maintains the metrics registry that is provided to reporters.
+ * At the moment only one reporter, {@code JmxReporter}, exists.
+ * More reporters can be added.
+ * <p/>
+ * Consumers of this class are only interested in the {@code MetricRegistry}
+ * where their metrics are stored.
+ */
+public class MetricsReporter implements AutoCloseable{
+
+    private final MetricRegistry METRICS_REGISTRY = new MetricRegistry();
+    private final String DOMAIN = "org.opendaylight.controller";
+
+    public final JmxReporter jmxReporter = JmxReporter.forRegistry(METRICS_REGISTRY).inDomain(DOMAIN).build();
+
+    private static MetricsReporter inst = new MetricsReporter();
+
+    private MetricsReporter(){
+        jmxReporter.start();
+    }
+
+    public static MetricsReporter getInstance(){
+        return inst;
+    }
+
+    public MetricRegistry getMetricsRegistry(){
+        return METRICS_REGISTRY;
+    }
+
+    @Override
+    public void close() throws Exception {
+        jmxReporter.close();
+    }
+}
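
A possible consumer-side use of the shared registry — the metric names below are invented for illustration:

    // Hypothetical consumer; registers a counter alongside the mailbox gauges.
    MetricRegistry registry = MetricsReporter.getInstance().getMetricsRegistry();
    Counter processed = registry.counter(MetricRegistry.name("example-actor", "messages-processed"));
    processed.inc();
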
@@ -5,7 +5,7 @@
  * terms of the Eclipse Public License v1.0 which accompanies this distribution,
  * and is available at http://www.eclipse.org/legal/epl-v10.html
  */
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
 
 import com.google.common.base.Optional;
 import org.opendaylight.controller.netconf.util.xml.XmlUtil;
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/common/actor/MeteredBoundedMailboxTest.java
new file mode 100644 (file)
index 0000000..1e60803
--- /dev/null
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.DeadLetter;
+import akka.actor.Props;
+import akka.actor.UntypedActor;
+import akka.japi.Creator;
+import akka.testkit.JavaTestKit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class MeteredBoundedMailboxTest {
+
+    private static ActorSystem actorSystem;
+    private final ReentrantLock lock = new ReentrantLock();
+
+    @Before
+    public void setUp() throws Exception {
+        actorSystem = ActorSystem.create("testsystem");
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        if (actorSystem != null) {
+            actorSystem.shutdown();
+        }
+    }
+
+    @Test
+    public void test_WhenQueueIsFull_ShouldSendMsgToDeadLetter() throws InterruptedException {
+        final JavaTestKit mockReceiver = new JavaTestKit(actorSystem);
+        actorSystem.eventStream().subscribe(mockReceiver.getRef(), DeadLetter.class);
+
+
+        final FiniteDuration TEN_SEC = new FiniteDuration(10, TimeUnit.SECONDS);
+        String boundedMailBox = actorSystem.name() + ".bounded-mailbox";
+        ActorRef pingPongActor = actorSystem.actorOf(PingPongActor.props(lock).withMailbox(boundedMailBox),
+                                                     "pingpongactor");
+
+        actorSystem.mailboxes().settings();
+        lock.lock();
+        //queue capacity = 10
+        //need to send 12 messages; 1 message is dequeued and actor waits on lock,
+        //2nd to 11th messages are put on the queue
+        //12th message is sent to dead letter.
+        for (int i=0;i<12;i++){
+            pingPongActor.tell("ping", mockReceiver.getRef());
+        }
+
+        mockReceiver.expectMsgClass(TEN_SEC, DeadLetter.class);
+
+        lock.unlock();
+
+        Object[] eleven = mockReceiver.receiveN(11, TEN_SEC);
+    }
+
+    /**
+     * For testing
+     */
+    public static class PingPongActor extends UntypedActor{
+
+        ReentrantLock lock;
+
+        private PingPongActor(ReentrantLock lock){
+            this.lock = lock;
+        }
+
+        public static Props props(final ReentrantLock lock){
+            return Props.create(new Creator<PingPongActor>(){
+                @Override
+                public PingPongActor create() throws Exception {
+                    return new PingPongActor(lock);
+                }
+            });
+        }
+
+        @Override
+        public void onReceive(Object message) throws Exception {
+            lock.lock();
+            if ("ping".equals(message))
+                getSender().tell("pong", getSelf());
+        }
+    }
+}
\ No newline at end of file
index e69de29..0392dec 100644 (file)
@@ -0,0 +1,8 @@
+testsystem {
+
+  bounded-mailbox {
+    mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+    mailbox-capacity = 10
+    mailbox-push-timeout-time = 100ms
+  }
+}
\ No newline at end of file
diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf b/opendaylight/md-sal/sal-clustering-commons/src/test/resources/reference.conf
new file mode 100644 (file)
index 0000000..3481bae
--- /dev/null
@@ -0,0 +1,8 @@
+testsystem {
+
+  bounded-mailbox {
+    mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+    mailbox-capacity = 1000
+    mailbox-push-timeout-time = 10ms
+  }
+}
\ No newline at end of file
index 648e8d2..9c5129d 100644 (file)
         <version>1.1-SNAPSHOT</version>
     </dependency>
 
+      <dependency>
+          <groupId>com.codahale.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+          <version>3.0.1</version>
+      </dependency>
     <!-- Test Dependencies -->
     <dependency>
       <groupId>junit</groupId>
             <Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
             <Export-package></Export-package>
             <Private-Package></Private-Package>
-            <Import-Package>!*snappy;!org.jboss.*;*</Import-Package>
+            <Import-Package>!*snappy;!org.jboss.*;!com.jcraft.*;*</Import-Package>
             <Embed-Dependency>
                 sal-clustering-commons;
                 sal-akka-raft;
+                *metrics*;
                 !sal*;
                 !*config-api*;
                 !*testkit*;
index ac01f42..b258c44 100644 (file)
@@ -18,7 +18,7 @@ public abstract class AbstractUntypedActor extends UntypedActor {
         Logging.getLogger(getContext().system(), this);
 
 
-    public AbstractUntypedActor(){
+    public AbstractUntypedActor() {
         LOG.debug("Actor created {}", getSelf());
         getContext().
             system().
@@ -29,16 +29,18 @@ public abstract class AbstractUntypedActor extends UntypedActor {
     @Override public void onReceive(Object message) throws Exception {
         LOG.debug("Received message {}", message.getClass().getSimpleName());
         handleReceive(message);
-        LOG.debug("Done handling message {}", message.getClass().getSimpleName());
+        LOG.debug("Done handling message {}",
+            message.getClass().getSimpleName());
     }
 
     protected abstract void handleReceive(Object message) throws Exception;
 
-    protected void ignoreMessage(Object message){
+    protected void ignoreMessage(Object message) {
         LOG.debug("Unhandled message {} ", message);
     }
 
-    protected void unknownMessage(Object message) throws Exception{
+    protected void unknownMessage(Object message) throws Exception {
+        LOG.debug("Received unhandled message {}", message);
         unhandled(message);
     }
 }
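
For reference, subclasses only implement handleReceive and can fall back to unknownMessage for anything unexpected; a minimal hypothetical example:

    // Hypothetical subclass; not part of this change.
    public class EchoActor extends AbstractUntypedActor {
        @Override
        protected void handleReceive(Object message) throws Exception {
            if (message instanceof String) {
                getSender().tell(message, getSelf());
            } else {
                unknownMessage(message);
            }
        }
    }
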
index 40e045f..404a4e0 100644 (file)
@@ -10,9 +10,8 @@ package org.opendaylight.controller.cluster.datastore;
 
 import akka.actor.ActorRef;
 import akka.actor.ActorSystem;
+
 import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
 import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
 import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
@@ -21,14 +20,13 @@ import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategy
 import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
 import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
 import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
 import org.opendaylight.controller.sal.core.spi.data.DOMStore;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.util.PropertyUtils;
-import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
@@ -41,38 +39,14 @@ import org.slf4j.LoggerFactory;
  */
 public class DistributedDataStore implements DOMStore, SchemaContextListener, AutoCloseable {
 
-    private static final Logger
-        LOG = LoggerFactory.getLogger(DistributedDataStore.class);
-
-    private static final String EXECUTOR_MAX_POOL_SIZE_PROP =
-            "mdsal.dist-datastore-executor-pool.size";
-    private static final int DEFAULT_EXECUTOR_MAX_POOL_SIZE = 10;
-
-    private static final String EXECUTOR_MAX_QUEUE_SIZE_PROP =
-            "mdsal.dist-datastore-executor-queue.size";
-    private static final int DEFAULT_EXECUTOR_MAX_QUEUE_SIZE = 5000;
+    private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
 
     private final ActorContext actorContext;
 
     private SchemaContext schemaContext;
 
-    /**
-     * Executor used to run FutureTask's
-     *
-     * This is typically used when we need to make a request to an actor and
-     * wait for it's response and the consumer needs to be provided a Future.
-     */
-    private final ListeningExecutorService executor =
-            MoreExecutors.listeningDecorator(
-                    SpecialExecutors.newBlockingBoundedFastThreadPool(
-                            PropertyUtils.getIntSystemProperty(
-                                    EXECUTOR_MAX_POOL_SIZE_PROP,
-                                    DEFAULT_EXECUTOR_MAX_POOL_SIZE),
-                            PropertyUtils.getIntSystemProperty(
-                                    EXECUTOR_MAX_QUEUE_SIZE_PROP,
-                                    DEFAULT_EXECUTOR_MAX_QUEUE_SIZE), "DistDataStore"));
-
-    public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster, Configuration configuration) {
+    public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster,
+            Configuration configuration, InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
         Preconditions.checkNotNull(actorSystem, "actorSystem should not be null");
         Preconditions.checkNotNull(type, "type should not be null");
         Preconditions.checkNotNull(cluster, "cluster should not be null");
@@ -84,7 +58,7 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
         LOG.info("Creating ShardManager : {}", shardManagerId);
 
         this.actorContext = new ActorContext(actorSystem, actorSystem
-            .actorOf(ShardManager.props(type, cluster, configuration),
+            .actorOf(ShardManager.props(type, cluster, configuration, dataStoreProperties),
                 shardManagerId ), cluster, configuration);
     }
 
@@ -93,15 +67,16 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
     }
 
 
+    @SuppressWarnings("unchecked")
     @Override
-    public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
+    public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>>
+                                              ListenerRegistration<L> registerChangeListener(
         YangInstanceIdentifier path, L listener,
         AsyncDataBroker.DataChangeScope scope) {
 
         Preconditions.checkNotNull(path, "path should not be null");
         Preconditions.checkNotNull(listener, "listener should not be null");
 
-
         LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
 
         ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
@@ -110,10 +85,8 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
         String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
 
         Object result = actorContext.executeLocalShardOperation(shardName,
-            new RegisterChangeListener(path, dataChangeListenerActor.path(),
-                scope),
-            ActorContext.ASK_DURATION
-        );
+            new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+            ActorContext.ASK_DURATION);
 
         if (result != null) {
             RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
@@ -125,34 +98,31 @@ public class DistributedDataStore implements DOMStore, SchemaContextListener, Au
         LOG.debug(
             "No local shard for shardName {} was found so returning a noop registration",
             shardName);
+
         return new NoOpDataChangeListenerRegistration(listener);
     }
 
-
-
-
-
     @Override
     public DOMStoreTransactionChain createTransactionChain() {
-        return new TransactionChainProxy(actorContext, executor, schemaContext);
+        return new TransactionChainProxy(actorContext, schemaContext);
     }
 
     @Override
     public DOMStoreReadTransaction newReadOnlyTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override
     public DOMStoreWriteTransaction newWriteOnlyTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.WRITE_ONLY,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override
     public DOMStoreReadWriteTransaction newReadWriteTransaction() {
         return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_WRITE,
-            executor, schemaContext);
+            schemaContext);
     }
 
     @Override public void onGlobalContextUpdated(SchemaContext schemaContext) {
index 6d87271..a1a3e87 100644 (file)
@@ -9,19 +9,22 @@
 package org.opendaylight.controller.cluster.datastore;
 
 import akka.actor.ActorSystem;
+
 import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
 import org.opendaylight.controller.sal.core.api.model.SchemaService;
 
 public class DistributedDataStoreFactory {
-    public static DistributedDataStore createInstance(String name, SchemaService schemaService){
+    public static DistributedDataStore createInstance(String name, SchemaService schemaService,
+            InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
+
         ActorSystem actorSystem = ActorSystemFactory.getInstance();
         Configuration config = new ConfigurationImpl("module-shards.conf", "modules.conf");
         final DistributedDataStore dataStore =
-            new DistributedDataStore(actorSystem, name, new ClusterWrapperImpl(actorSystem),config );
-       ShardStrategyFactory.setConfiguration(config);
-        schemaService
-            .registerSchemaContextListener(dataStore);
+            new DistributedDataStore(actorSystem, name, new ClusterWrapperImpl(actorSystem),
+                    config, dataStoreProperties );
+        ShardStrategyFactory.setConfiguration(config);
+        schemaService.registerSchemaContextListener(dataStore);
         return dataStore;
-
     }
 }
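For callers of the factory, the change above only widens the createInstance signature. A hedged usage sketch follows; the surrounding class and the way schemaService and dataStoreProperties are obtained are placeholders, not part of this patch:

    import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
    import org.opendaylight.controller.sal.core.api.model.SchemaService;

    final class DatastoreBootstrapSketch {
        static DistributedDataStore createConfigDatastore(SchemaService schemaService,
                InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
            // The factory now threads the in-memory store tuning properties down to
            // the ShardManager and each Shard's backing InMemoryDOMDataStore.
            return DistributedDataStoreFactory.createInstance(
                    "config", schemaService, dataStoreProperties);
        }
    }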
index 63b2633..75f540a 100644 (file)
@@ -17,6 +17,8 @@ import akka.japi.Creator;
 import akka.serialization.Serialization;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
 import org.opendaylight.controller.cluster.datastore.identifiers.ShardTransactionIdentifier;
@@ -41,6 +43,7 @@ import org.opendaylight.controller.cluster.raft.RaftActor;
 import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry;
 import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
 import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
 import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
@@ -92,7 +95,8 @@ public class Shard extends RaftActor {
 
     private final List<ActorSelection> dataChangeListeners = new ArrayList<>();
 
-    private Shard(ShardIdentifier name, Map<ShardIdentifier, String> peerAddresses) {
+    private Shard(ShardIdentifier name, Map<ShardIdentifier, String> peerAddresses,
+            InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
         super(name.toString(), mapPeerAddresses(peerAddresses), Optional.of(configParams));
 
         this.name = name;
@@ -103,16 +107,18 @@ public class Shard extends RaftActor {
 
         LOG.info("Shard created : {} persistent : {}", name, persistent);
 
-        store = InMemoryDOMDataStoreFactory.create(name.toString(), null);
+        store = InMemoryDOMDataStoreFactory.create(name.toString(), null, dataStoreProperties);
 
         shardMBean = ShardMBeanFactory.getShardStatsMBean(name.toString());
 
     }
 
-    private static Map<String, String> mapPeerAddresses(Map<ShardIdentifier, String> peerAddresses){
-        Map<String , String> map = new HashMap<>();
+    private static Map<String, String> mapPeerAddresses(
+        Map<ShardIdentifier, String> peerAddresses) {
+        Map<String, String> map = new HashMap<>();
 
-        for(Map.Entry<ShardIdentifier, String> entry : peerAddresses.entrySet()){
+        for (Map.Entry<ShardIdentifier, String> entry : peerAddresses
+            .entrySet()) {
             map.put(entry.getKey().toString(), entry.getValue());
         }
 
@@ -123,15 +129,17 @@ public class Shard extends RaftActor {
 
 
     public static Props props(final ShardIdentifier name,
-        final Map<ShardIdentifier, String> peerAddresses) {
+        final Map<ShardIdentifier, String> peerAddresses,
+        final InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
         Preconditions.checkNotNull(name, "name should not be null");
-        Preconditions.checkNotNull(peerAddresses, "peerAddresses should not be null");
+        Preconditions
+            .checkNotNull(peerAddresses, "peerAddresses should not be null");
 
         return Props.create(new Creator<Shard>() {
 
             @Override
             public Shard create() throws Exception {
-                return new Shard(name, peerAddresses);
+                return new Shard(name, peerAddresses, dataStoreProperties);
             }
 
         });
@@ -164,14 +172,16 @@ public class Shard extends RaftActor {
             }
         } else if (message instanceof PeerAddressResolved) {
             PeerAddressResolved resolved = (PeerAddressResolved) message;
-            setPeerAddress(resolved.getPeerId().toString(), resolved.getPeerAddress());
+            setPeerAddress(resolved.getPeerId().toString(),
+                resolved.getPeerAddress());
         } else {
             super.onReceiveCommand(message);
         }
     }
 
     private ActorRef createTypedTransactionActor(
-        CreateTransaction createTransaction, ShardTransactionIdentifier transactionId) {
+        CreateTransaction createTransaction,
+        ShardTransactionIdentifier transactionId) {
         if (createTransaction.getTransactionType()
             == TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
 
@@ -203,24 +213,26 @@ public class Shard extends RaftActor {
                     .props(store.newWriteOnlyTransaction(), getSelf(),
                         schemaContext), transactionId.toString());
         } else {
-            // FIXME: This does not seem right
             throw new IllegalArgumentException(
-                "CreateTransaction message has unidentified transaction type="
+                "Shard=" + name + ": CreateTransaction message has unidentified transaction type="
                     + createTransaction.getTransactionType());
         }
     }
 
     private void createTransaction(CreateTransaction createTransaction) {
 
-        ShardTransactionIdentifier transactionId = ShardTransactionIdentifier.builder().remoteTransactionId(createTransaction.getTransactionId()).build();
+        ShardTransactionIdentifier transactionId =
+            ShardTransactionIdentifier.builder()
+                .remoteTransactionId(createTransaction.getTransactionId())
+                .build();
         LOG.debug("Creating transaction : {} ", transactionId);
         ActorRef transactionActor =
             createTypedTransactionActor(createTransaction, transactionId);
 
         getSender()
             .tell(new CreateTransactionReply(
-                Serialization.serializedActorPath(transactionActor),
-                createTransaction.getTransactionId()).toSerializable(),
+                    Serialization.serializedActorPath(transactionActor),
+                    createTransaction.getTransactionId()).toSerializable(),
                 getSelf());
     }
 
@@ -255,22 +267,21 @@ public class Shard extends RaftActor {
 
         final ListenableFuture<Void> future = cohort.commit();
         final ActorRef self = getSelf();
-        future.addListener(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    future.get();
-                        sender
-                            .tell(new CommitTransactionReply().toSerializable(),
-                                self);
-                        shardMBean.incrementCommittedTransactionCount();
-                        shardMBean.setLastCommittedTransactionTime(new Date());
-                } catch (InterruptedException | ExecutionException e) {
-                    shardMBean.incrementFailedTransactionsCount();
-                    sender.tell(new akka.actor.Status.Failure(e),self);
-                }
+
+        Futures.addCallback(future, new FutureCallback<Void>() {
+            public void onSuccess(Void v) {
+                sender.tell(new CommitTransactionReply().toSerializable(), self);
+                shardMBean.incrementCommittedTransactionCount();
+                shardMBean.setLastCommittedTransactionTime(new Date());
             }
-        }, getContext().dispatcher());
+
+            public void onFailure(Throwable t) {
+                LOG.error(t, "An exception happened during commit");
+                shardMBean.incrementFailedTransactionsCount();
+                sender.tell(new akka.actor.Status.Failure(t), self);
+            }
+        });
+
     }
 
     private void handleForwardedCommit(ForwardedCommitTransaction message) {
@@ -329,7 +340,7 @@ public class Shard extends RaftActor {
 
         LOG.debug(
             "registerDataChangeListener sending reply, listenerRegistrationPath = {} "
-                , listenerRegistration.path().toString());
+            , listenerRegistration.path().toString());
 
         getSender()
             .tell(new RegisterChangeListenerReply(listenerRegistration.path()),
@@ -370,7 +381,7 @@ public class Shard extends RaftActor {
         // Update stats
         ReplicatedLogEntry lastLogEntry = getLastLogEntry();
 
-        if(lastLogEntry != null){
+        if (lastLogEntry != null) {
             shardMBean.setLastLogIndex(lastLogEntry.getIndex());
             shardMBean.setLastLogTerm(lastLogEntry.getTerm());
         }
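The commit handler above swaps a Runnable listener plus a blocking future.get() for Guava's Futures.addCallback, so success and failure are handled as separate callbacks without catching InterruptedException/ExecutionException. A generic sketch of that pattern, using illustrative names rather than the shard's actual fields:

    import com.google.common.util.concurrent.FutureCallback;
    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;

    final class CommitCallbackSketch {
        static void onCommitOutcome(ListenableFuture<Void> commitFuture,
                                    final Runnable replySuccess, final Runnable replyFailure) {
            Futures.addCallback(commitFuture, new FutureCallback<Void>() {
                @Override
                public void onSuccess(Void result) {
                    replySuccess.run();   // e.g. tell the sender CommitTransactionReply
                }

                @Override
                public void onFailure(Throwable t) {
                    replyFailure.run();   // e.g. tell the sender akka.actor.Status.Failure(t)
                }
            });
        }
    }

With this two-argument overload the callbacks run on the thread that completes the future, which replaces the explicit getContext().dispatcher() the old Runnable listener was scheduled on.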
index 6162a03..2972772 100644 (file)
@@ -30,6 +30,8 @@ import org.opendaylight.controller.cluster.datastore.messages.PeerAddressResolve
 import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
 import org.opendaylight.controller.cluster.datastore.messages.PrimaryNotFound;
 import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
+
 import scala.concurrent.duration.Duration;
 
 import java.util.ArrayList;
@@ -68,15 +70,19 @@ public class ShardManager extends AbstractUntypedActor {
 
     private ShardManagerInfoMBean mBean;
 
+    private final InMemoryDOMDataStoreConfigProperties dataStoreProperties;
+
     /**
      * @param type defines the kind of data that goes into shards created by this shard manager. Examples of type would be
      *             configuration or operational
      */
-    private ShardManager(String type, ClusterWrapper cluster, Configuration configuration) {
+    private ShardManager(String type, ClusterWrapper cluster, Configuration configuration,
+            InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
 
         this.type = Preconditions.checkNotNull(type, "type should not be null");
         this.cluster = Preconditions.checkNotNull(cluster, "cluster should not be null");
         this.configuration = Preconditions.checkNotNull(configuration, "configuration should not be null");
+        this.dataStoreProperties = dataStoreProperties;
 
         // Subscribe this actor to cluster member events
         cluster.subscribeToMemberEvents(getSelf());
@@ -88,7 +94,8 @@ public class ShardManager extends AbstractUntypedActor {
 
     public static Props props(final String type,
         final ClusterWrapper cluster,
-        final Configuration configuration) {
+        final Configuration configuration,
+        final InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
 
         Preconditions.checkNotNull(type, "type should not be null");
         Preconditions.checkNotNull(cluster, "cluster should not be null");
@@ -98,7 +105,7 @@ public class ShardManager extends AbstractUntypedActor {
 
             @Override
             public ShardManager create() throws Exception {
-                return new ShardManager(type, cluster, configuration);
+                return new ShardManager(type, cluster, configuration, dataStoreProperties);
             }
         });
     }
@@ -243,7 +250,7 @@ public class ShardManager extends AbstractUntypedActor {
             ShardIdentifier shardId = getShardIdentifier(memberName, shardName);
             Map<ShardIdentifier, String> peerAddresses = getPeerAddresses(shardName);
             ActorRef actor = getContext()
-                .actorOf(Shard.props(shardId, peerAddresses),
+                .actorOf(Shard.props(shardId, peerAddresses, dataStoreProperties),
                     shardId.toString());
             localShardActorNames.add(shardId.toString());
             localShards.put(shardName, new ShardInformation(shardName, actor, peerAddresses));
@@ -283,10 +290,17 @@ public class ShardManager extends AbstractUntypedActor {
 
     @Override
     public SupervisorStrategy supervisorStrategy() {
+
         return new OneForOneStrategy(10, Duration.create("1 minute"),
             new Function<Throwable, SupervisorStrategy.Directive>() {
                 @Override
                 public SupervisorStrategy.Directive apply(Throwable t) {
+                    StringBuilder sb = new StringBuilder();
+                    for(StackTraceElement element : t.getStackTrace()) {
+                       sb.append("\n\tat ")
+                         .append(element.toString());
+                    }
+                    LOG.warning("Supervisor Strategy of resume applied {}", sb.toString());
                     return SupervisorStrategy.resume();
                 }
             }
index 97bb196..49c7b7e 100644 (file)
@@ -53,7 +53,7 @@ public class ShardReadWriteTransaction extends ShardTransaction {
     } else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
       mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
     } else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
-      deleteData(transaction,DeleteData.fromSerizalizable(message));
+      deleteData(transaction,DeleteData.fromSerializable(message));
     } else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
       readyTransaction(transaction,new ReadyTransaction());
     } else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
index 91e578b..b01fe7d 100644 (file)
@@ -50,7 +50,7 @@ public class ShardWriteTransaction extends ShardTransaction {
     } else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
       mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
     } else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
-      deleteData(transaction,DeleteData.fromSerizalizable(message));
+      deleteData(transaction,DeleteData.fromSerializable(message));
     } else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
       readyTransaction(transaction,new ReadyTransaction());
     }else {
index 500b73c..34d3531 100644 (file)
@@ -14,6 +14,8 @@ import akka.actor.Props;
 import akka.event.Logging;
 import akka.event.LoggingAdapter;
 import akka.japi.Creator;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
@@ -26,8 +28,6 @@ import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransacti
 import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
 
-import java.util.concurrent.ExecutionException;
-
 public class ThreePhaseCommitCohort extends AbstractUntypedActor {
     private final DOMStoreThreePhaseCommitCohort cohort;
     private final ActorRef shardActor;
@@ -58,13 +58,17 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor {
 
     @Override
     public void handleReceive(Object message) throws Exception {
-        if (message.getClass().equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
+        if (message.getClass()
+            .equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
             canCommit(new CanCommitTransaction());
-        } else if (message.getClass().equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
+        } else if (message.getClass()
+            .equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
             preCommit(new PreCommitTransaction());
-        } else if (message.getClass().equals(CommitTransaction.SERIALIZABLE_CLASS)) {
+        } else if (message.getClass()
+            .equals(CommitTransaction.SERIALIZABLE_CLASS)) {
             commit(new CommitTransaction());
-        } else if (message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) {
+        } else if (message.getClass()
+            .equals(AbortTransaction.SERIALIZABLE_CLASS)) {
             abort(new AbortTransaction());
         } else {
             unknownMessage(message);
@@ -76,17 +80,19 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor {
         final ActorRef sender = getSender();
         final ActorRef self = getSelf();
 
-        future.addListener(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    future.get();
-                    sender.tell(new AbortTransactionReply().toSerializable(), self);
-                } catch (InterruptedException | ExecutionException e) {
-                    log.error(e, "An exception happened when aborting");
-                }
+        Futures.addCallback(future, new FutureCallback<Void>() {
+            public void onSuccess(Void v) {
+                sender
+                    .tell(new AbortTransactionReply().toSerializable(),
+                        self);
+            }
+
+            public void onFailure(Throwable t) {
+                LOG.error(t, "An exception happened during abort");
+                sender
+                    .tell(new akka.actor.Status.Failure(t), self);
             }
-        }, getContext().dispatcher());
+        });
     }
 
     private void commit(CommitTransaction message) {
@@ -103,18 +109,19 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor {
         final ListenableFuture<Void> future = cohort.preCommit();
         final ActorRef sender = getSender();
         final ActorRef self = getSelf();
+        Futures.addCallback(future, new FutureCallback<Void>() {
+            public void onSuccess(Void v) {
+                sender
+                    .tell(new PreCommitTransactionReply().toSerializable(),
+                        self);
+            }
 
-        future.addListener(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    future.get();
-                    sender.tell(new PreCommitTransactionReply().toSerializable(), self);
-                } catch (InterruptedException | ExecutionException e) {
-                    log.error(e, "An exception happened when preCommitting");
-                }
+            public void onFailure(Throwable t) {
+                LOG.error(t, "An exception happened during pre-commit");
+                sender
+                    .tell(new akka.actor.Status.Failure(t), self);
             }
-        }, getContext().dispatcher());
+        });
 
     }
 
@@ -122,18 +129,19 @@ public class ThreePhaseCommitCohort extends AbstractUntypedActor {
         final ListenableFuture<Boolean> future = cohort.canCommit();
         final ActorRef sender = getSender();
         final ActorRef self = getSelf();
+        Futures.addCallback(future, new FutureCallback<Boolean>() {
+            public void onSuccess(Boolean canCommit) {
+                sender.tell(new CanCommitTransactionReply(canCommit)
+                    .toSerializable(), self);
+            }
 
-        future.addListener(new Runnable() {
-            @Override
-            public void run() {
-                try {
-                    Boolean canCommit = future.get();
-                    sender.tell(new CanCommitTransactionReply(canCommit).toSerializable(), self);
-                } catch (InterruptedException | ExecutionException e) {
-                    log.error(e, "An exception happened when checking canCommit");
-                }
+            public void onFailure(Throwable t) {
+                LOG.error(t, "An exception happened during canCommit");
+                sender
+                    .tell(new akka.actor.Status.Failure(t), self);
             }
-        }, getContext().dispatcher());
+        });
+
 
     }
 }
index 5b44794..fc455b1 100644 (file)
@@ -10,11 +10,13 @@ package org.opendaylight.controller.cluster.datastore;
 
 import akka.actor.ActorPath;
 import akka.actor.ActorSelection;
+import akka.dispatch.Futures;
+import akka.dispatch.OnComplete;
 
+import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.SettableFuture;
 
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
 import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
@@ -28,124 +30,156 @@ import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCoh
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import scala.concurrent.Future;
+
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Callable;
 
 /**
  * ThreePhaseCommitCohortProxy represents a set of remote cohort proxies
  */
-public class ThreePhaseCommitCohortProxy implements
-    DOMStoreThreePhaseCommitCohort{
+public class ThreePhaseCommitCohortProxy implements DOMStoreThreePhaseCommitCohort{
 
-    private static final Logger
-        LOG = LoggerFactory.getLogger(DistributedDataStore.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
 
     private final ActorContext actorContext;
     private final List<ActorPath> cohortPaths;
-    private final ListeningExecutorService executor;
     private final String transactionId;
 
-
-    public ThreePhaseCommitCohortProxy(ActorContext actorContext,
-        List<ActorPath> cohortPaths,
-        String transactionId,
-        ListeningExecutorService executor) {
-
+    public ThreePhaseCommitCohortProxy(ActorContext actorContext, List<ActorPath> cohortPaths,
+            String transactionId) {
         this.actorContext = actorContext;
         this.cohortPaths = cohortPaths;
         this.transactionId = transactionId;
-        this.executor = executor;
     }
 
-    @Override public ListenableFuture<Boolean> canCommit() {
+    @Override
+    public ListenableFuture<Boolean> canCommit() {
         LOG.debug("txn {} canCommit", transactionId);
-        Callable<Boolean> call = new Callable<Boolean>() {
 
+        Future<Iterable<Object>> combinedFuture =
+                invokeCohorts(new CanCommitTransaction().toSerializable());
+
+        final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+        combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
             @Override
-            public Boolean call() throws Exception {
-                for(ActorPath actorPath : cohortPaths){
-
-                    Object message = new CanCommitTransaction().toSerializable();
-                    LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
-                    ActorSelection cohort = actorContext.actorSelection(actorPath);
-
-                    try {
-                        Object response =
-                                actorContext.executeRemoteOperation(cohort,
-                                        message,
-                                        ActorContext.ASK_DURATION);
-
-                        if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
-                            CanCommitTransactionReply reply =
-                                    CanCommitTransactionReply.fromSerializable(response);
-                            if (!reply.getCanCommit()) {
-                                return false;
-                            }
+            public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+                if(failure != null) {
+                    returnFuture.setException(failure);
+                    return;
+                }
+
+                boolean result = true;
+                for(Object response: responses) {
+                    if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
+                        CanCommitTransactionReply reply =
+                                CanCommitTransactionReply.fromSerializable(response);
+                        if (!reply.getCanCommit()) {
+                            result = false;
+                            break;
                         }
-                    } catch(RuntimeException e){
-                        // FIXME : Need to properly handle this
-                        LOG.error("Unexpected Exception", e);
-                        return false;
+                    } else {
+                        LOG.error("Unexpected response type {}", response.getClass());
+                        returnFuture.setException(new IllegalArgumentException(
+                                String.format("Unexpected response type %s", response.getClass())));
+                        return;
                     }
                 }
 
-                return true;
+                returnFuture.set(Boolean.valueOf(result));
             }
-        };
+        }, actorContext.getActorSystem().dispatcher());
+
+        return returnFuture;
+    }
+
+    private Future<Iterable<Object>> invokeCohorts(Object message) {
+        List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
+        for(ActorPath actorPath : cohortPaths) {
+
+            LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
 
-        return executor.submit(call);
+            ActorSelection cohort = actorContext.actorSelection(actorPath);
+
+            futureList.add(actorContext.executeRemoteOperationAsync(cohort, message,
+                    ActorContext.ASK_DURATION));
+        }
+
+        return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
     }
 
-    @Override public ListenableFuture<Void> preCommit() {
+    @Override
+    public ListenableFuture<Void> preCommit() {
         LOG.debug("txn {} preCommit", transactionId);
-        return voidOperation(new PreCommitTransaction().toSerializable(), PreCommitTransactionReply.SERIALIZABLE_CLASS);
+        return voidOperation(new PreCommitTransaction().toSerializable(),
+                PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
     }
 
-    @Override public ListenableFuture<Void> abort() {
+    @Override
+    public ListenableFuture<Void> abort() {
         LOG.debug("txn {} abort", transactionId);
-        return voidOperation(new AbortTransaction().toSerializable(), AbortTransactionReply.SERIALIZABLE_CLASS);
+
+        // Note - we pass false for propagateException. In the front-end data broker, this method
+        // is called when one of the 3 phases fails with an exception. We'd rather have that
+        // original exception propagated to the client. If our abort fails and we propagate the
+        // exception then that exception will supersede and suppress the original exception. But
+        // it's the original exception that is the root cause and of more interest to the client.
+
+        return voidOperation(new AbortTransaction().toSerializable(),
+                AbortTransactionReply.SERIALIZABLE_CLASS, false);
     }
 
-    @Override public ListenableFuture<Void> commit() {
+    @Override
+    public ListenableFuture<Void> commit() {
         LOG.debug("txn {} commit", transactionId);
-        return voidOperation(new CommitTransaction().toSerializable(), CommitTransactionReply.SERIALIZABLE_CLASS);
+        return voidOperation(new CommitTransaction().toSerializable(),
+                CommitTransactionReply.SERIALIZABLE_CLASS, true);
     }
 
-    private ListenableFuture<Void> voidOperation(final Object message, final Class expectedResponseClass){
-        Callable<Void> call = new Callable<Void>() {
-
-            @Override public Void call() throws Exception {
-                for(ActorPath actorPath : cohortPaths){
-                    ActorSelection cohort = actorContext.actorSelection(actorPath);
-
-                    LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
-                    try {
-                        Object response =
-                            actorContext.executeRemoteOperation(cohort,
-                                message,
-                                ActorContext.ASK_DURATION);
-
-                        if (response != null && !response.getClass()
-                            .equals(expectedResponseClass)) {
-                            throw new RuntimeException(
-                                String.format(
-                                    "did not get the expected response \n\t\t expected : %s \n\t\t actual   : %s",
-                                    expectedResponseClass.toString(),
-                                    response.getClass().toString())
-                            );
+    private ListenableFuture<Void> voidOperation(final Object message,
+            final Class<?> expectedResponseClass, final boolean propagateException) {
+
+        Future<Iterable<Object>> combinedFuture = invokeCohorts(message);
+
+        final SettableFuture<Void> returnFuture = SettableFuture.create();
+
+        combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
+            @Override
+            public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+
+                Throwable exceptionToPropagate = failure;
+                if(exceptionToPropagate == null) {
+                    for(Object response: responses) {
+                        if(!response.getClass().equals(expectedResponseClass)) {
+                            exceptionToPropagate = new IllegalArgumentException(
+                                    String.format("Unexpected response type %s",
+                                            response.getClass()));
+                            break;
                         }
-                    } catch(TimeoutException e){
-                        LOG.error(String.format("A timeout occurred when processing operation : %s", message));
                     }
                 }
-                return null;
+
+                if(exceptionToPropagate != null) {
+                    if(propagateException) {
+                        // We don't log the exception here to avoid redundant logging since we're
+                        // propagating to the caller in MD-SAL core who will log it.
+                        returnFuture.setException(exceptionToPropagate);
+                    } else {
+                        // Since the caller doesn't want us to propagate the exception we'll also
+                        // not log it normally. But it's usually not good to totally silence
+                        // exceptions so we'll log it to debug level.
+                        LOG.debug(String.format("%s failed",  message.getClass().getSimpleName()),
+                                exceptionToPropagate);
+                        returnFuture.set(null);
+                    }
+                } else {
+                    returnFuture.set(null);
+                }
             }
-        };
+        }, actorContext.getActorSystem().dispatcher());
 
-        return executor.submit(call);
+        return returnFuture;
     }
 
     public List<ActorPath> getCohortPaths() {
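The proxy above now asks every cohort asynchronously, combines the per-cohort Scala futures with akka.dispatch.Futures.sequence, and bridges the combined result back to the Guava future the DOMStore API expects via a SettableFuture, propagating or suppressing failures depending on the phase (see the propagateException note in abort()). A stripped-down sketch of that bridge, with hypothetical names:

    import akka.actor.ActorSystem;
    import akka.dispatch.Futures;
    import akka.dispatch.OnComplete;
    import com.google.common.util.concurrent.SettableFuture;
    import scala.concurrent.Future;
    import java.util.List;

    final class CohortAggregationSketch {
        static SettableFuture<Void> whenAllCohortsReply(ActorSystem system,
                List<Future<Object>> cohortReplies) {
            final SettableFuture<Void> bridged = SettableFuture.create();
            Future<Iterable<Object>> combined =
                    Futures.sequence(cohortReplies, system.dispatcher());
            combined.onComplete(new OnComplete<Iterable<Object>>() {
                @Override
                public void onComplete(Throwable failure, Iterable<Object> replies) {
                    if (failure != null) {
                        bridged.setException(failure);  // any ask failure fails the batch
                    } else {
                        bridged.set(null);              // all cohorts answered
                    }
                }
            }, system.dispatcher());
            return bridged;
        }
    }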
index 5e9defa..c4ec760 100644 (file)
@@ -15,39 +15,34 @@ import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
 import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 
-import com.google.common.util.concurrent.ListeningExecutorService;
-
 /**
  * TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard
  */
 public class TransactionChainProxy implements DOMStoreTransactionChain{
     private final ActorContext actorContext;
-    private final ListeningExecutorService transactionExecutor;
     private final SchemaContext schemaContext;
 
-    public TransactionChainProxy(ActorContext actorContext, ListeningExecutorService transactionExecutor,
-            SchemaContext schemaContext) {
+    public TransactionChainProxy(ActorContext actorContext, SchemaContext schemaContext) {
         this.actorContext = actorContext;
-        this.transactionExecutor = transactionExecutor;
         this.schemaContext = schemaContext;
     }
 
     @Override
     public DOMStoreReadTransaction newReadOnlyTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.READ_ONLY, schemaContext);
     }
 
     @Override
     public DOMStoreReadWriteTransaction newReadWriteTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.WRITE_ONLY, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
     }
 
     @Override
     public DOMStoreWriteTransaction newWriteOnlyTransaction() {
         return new TransactionProxy(actorContext,
-            TransactionProxy.TransactionType.READ_WRITE, transactionExecutor, schemaContext);
+            TransactionProxy.TransactionType.READ_WRITE, schemaContext);
     }
 
     @Override
index 95862ae..5b5b129 100644 (file)
@@ -12,13 +12,14 @@ import akka.actor.ActorPath;
 import akka.actor.ActorRef;
 import akka.actor.ActorSelection;
 import akka.actor.Props;
+import akka.dispatch.OnComplete;
+
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.CheckedFuture;
 import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import com.google.common.util.concurrent.SettableFuture;
+
 import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
 import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
 import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
@@ -44,11 +45,12 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import scala.concurrent.Future;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -80,25 +82,22 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     private final ActorContext actorContext;
     private final Map<String, TransactionContext> remoteTransactionPaths = new HashMap<>();
     private final TransactionIdentifier identifier;
-    private final ListeningExecutorService executor;
     private final SchemaContext schemaContext;
+    private boolean inReadyState;
 
-    public TransactionProxy(
-        ActorContext actorContext,
-        TransactionType transactionType,
-        ListeningExecutorService executor,
-        SchemaContext schemaContext
-    ) {
+    public TransactionProxy(ActorContext actorContext, TransactionType transactionType,
+            SchemaContext schemaContext) {
         this.actorContext = Preconditions.checkNotNull(actorContext, "actorContext should not be null");
         this.transactionType = Preconditions.checkNotNull(transactionType, "transactionType should not be null");
-        this.executor = Preconditions.checkNotNull(executor, "executor should not be null");
         this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null");
 
         String memberName = actorContext.getCurrentMemberName();
         if(memberName == null){
             memberName = "UNKNOWN-MEMBER";
         }
-        this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(counter.getAndIncrement()).build();
+
+        this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(
+                counter.getAndIncrement()).build();
 
         LOG.debug("Created txn {}", identifier);
 
@@ -108,6 +107,9 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read(
             final YangInstanceIdentifier path) {
 
+        Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+                "Read operation on write-only transaction is not allowed");
+
         LOG.debug("txn {} read {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -115,8 +117,12 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         return transactionContext(path).readData(path);
     }
 
-    @Override public CheckedFuture<Boolean, ReadFailedException> exists(
-        YangInstanceIdentifier path) {
+    @Override
+    public CheckedFuture<Boolean, ReadFailedException> exists(YangInstanceIdentifier path) {
+
+        Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+                "Exists operation on write-only transaction is not allowed");
+
         LOG.debug("txn {} exists {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -124,9 +130,18 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         return transactionContext(path).dataExists(path);
     }
 
+    private void checkModificationState() {
+        Preconditions.checkState(transactionType != TransactionType.READ_ONLY,
+                "Modification operation on read-only transaction is not allowed");
+        Preconditions.checkState(!inReadyState,
+                "Transaction is sealed - further modifications are not allowed");
+    }
+
     @Override
     public void write(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
 
+        checkModificationState();
+
         LOG.debug("txn {} write {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -137,6 +152,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     @Override
     public void merge(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
 
+        checkModificationState();
+
         LOG.debug("txn {} merge {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -147,6 +164,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
     @Override
     public void delete(YangInstanceIdentifier path) {
 
+        checkModificationState();
+
         LOG.debug("txn {} delete {}", identifier, path);
 
         createTransactionIfMissing(actorContext, path);
@@ -156,25 +175,36 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
 
     @Override
     public DOMStoreThreePhaseCommitCohort ready() {
+
+        checkModificationState();
+
+        inReadyState = true;
+
         List<ActorPath> cohortPaths = new ArrayList<>();
 
-        LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier, remoteTransactionPaths.size());
+        LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier,
+                remoteTransactionPaths.size());
 
         for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
 
-            LOG.debug("txn {} Readying transaction for shard {}", identifier, transactionContext.getShardName());
+            LOG.debug("txn {} Readying transaction for shard {}", identifier,
+                    transactionContext.getShardName());
 
             Object result = transactionContext.readyTransaction();
 
             if(result.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)){
-                ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(actorContext.getActorSystem(),result);
-                String resolvedCohortPath = transactionContext
-                    .getResolvedCohortPath(reply.getCohortPath().toString());
+                ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
+                        actorContext.getActorSystem(),result);
+                String resolvedCohortPath = transactionContext.getResolvedCohortPath(
+                        reply.getCohortPath().toString());
                 cohortPaths.add(actorContext.actorFor(resolvedCohortPath));
+            } else {
+                LOG.error("Was expecting {} but got {}", ReadyTransactionReply.SERIALIZABLE_CLASS,
+                        result.getClass());
             }
         }
 
-        return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString(), executor);
+        return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString());
     }
 
     @Override
@@ -213,8 +243,7 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             Object response = actorContext.executeShardOperation(shardName,
                 new CreateTransaction(identifier.toString(),this.transactionType.ordinal() ).toSerializable(),
                 ActorContext.ASK_DURATION);
-            if (response.getClass()
-                .equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
+            if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
                 CreateTransactionReply reply =
                     CreateTransactionReply.fromSerializable(response);
 
@@ -229,11 +258,13 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
                         transactionActor);
 
                 remoteTransactionPaths.put(shardName, transactionContext);
+            } else {
+                LOG.error("Was expecting {} but got {}", CreateTransactionReply.SERIALIZABLE_CLASS,
+                        response.getClass());
             }
-        } catch(TimeoutException | PrimaryNotFoundException e){
+        } catch(Exception e){
             LOG.error("txn {} Creating NoOpTransaction because of : {}", identifier, e.getMessage());
-            remoteTransactionPaths.put(shardName,
-                new NoOpTransactionContext(shardName));
+            remoteTransactionPaths.put(shardName, new NoOpTransactionContext(shardName, e));
         }
     }
 
@@ -272,7 +303,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             this.actor = actor;
         }
 
-        @Override public String getShardName() {
+        @Override
+        public String getShardName() {
             return shardName;
         }
 
@@ -280,96 +312,105 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             return actor;
         }
 
-        @Override public String getResolvedCohortPath(String cohortPath) {
+        @Override
+        public String getResolvedCohortPath(String cohortPath) {
             return actorContext.resolvePath(actorPath, cohortPath);
         }
 
-        @Override public void closeTransaction() {
-            getActor().tell(
-                new CloseTransaction().toSerializable(), null);
+        @Override
+        public void closeTransaction() {
+            actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
         }
 
-        @Override public Object readyTransaction() {
+        @Override
+        public Object readyTransaction() {
             return actorContext.executeRemoteOperation(getActor(),
-                new ReadyTransaction().toSerializable(),
-                ActorContext.ASK_DURATION
-            );
-
+                    new ReadyTransaction().toSerializable(), ActorContext.ASK_DURATION);
         }
 
-        @Override public void deleteData(YangInstanceIdentifier path) {
-            getActor().tell(new DeleteData(path).toSerializable(), null);
+        @Override
+        public void deleteData(YangInstanceIdentifier path) {
+            actorContext.sendRemoteOperationAsync(getActor(), new DeleteData(path).toSerializable() );
         }
 
-        @Override public void mergeData(YangInstanceIdentifier path,
-            NormalizedNode<?, ?> data) {
-            getActor()
-                .tell(new MergeData(path, data, schemaContext).toSerializable(),
-                    null);
+        @Override
+        public void mergeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+            actorContext.sendRemoteOperationAsync(getActor(),
+                    new MergeData(path, data, schemaContext).toSerializable());
         }
 
         @Override
         public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
             final YangInstanceIdentifier path) {
 
-            Callable<Optional<NormalizedNode<?, ?>>> call =
-                new Callable<Optional<NormalizedNode<?, ?>>>() {
-
-                    @Override public Optional<NormalizedNode<?, ?>> call()
-                        throws Exception {
-                        Object response = actorContext
-                            .executeRemoteOperation(getActor(),
-                                new ReadData(path).toSerializable(),
-                                ActorContext.ASK_DURATION);
-                        if (response.getClass()
-                            .equals(ReadDataReply.SERIALIZABLE_CLASS)) {
-                            ReadDataReply reply = ReadDataReply
-                                .fromSerializable(schemaContext, path,
-                                    response);
+            final SettableFuture<Optional<NormalizedNode<?, ?>>> returnFuture = SettableFuture.create();
+
+            OnComplete<Object> onComplete = new OnComplete<Object>() {
+                @Override
+                public void onComplete(Throwable failure, Object response) throws Throwable {
+                    if(failure != null) {
+                        returnFuture.setException(new ReadFailedException(
+                                "Error reading data for path " + path, failure));
+                    } else {
+                        if (response.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
+                            ReadDataReply reply = ReadDataReply.fromSerializable(schemaContext,
+                                    path, response);
                             if (reply.getNormalizedNode() == null) {
-                                return Optional.absent();
+                                returnFuture.set(Optional.<NormalizedNode<?, ?>>absent());
+                            } else {
+                                returnFuture.set(Optional.<NormalizedNode<?, ?>>of(
+                                        reply.getNormalizedNode()));
                             }
-                            return Optional.<NormalizedNode<?, ?>>of(
-                                reply.getNormalizedNode());
+                        } else {
+                            returnFuture.setException(new ReadFailedException(
+                                    "Invalid response reading data for path " + path));
                         }
-
-                        throw new ReadFailedException("Read Failed " + path);
                     }
-                };
+                }
+            };
 
-            return MappingCheckedFuture
-                .create(executor.submit(call), ReadFailedException.MAPPER);
-        }
+            Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+                    new ReadData(path).toSerializable(), ActorContext.ASK_DURATION);
+            future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
 
-        @Override public void writeData(YangInstanceIdentifier path,
-            NormalizedNode<?, ?> data) {
-            getActor()
-                .tell(new WriteData(path, data, schemaContext).toSerializable(),
-                    null);
+            return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
         }
 
-        @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
-            final YangInstanceIdentifier path) {
-
-            Callable<Boolean> call = new Callable<Boolean>() {
-
-                @Override public Boolean call() throws Exception {
-                    Object o = actorContext.executeRemoteOperation(getActor(),
-                        new DataExists(path).toSerializable(),
-                        ActorContext.ASK_DURATION
-                    );
-
+        @Override
+        public void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+            actorContext.sendRemoteOperationAsync(getActor(),
+                    new WriteData(path, data, schemaContext).toSerializable());
+        }
 
-                    if (DataExistsReply.SERIALIZABLE_CLASS
-                        .equals(o.getClass())) {
-                        return DataExistsReply.fromSerializable(o).exists();
+        @Override
+        public CheckedFuture<Boolean, ReadFailedException> dataExists(
+                final YangInstanceIdentifier path) {
+
+            final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+            OnComplete<Object> onComplete = new OnComplete<Object>() {
+                @Override
+                public void onComplete(Throwable failure, Object response) throws Throwable {
+                    if(failure != null) {
+                        returnFuture.setException(new ReadFailedException(
+                                "Error checking exists for path " + path, failure));
+                    } else {
+                        if (response.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) {
+                            returnFuture.set(Boolean.valueOf(DataExistsReply.
+                                        fromSerializable(response).exists()));
+                        } else {
+                            returnFuture.setException(new ReadFailedException(
+                                    "Invalid response checking exists for path " + path));
+                        }
                     }
-
-                    throw new ReadFailedException("Exists Failed " + path);
                 }
             };
-            return MappingCheckedFuture
-                .create(executor.submit(call), ReadFailedException.MAPPER);
+
+            Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+                    new DataExists(path).toSerializable(), ActorContext.ASK_DURATION);
+            future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
+
+            return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
         }
     }
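
Editor's note on the pattern used in the readData()/dataExists() rewrites above: the blocking Callable submitted to an executor is replaced by Patterns.ask plus an akka.dispatch.OnComplete callback that completes a Guava SettableFuture, which MappingCheckedFuture then exposes as a CheckedFuture. The sketch below shows only that bridging step in isolation; the class and method names (AskBridgeSketch, bridgeAsk) are hypothetical and not part of this commit.

    import akka.actor.ActorSelection;
    import akka.dispatch.OnComplete;
    import akka.pattern.Patterns;
    import akka.util.Timeout;

    import com.google.common.util.concurrent.SettableFuture;

    import scala.concurrent.ExecutionContext;
    import scala.concurrent.Future;

    public final class AskBridgeSketch {

        // Hypothetical helper mirroring readData()/dataExists() above: ask the remote
        // actor, then complete a Guava SettableFuture from the Akka OnComplete callback
        // instead of blocking a thread while waiting for the reply.
        public static SettableFuture<Object> bridgeAsk(ActorSelection actor, Object message,
                Timeout timeout, ExecutionContext dispatcher) {

            final SettableFuture<Object> returnFuture = SettableFuture.create();

            Future<Object> askFuture = Patterns.ask(actor, message, timeout);
            askFuture.onComplete(new OnComplete<Object>() {
                @Override
                public void onComplete(Throwable failure, Object response) {
                    if (failure != null) {
                        returnFuture.setException(failure); // ask timed out or failed
                    } else {
                        returnFuture.set(response);         // reply from the remote actor
                    }
                }
            }, dispatcher);

            return returnFuture;
        }

        private AskBridgeSketch() {
        }
    }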
 
@@ -379,22 +420,28 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             LOG = LoggerFactory.getLogger(NoOpTransactionContext.class);
 
         private final String shardName;
+        private final Exception failure;
 
         private ActorRef cohort;
 
-        public NoOpTransactionContext(String shardName){
+        public NoOpTransactionContext(String shardName, Exception failure){
             this.shardName = shardName;
+            this.failure = failure;
         }
-        @Override public String getShardName() {
+
+        @Override
+        public String getShardName() {
             return  shardName;
 
         }
 
-        @Override public String getResolvedCohortPath(String cohortPath) {
+        @Override
+        public String getResolvedCohortPath(String cohortPath) {
             return cohort.path().toString();
         }
 
-        @Override public void closeTransaction() {
+        @Override
+        public void closeTransaction() {
             LOG.warn("txn {} closeTransaction called", identifier);
         }
 
@@ -404,11 +451,13 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
             return new ReadyTransactionReply(cohort.path()).toSerializable();
         }
 
-        @Override public void deleteData(YangInstanceIdentifier path) {
+        @Override
+        public void deleteData(YangInstanceIdentifier path) {
             LOG.warn("txt {} deleteData called path = {}", identifier, path);
         }
 
-        @Override public void mergeData(YangInstanceIdentifier path,
+        @Override
+        public void mergeData(YangInstanceIdentifier path,
             NormalizedNode<?, ?> data) {
             LOG.warn("txn {} mergeData called path = {}", identifier, path);
         }
@@ -417,8 +466,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
             YangInstanceIdentifier path) {
             LOG.warn("txn {} readData called path = {}", identifier, path);
-            return Futures.immediateCheckedFuture(
-                Optional.<NormalizedNode<?, ?>>absent());
+            return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+                    "Error reading data for path " + path, failure));
         }
 
         @Override public void writeData(YangInstanceIdentifier path,
@@ -429,10 +478,8 @@ public class TransactionProxy implements DOMStoreReadWriteTransaction {
         @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
             YangInstanceIdentifier path) {
             LOG.warn("txn {} dataExists called path = {}", identifier, path);
-
-            // Returning false instead of an exception to keep this aligned with
-            // read
-            return Futures.immediateCheckedFuture(false);
+            return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+                    "Error checking exists for path " + path, failure));
         }
     }
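
Editor's note on the NoOpTransactionContext change above: a failed transaction setup is now reported to the caller instead of masquerading as empty data. A minimal, self-contained illustration of what the caller observes, using only Guava; the ReadFailedException below is a toy stand-in for the real one, and the class name is hypothetical.

    import com.google.common.util.concurrent.CheckedFuture;
    import com.google.common.util.concurrent.Futures;

    public final class FailedCheckedFutureSketch {

        // Toy stand-in for the real ReadFailedException.
        public static class ReadFailedException extends Exception {
            public ReadFailedException(String message, Throwable cause) {
                super(message, cause);
            }
        }

        public static void main(String[] args) {
            Exception original = new IllegalStateException("shard for path not found");

            // What NoOpTransactionContext.dataExists()/readData() now return: an
            // already-failed future carrying the failure that prevented the remote
            // transaction from being created.
            CheckedFuture<Boolean, ReadFailedException> existsFuture =
                    Futures.immediateFailedCheckedFuture(
                            new ReadFailedException("Error checking exists", original));

            try {
                existsFuture.checkedGet();
            } catch (ReadFailedException e) {
                // The caller now sees the root cause instead of a silent absent()/false.
                System.out.println("exists check failed: " + e.getCause());
            }
        }
    }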
 
index 17861a5..9ae851e 100644 (file)
@@ -31,7 +31,7 @@ public class DeleteData implements SerializableMessage {
             .setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path)).build();
     }
 
-    public static DeleteData fromSerizalizable(Object serializable){
+    public static DeleteData fromSerializable(Object serializable){
         ShardTransactionMessages.DeleteData o = (ShardTransactionMessages.DeleteData) serializable;
         return new DeleteData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
     }
index 4706c66..e12a966 100644 (file)
@@ -14,6 +14,7 @@ import akka.actor.ActorSelection;
 import akka.actor.ActorSystem;
 import akka.actor.PoisonPill;
 import akka.util.Timeout;
+
 import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
 import org.opendaylight.controller.cluster.datastore.Configuration;
 import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
@@ -22,9 +23,9 @@ import org.opendaylight.controller.cluster.datastore.messages.FindLocalShard;
 import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
 import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
 import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import scala.concurrent.Await;
 import scala.concurrent.Future;
 import scala.concurrent.duration.Duration;
@@ -54,8 +55,6 @@ public class ActorContext {
     private final ClusterWrapper clusterWrapper;
     private final Configuration configuration;
 
-    private SchemaContext schemaContext = null;
-
     public ActorContext(ActorSystem actorSystem, ActorRef shardManager,
         ClusterWrapper clusterWrapper,
         Configuration configuration) {
@@ -174,6 +173,33 @@ public class ActorContext {
         }
     }
 
+    /**
+     * Execute an operation on a remote actor asynchronously.
+     *
+     * @param actor the ActorSelection
+     * @param message the message to send
+     * @param duration the maximum amount of time to wait for a reply to the message
+     * @return a Future containing the eventual result
+     */
+    public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message,
+            FiniteDuration duration) {
+
+        LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
+
+        return ask(actor, message, new Timeout(duration));
+    }
+
+    /**
+     * Sends an operation to be executed by a remote actor asynchronously without waiting for a
+     * reply (essentially fire-and-forget).
+     *
+     * @param actor the ActorSelection
+     * @param message the message to send
+     */
+    public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
+        actor.tell(message, ActorRef.noSender());
+    }
+
     /**
      * Execute an operation on the primary for a given shard
      * <p>
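
Editor's note: a brief, hypothetical usage sketch of the two new ActorContext methods added above. executeRemoteOperationAsync wraps Patterns.ask and returns a reply Future bounded by the given duration, while sendRemoteOperationAsync is a plain tell with ActorRef.noSender() and no reply. The surrounding class and the message strings are illustrative only; ActorContext is assumed to be the class modified above (presumably org.opendaylight.controller.cluster.datastore.utils.ActorContext), with its import omitted here.

    import akka.actor.ActorSelection;

    import scala.concurrent.Future;

    public final class ActorContextUsageSketch {

        // ActorContext is the class modified in the hunk above; its import is omitted
        // in this sketch.
        public static Future<Object> use(ActorContext actorContext, ActorSelection shardTxActor) {

            // Ask-style: a reply is expected and is delivered through the returned Scala Future.
            Future<Object> reply = actorContext.executeRemoteOperationAsync(
                    shardTxActor, "hypothetical-read-message", ActorContext.ASK_DURATION);

            // Tell-style: fire-and-forget; the message is sent with no sender and no
            // reply is ever delivered.
            actorContext.sendRemoteOperationAsync(shardTxActor, "hypothetical-write-message");

            return reply;
        }

        private ActorContextUsageSketch() {
        }
    }
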
index 87a621f..592bc49 100644 (file)
@@ -1,6 +1,7 @@
 package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
 
 import org.opendaylight.controller.cluster.datastore.DistributedDataStoreFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
 
 public class DistributedConfigDataStoreProviderModule extends
     org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedConfigDataStoreProviderModule {
@@ -25,8 +26,10 @@ public class DistributedConfigDataStoreProviderModule extends
 
     @Override
     public java.lang.AutoCloseable createInstance() {
-        return DistributedDataStoreFactory
-            .createInstance("config", getConfigSchemaServiceDependency());
+        return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(),
+                InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
+                        getMaxShardDataChangeExecutorQueueSize(),
+                        getMaxShardDataChangeListenerQueueSize()));
     }
 
 }
index 6af2748..9eb72d6 100644 (file)
@@ -1,6 +1,7 @@
 package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
 
 import org.opendaylight.controller.cluster.datastore.DistributedDataStoreFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
 
 public class DistributedOperationalDataStoreProviderModule extends
     org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedOperationalDataStoreProviderModule {
@@ -25,8 +26,11 @@ public class DistributedOperationalDataStoreProviderModule extends
 
     @Override
     public java.lang.AutoCloseable createInstance() {
-        return DistributedDataStoreFactory
-            .createInstance("operational", getOperationalSchemaServiceDependency());
+        return DistributedDataStoreFactory.createInstance("operational",
+                getOperationalSchemaServiceDependency(),
+                InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
+                        getMaxShardDataChangeExecutorQueueSize(),
+                        getMaxShardDataChangeListenerQueueSize()));
     }
 
 }
index 6f355cb..ecb823e 100644 (file)
@@ -41,28 +41,64 @@ module distributed-datastore-provider {
         case distributed-config-datastore-provider {
             when "/config:modules/config:module/config:type = 'distributed-config-datastore-provider'";
             container config-schema-service {
-                          uses config:service-ref {
-                               refine type {
-                                      mandatory false;
-                                      config:required-identity sal:schema-service;
-                                }
-                          }
-                        }
+                uses config:service-ref {
+                    refine type {
+                        mandatory false;
+                        config:required-identity sal:schema-service;
+                    }
+                }
+            }
+
+            leaf max-shard-data-change-executor-queue-size {
+                default 1000;
+                type uint16;
+                description "The maximum queue size for each shard's data store data change notification executor.";
+            }
+
+            leaf max-shard-data-change-executor-pool-size {
+                default 20;
+                type uint16;
+                description "The maximum thread pool size for each shard's data store data change notification executor.";
+            }
+
+            leaf max-shard-data-change-listener-queue-size {
+                default 1000;
+                type uint16;
+                description "The maximum queue size for each shard's data store data change listeners.";
+            }
         }
     }
 
     // Augments the 'configuration' choice node under modules/module.
-        augment "/config:modules/config:module/config:configuration" {
-            case distributed-operational-datastore-provider {
-                when "/config:modules/config:module/config:type = 'distributed-operational-datastore-provider'";
+    augment "/config:modules/config:module/config:configuration" {
+        case distributed-operational-datastore-provider {
+            when "/config:modules/config:module/config:type = 'distributed-operational-datastore-provider'";
                 container operational-schema-service {
-                              uses config:service-ref {
-                                   refine type {
-                                          mandatory false;
-                                          config:required-identity sal:schema-service;
-                                    }
-                              }
-                            }
+                    uses config:service-ref {
+                        refine type {
+                            mandatory false;
+                            config:required-identity sal:schema-service;
+                        }
+                    }
+                }
+
+            leaf max-shard-data-change-executor-queue-size {
+                default 1000;
+                type uint16;
+                description "The maximum queue size for each shard's data store data change notification executor.";
+            }
+
+            leaf max-shard-data-change-executor-pool-size {
+                default 20;
+                type uint16;
+                description "The maximum thread pool size for each shard's data store data change notification executor.";
+            }
+
+            leaf max-shard-data-change-listener-queue-size {
+                default 1000;
+                type uint16;
+                description "The maximum queue size for each shard's data store data change listeners.";
+            }
             }
         }
 }
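
Editor's note: for reference, a sketch of how the new leaves map onto InMemoryDOMDataStoreConfigProperties when the YANG defaults above are left untouched (pool size 20, executor queue 1000, listener queue 1000). The argument order follows the createInstance() changes earlier in this commit; the wrapper class name is hypothetical.

    import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;

    public final class DatastoreConfigDefaultsSketch {

        public static InMemoryDOMDataStoreConfigProperties defaults() {
            // Same argument order as DistributedConfigDataStoreProviderModule.createInstance():
            // executor pool size, executor queue size, data-change listener queue size.
            return InMemoryDOMDataStoreConfigProperties.create(
                    20,     // max-shard-data-change-executor-pool-size (YANG default)
                    1000,   // max-shard-data-change-executor-queue-size (YANG default)
                    1000);  // max-shard-data-change-listener-queue-size (YANG default)
        }

        private DatastoreConfigDefaultsSketch() {
        }
    }
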
index 319451f..036b00a 100644 (file)
@@ -58,7 +58,7 @@ public class BasicIntegrationTest extends AbstractActorTest {
                 ShardIdentifier.builder().memberName("member-1")
                     .shardName("inventory").type("config").build();
 
-            final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+            final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
             final ActorRef shard = getSystem().actorOf(props);
 
             new Within(duration("5 seconds")) {
index fc527b6..49408b7 100644 (file)
@@ -72,7 +72,7 @@ public class DistributedDataStoreIntegrationTest {
                     protected void run() {
                         try {
                             final DistributedDataStore distributedDataStore =
-                                new DistributedDataStore(getSystem(), "config", new MockClusterWrapper(), configuration);
+                                new DistributedDataStore(getSystem(), "config", new MockClusterWrapper(), configuration, null);
 
                             distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
 
@@ -154,7 +154,7 @@ public class DistributedDataStoreIntegrationTest {
                         try {
                             final DistributedDataStore distributedDataStore =
                                 new DistributedDataStore(getSystem(), "config",
-                                    new MockClusterWrapper(), configuration);
+                                    new MockClusterWrapper(), configuration, null);
 
                             distributedDataStore.onGlobalContextUpdated(
                                 SchemaContextHelper.full());
index 406f0ff..69590e6 100644 (file)
@@ -68,7 +68,7 @@ public class DistributedDataStoreTest extends AbstractActorTest{
         ActorSystem actorSystem = mock(ActorSystem.class);
 
         new DistributedDataStore(actorSystem, "config",
-            mock(ClusterWrapper.class), mock(Configuration.class));
+            mock(ClusterWrapper.class), mock(Configuration.class), null);
 
         verify(actorSystem).actorOf(any(Props.class), eq("shardmanager-config"));
     }
index e9ad450..499b4e1 100644 (file)
@@ -42,7 +42,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", new MockClusterWrapper(),
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
@@ -66,7 +66,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", new MockClusterWrapper(),
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
@@ -89,7 +89,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", new MockClusterWrapper(),
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
@@ -124,7 +124,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", mockClusterWrapper,
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
@@ -158,7 +158,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", new MockClusterWrapper(),
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
@@ -196,7 +196,7 @@ public class ShardManagerTest {
         new JavaTestKit(system) {{
             final Props props = ShardManager
                 .props("config", new MockClusterWrapper(),
-                    new MockConfiguration());
+                    new MockConfiguration(), null);
             final TestActorRef<ShardManager> subject =
                 TestActorRef.create(system, props);
 
index 0d86ffb..7740b8e 100644 (file)
@@ -40,7 +40,7 @@ public class ShardTest extends AbstractActorTest {
                 ShardIdentifier.builder().memberName("member-1")
                     .shardName("inventory").type("config").build();
 
-            final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+            final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
             final ActorRef subject =
                 getSystem().actorOf(props, "testCreateTransactionChain");
 
@@ -96,7 +96,7 @@ public class ShardTest extends AbstractActorTest {
                 ShardIdentifier.builder().memberName("member-1")
                     .shardName("inventory").type("config").build();
 
-            final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+            final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
             final ActorRef subject =
                 getSystem().actorOf(props, "testRegisterChangeListener");
 
@@ -154,7 +154,7 @@ public class ShardTest extends AbstractActorTest {
                 ShardIdentifier.builder().memberName("member-1")
                     .shardName("inventory").type("config").build();
 
-            final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+            final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
             final ActorRef subject =
                 getSystem().actorOf(props, "testCreateTransaction");
 
@@ -216,7 +216,7 @@ public class ShardTest extends AbstractActorTest {
                     .shardName("inventory").type("config").build();
 
             peerAddresses.put(identifier, null);
-            final Props props = Shard.props(identifier, peerAddresses);
+            final Props props = Shard.props(identifier, peerAddresses, null);
             final ActorRef subject =
                 getSystem().actorOf(props, "testPeerAddressResolved");
 
index 02ceee8..16b7304 100644 (file)
@@ -33,6 +33,7 @@ import static org.junit.Assert.assertTrue;
 
 /**
  * Covers negative test cases
+ *
  * @author Basheeruddin Ahmed <syedbahm@cisco.com>
  */
 public class ShardTransactionFailureTest extends AbstractActorTest {
@@ -48,7 +49,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
     private static final ShardIdentifier SHARD_IDENTIFIER =
         ShardIdentifier.builder().memberName("member-1")
-            .shardName("inventory").type("config").build();
+            .shardName("inventory").type("operational").build();
 
     static {
         store.onGlobalContextUpdated(testSchemaContext);
@@ -60,7 +61,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
         throws Throwable {
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadOnlyTransaction(), shard,
                 TestModel.createTestContext());
@@ -95,7 +96,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
         throws Throwable {
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadWriteTransaction(), shard,
                 TestModel.createTestContext());
@@ -129,7 +130,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
         throws Throwable {
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadWriteTransaction(), shard,
                 TestModel.createTestContext());
@@ -164,7 +165,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
                 TestModel.createTestContext());
@@ -203,7 +204,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadWriteTransaction(), shard,
                 TestModel.createTestContext());
@@ -241,7 +242,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadWriteTransaction(), shard,
                 TestModel.createTestContext());
@@ -279,7 +280,7 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
 
         final ActorRef shard =
-            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
         final Props props =
             ShardTransaction.props(store.newReadWriteTransaction(), shard,
                 TestModel.createTestContext());
@@ -308,6 +309,4 @@ public class ShardTransactionFailureTest extends AbstractActorTest {
 
 
     }
-
-
 }
index 78895b2..8f5d0c2 100644 (file)
@@ -62,7 +62,7 @@ public class ShardTransactionTest extends AbstractActorTest {
     @Test
     public void testOnReceiveReadData() throws Exception {
         new JavaTestKit(getSystem()) {{
-            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
             final Props props =
                 ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext);
             final ActorRef subject = getSystem().actorOf(props, "testReadData");
@@ -104,7 +104,7 @@ public class ShardTransactionTest extends AbstractActorTest {
     @Test
     public void testOnReceiveReadDataWhenDataNotFound() throws Exception {
         new JavaTestKit(getSystem()) {{
-            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
             final Props props =
                 ShardTransaction.props( store.newReadOnlyTransaction(), shard, testSchemaContext);
             final ActorRef subject = getSystem().actorOf(props, "testReadDataWhenDataNotFound");
@@ -147,7 +147,7 @@ public class ShardTransactionTest extends AbstractActorTest {
     @Test
     public void testOnReceiveDataExistsPositive() throws Exception {
         new JavaTestKit(getSystem()) {{
-            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+            final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
             final Props props =
                 ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext);
             final ActorRef subject = getSystem().actorOf(props, "testDataExistsPositive");
@@ -189,7 +189,8 @@ public class ShardTransactionTest extends AbstractA