.DS_STORE
.metadata
opendaylight/md-sal/sal-distributed-datastore/journal
+!opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin
+
</parent>
<artifactId>features-config-netty</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-config-persister</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-netty-config</artifactId>
+ <!--
+ note, the reason the type and classifier
+ are here instead of in opendaylight/commons/opendaylight/pom.xml
+ is because they are used as jars in distribution.
+ -->
+ <version>${config.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
+ </dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
</dependency>
</dependencies>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-config-persister</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<version>${yangtools.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-netconf</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<groupId>org.eclipse.persistence</groupId>
<artifactId>org.eclipse.persistence.moxy</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-config</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
</properties>
<dependencies>
+ <!-- dependency for opendaylight-karaf-empty for use by testing -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>opendaylight-karaf-empty</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <type>zip</type>
+ </dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
<version>${yangtools.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-manager</artifactId>
</dependency>
+ <!-- test the features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
<repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
<feature name='odl-config-all' version='${project.version}'>
- <feature version='${project.version}'>odl-mdsal-common</feature>
+ <feature version='${mdsal.version}'>odl-mdsal-common</feature>
<feature version='${project.version}'>odl-config-api</feature>
<feature version='${project.version}'>odl-config-netty-config-api</feature>
<feature version='${project.version}'>odl-config-core</feature>
</parent>
<artifactId>features-flow</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<version>${mdsal.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.model</groupId>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>forwardingrules-manager</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-mdsal</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-yangtools</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config-persister</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config-netty</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>md-sal-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-connector-config</artifactId>
+ <version>${netconf.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-rest-connector-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.samples</groupId>
<dependency>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>toaster-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
+ </dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ <version>0.6.2-SNAPSHOT</version>
</dependency>
</dependencies>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-protocol-framework</artifactId>
<version>${protocol-framework.version}</version>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>protocol-framework</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
<artifactId>toaster-config</artifactId>
<version>${mdsal.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ <version>${yangtools.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>commons.opendaylight</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <relativePath>../../commons/opendaylight</relativePath>
+ </parent>
+ <artifactId>opendaylight-karaf-empty</artifactId>
+ <packaging>pom</packaging>
+ <prerequisites>
+ <maven>3.0</maven>
+ </prerequisites>
+
+ <dependencies>
+ <dependency>
+ <!-- scope is compile so all features (there is only one) are installed
+ into startup.properties and the feature repo itself is not installed -->
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>framework</artifactId>
+ <version>${karaf.version}</version>
+ <type>kar</type>
+ </dependency>
+ <!-- scope is runtime so the feature repo is listed in the features
+ service config file, and features may be installed using the
+ karaf-maven-plugin configuration -->
+ <dependency>
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>standard</artifactId>
+ <version>${karaf.version}</version>
+ <classifier>features</classifier>
+ <type>xml</type>
+ <scope>runtime</scope>
+ </dependency>
+
+ <!-- ODL Branding -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Resources needed -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>opendaylight-karaf-resources</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>cleanVersions</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>copy</goal>
+ <goal>unpack</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.karaf.tooling</groupId>
+ <artifactId>karaf-maven-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>commands-generate-help</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.fusesource.scalate</groupId>
+ <artifactId>maven-scalate-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>sitegen</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.servicemix.tooling</groupId>
+ <artifactId>depends-maven-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>generate-depends-file</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.karaf.tooling</groupId>
+ <artifactId>karaf-maven-plugin</artifactId>
+ <version>${karaf.version}</version>
+ <extensions>true</extensions>
+ <executions>
+ <execution>
+ <id>process-resources</id>
+ <goals>
+ <goal>install-kars</goal>
+ </goals>
+ <phase>process-resources</phase>
+ </execution>
+ <execution>
+ <id>package</id>
+ <goals>
+ <goal>instance-create-archive</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-checkstyle-plugin</artifactId>
+ <version>${checkstyle.version}</version>
+ <configuration>
+ <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/configuration\/initial\/</excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.6</version>
+ <executions>
+ <execution>
+ <id>copy</id>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+ <!-- here the phase you need -->
+ <phase>generate-resources</phase>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <version>${karaf.branding.version}</version>
+ <outputDirectory>target/assembly/lib</outputDirectory>
+ <destFileName>karaf.branding-${branding.version}.jar</destFileName>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ <execution>
+ <id>unpack-karaf-resources</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>prepare-package</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+ <groupId>org.opendaylight.controller</groupId>
+ <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+ <excludes>META-INF\/**</excludes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <chmod perm="755">
+ <fileset dir="${project.build.directory}/assembly/bin">
+ <include name="karaf"/>
+ <include name="instance"/>
+ </fileset>
+ </chmod>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <tag>HEAD</tag>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main</url>
+ </scm>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+
+ This program and the accompanying materials are made available under the
+ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ and is available at http://www.eclipse.org/legal/epl-v10.html
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>commons.opendaylight</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <relativePath>../../commons/opendaylight</relativePath>
+ </parent>
+ <artifactId>opendaylight-karaf-resources</artifactId>
+ <description>Resources for opendaylight-karaf</description>
+ <packaging>jar</packaging>
+</project>
<version>${karaf.version}</version>
<type>kar</type>
</dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>base-features</artifactId>
- <version>${project.version}</version>
- <type>kar</type>
- </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>karaf.branding</artifactId>
<scope>compile</scope>
</dependency>
- <!-- scope is runtime so the feature repo is listed in the features
- service config file, and features may be installed using the
- karaf-maven-plugin configuration -->
- <dependency>
- <groupId>org.apache.karaf.features</groupId>
- <artifactId>standard</artifactId>
- <version>${karaf.version}</version>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>base-features</artifactId>
- <version>${project.parent.version}</version>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
- </dependency>
- <!-- scope is compile so all features (there is only one) are installed
- into startup.properties and the feature repo itself is not installed -->
+
+ <!-- Resources needed -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>extras-features</artifactId>
+ <artifactId>opendaylight-karaf-resources</artifactId>
<version>${project.version}</version>
- <type>kar</type>
- <scope>runtime</scope>
- </dependency>
- <!-- AD-SAL Related Features -->
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>features-adsal</artifactId>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
</dependency>
+
+ <!-- scope is not runtime so the feature repo is pulled into the local
+ repo on build and thus you actually run. Failure to do so can lead
+ to very confusing errors for devs -->
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>features-nsf</artifactId>
- <version>${project.version}</version>
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>standard</artifactId>
+ <version>${karaf.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
+ <!--
+ controller provided features:
+ Note: Nothing should go here that is not locked
+ down with testing... ie, no broken feature repos
+ -->
+
<!-- MD-SAL Related Features -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-flow</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
</dependencies>
</artifactItems>
</configuration>
</execution>
+ <execution>
+ <id>unpack-karaf-resources</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>prepare-package</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+ <groupId>org.opendaylight.controller</groupId>
+ <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+ <excludes>META-INF\/**</excludes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <chmod perm="755">
+ <fileset dir="${project.build.directory}/assembly/bin">
+ <include name="karaf"/>
+ <include name="instance"/>
+ </fileset>
+ </chmod>
+ </tasks>
+ </configuration>
+ </execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <phase>prepare-package</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <tasks>
- <copy todir="${project.build.directory}/assembly/bin" overwrite="true">
- <fileset dir="${basedir}/src/main/resources/karaf/" includes="karaf,karaf.bat,instance,instance.bat"/>
- </copy>
- </tasks>
- </configuration>
- </execution>
- </executions>
- </plugin>
</plugins>
</build>
<scm>
+++ /dev/null
-################################################################################
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-#
-# Java platform package export properties.
-#
-
-# Standard package set. Note that:
-# - javax.transaction* is exported with a mandatory attribute
-jre-1.6= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-# Standard package set. Note that:
-# - javax.transaction* is exported with a mandatory attribute
-jre-1.7= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-jre-1.8= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
+++ /dev/null
-#Bundles to be started on startup, with startlevel
-
-# feature: framework version: 3.0.1
-mvn\:org.ops4j.base/ops4j-base-lang/1.4.0 = 5
-mvn\:biz.aQute.bnd/bndlib/2.2.0 = 5
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-bnd/1.7.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-maven-commons/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-aether/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-wrap/1.6.0 = 5
-mvn\:javax.annotation/javax.annotation-api/1.2 = 5
-mvn\:org.ops4j.pax.logging/pax-logging-api/1.7.2 = 8
-mvn\:org.ops4j.pax.logging/pax-logging-service/1.7.2 = 8
-mvn\:org.apache.karaf.service/org.apache.karaf.service.guard/3.0.1 = 10
-mvn\:org.apache.felix/org.apache.felix.configadmin/1.6.0 = 10
-mvn\:org.apache.felix/org.apache.felix.fileinstall/3.2.8 = 11
-mvn\:org.ow2.asm/asm-all/4.1 = 12
-mvn\:org.apache.aries/org.apache.aries.util/1.1.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.api/1.0.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.impl/1.0.2 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.api/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.cm/1.0.3 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core.compatibility/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core/1.4.0 = 20
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.spring/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.blueprint/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.wrap/3.0.1 = 24
-mvn\:org.apache.karaf.region/org.apache.karaf.region.core/3.0.1 = 25
-mvn\:org.apache.karaf.features/org.apache.karaf.features.core/3.0.1 = 25
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.features/3.0.1 = 26
-mvn\:jline/jline/2.11 = 30
-mvn\:org.jledit/core/0.2.1 = 30
-mvn\:org.fusesource.jansi/jansi/1.11 = 30
-mvn\:org.ops4j.base/ops4j-base-util-property/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-xml/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-collections/1.4.0 = 30
-mvn\:org.ops4j.pax.url/pax-url-commons/1.6.0 = 30
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-property/1.7.0 = 30
-mvn\:org.ops4j.base/ops4j-base-net/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-monitors/1.4.0 = 30
-mvn\:org.apache.karaf.features/org.apache.karaf.features.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.console/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.modules/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.config/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.boot/3.0.1 = 30
-mvn\:org.apache.sshd/sshd-core/0.9.0 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.table/3.0.1 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.core/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.help/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.core/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.commands/3.0.1 = 30
-mvn\:org.apache.aries.quiesce/org.apache.aries.quiesce.api/1.0.0 = 30
+++ /dev/null
-#
-# The properties defined in this file will be made available through system
-# properties at the very beginning of the Karaf's boot process.
-#
-
-# Use Equinox as default OSGi Framework Implementation
-karaf.framework=equinox
-
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
-
-
-# Log level when the pax-logging service is not available
-# This level will only be used while the pax-logging service bundle
-# is not fully available.
-# To change log levels, please refer to the org.ops4j.pax.logging.cfg file
-# instead.
-org.ops4j.pax.logging.DefaultServiceLog.level = ERROR
-
-#
-# Name of this Karaf instance.
-#
-karaf.name = root
-
-#
-# Default repository where bundles will be loaded from before using
-# other Maven repositories. For the full Maven configuration, see
-# the org.ops4j.pax.url.mvn.cfg file.
-#
-karaf.default.repository = system
-
-#
-# Location of a shell script that will be run when starting a shell
-# session. This script can be used to create aliases and define
-# additional commands.
-#
-karaf.shell.init.script = ${karaf.etc}/shell.init.script
-
-#
-# Sets the maximum size of the shell command history. If not set,
-# defaults to 500 entries. Setting to 0 will disable history.
-#
-# karaf.shell.history.maxSize = 0
-
-#
-# Deletes the entire karaf.data directory at every start
-#
-karaf.clean.all = false
-
-#
-# Deletes the karaf.data/cache directory at every start
-#
-karaf.clean.cache = false
-
-#
-# Roles to use when logging into a local Karaf console.
-#
-# The syntax is the following:
-# [classname:]principal
-# where classname is the class name of the principal object
-# (defaults to org.apache.karaf.jaas.modules.RolePrincipal)
-# and principal is the name of the principal of that class
-# (defaults to instance).
-#
-karaf.local.roles = admin,manager,viewer
-
-#
-# Set this empty property to avoid errors when validating xml documents.
-#
-xml.catalog.files =
-
-#
-# Suppress the bell in the console when hitting backspace too many times
-# for example
-#
-jline.nobell = true
-
-#
-# ServiceMix specs options
-#
-org.apache.servicemix.specs.debug = false
-org.apache.servicemix.specs.timeout = 0
-
-#
-# Settings for the OSGi 4.3 Weaving
-# By default, we will not weave any classes. Change this setting to include classes
-# that you application needs to have woven.
-#
-org.apache.aries.proxy.weaving.enabled = none
-# Classes not to weave - Aries default + Xerces which is known to have issues.
-org.apache.aries.proxy.weaving.disabled = org.objectweb.asm.*,org.slf4j.*,org.apache.log4j.*,javax.*,org.apache.xerces.*
-
-#
-# By default, only Karaf shell commands are secured, but additional services can be
-# secured by expanding this filter
-#
-karaf.secured.services = (&(osgi.command.scope=*)(osgi.command.function=*))
-
-#
-# Security properties
-#
-# To enable OSGi security, uncomment the properties below,
-# install the framework-security feature and restart.
-#
-#java.security.policy=${karaf.etc}/all.policy
-#org.osgi.framework.security=osgi
-#org.osgi.framework.trust.repositories=${karaf.etc}/trustStore.ks
import java.util.HashSet;
import java.util.Map;
-import java.util.Set;
import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
+ * AbstractChangeListner implemented basic {@link AsyncDataChangeEvent} processing for
+ * flow node subDataObject (flows, groups and meters).
*
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public abstract class AbstractChangeListener implements DataChangeListener {
+ private final static Logger LOG = LoggerFactory.getLogger(AbstractChangeListener.class);
+
private final AtomicLong txNum = new AtomicLong();
private String transactionId;
@Override
- public void onDataChanged(DataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+ public void onDataChanged(final AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
this.transactionId = this.newTransactionIdentifier().toString();
-
+ /* All DataObjects for create */
final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
- changeEvent.getCreatedConfigurationData().entrySet();
- final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries =
- new HashSet<Entry<InstanceIdentifier<? extends DataObject>, DataObject>>();
-
+ changeEvent.getCreatedData().entrySet();
+ /* All DataObjects for updates - init HashSet */
+ final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updatedEntries = new HashSet<>();
+ /* Filtered DataObject for update processing only */
Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> updateConfigEntrySet =
- changeEvent.getUpdatedConfigurationData().entrySet();
+ changeEvent.getUpdatedData().entrySet();
updatedEntries.addAll(updateConfigEntrySet);
updatedEntries.removeAll(createdEntries);
-
+ /* All DataObjects for remove */
final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
- changeEvent.getRemovedConfigurationData();
-
+ changeEvent.getRemovedPaths();
+ /* Create DataObject processing (send to device) */
for (final Entry<InstanceIdentifier<? extends DataObject>, DataObject> createdEntry : createdEntries) {
- InstanceIdentifier<? extends DataObject> c_key = createdEntry.getKey();
- DataObject c_value = createdEntry.getValue();
- this.add(c_key, c_value);
+ InstanceIdentifier<? extends DataObject> entryKey = createdEntry.getKey();
+ DataObject entryValue = createdEntry.getValue();
+ if (preconditionForChange(entryKey, entryValue, null)) {
+ this.add(entryKey, entryValue);
+ }
}
for (final Entry<InstanceIdentifier<?>, DataObject> updatedEntrie : updatedEntries) {
Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
- changeEvent.getOriginalConfigurationData();
-
- InstanceIdentifier<? extends Object> u_key = updatedEntrie.getKey();
- final DataObject originalFlow = origConfigData.get(u_key);
- final DataObject updatedFlow = updatedEntrie.getValue();
- this.update(u_key, originalFlow, updatedFlow);
+ changeEvent.getOriginalData();
+
+ InstanceIdentifier<? extends Object> entryKey = updatedEntrie.getKey();
+ final DataObject original = origConfigData.get(entryKey);
+ final DataObject updated = updatedEntrie.getValue();
+ if (preconditionForChange(entryKey, original, updated)) {
+ this.update(entryKey, original, updated);
+ }
}
for (final InstanceIdentifier<?> instanceId : removeEntriesInstanceIdentifiers) {
Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
- changeEvent.getOriginalConfigurationData();
+ changeEvent.getOriginalData();
final DataObject removeValue = origConfigData.get(instanceId);
- this.remove(instanceId, removeValue);
+ if (preconditionForChange(instanceId, removeValue, null)) {
+ this.remove(instanceId, removeValue);
+ }
}
}
+ /**
+ * Method returns generated transaction ID, which is unique for
+ * every transaction. ID is composite from prefix ("DOM") and unique number.
+ *
+ * @return String transactionID
+ */
public String getTransactionId() {
return this.transactionId;
}
return "DOM-" + txNum.getAndIncrement();
}
- protected abstract void validate() throws IllegalStateException;
-
- protected abstract void remove(
+ /**
+ * Method check all local preconditions for apply relevant changes.
+ *
+ * @param InstanceIdentifier identifier - the whole path to DataObject
+ * @param DataObject original - original DataObject (for update)
+ * or relevant DataObject (add/delete operations)
+ * @param DataObject update - changed DataObject (contain updates)
+ * or should be null for (add/delete operations)
+ *
+ * @return boolean - applicable
+ */
+ protected abstract boolean preconditionForChange(
final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject original, final DataObject update);
+
+ /**
+ * Method checks the node data path in DataStore/OPERATIONAL because
+ * without the Node Identifier in DataStore/OPERATIONAL, device
+ * is not connected and device pre-configuration is allowed only.
+ *
+ * @param InstanceIdentifier identifier - could be whole path to DataObject,
+ * but parent Node.class InstanceIdentifier is used for a check only
+ *
+ * @return boolean - is the Node available in DataStore/OPERATIONAL (is connected)
+ */
+ protected boolean isNodeAvailable(final InstanceIdentifier<? extends DataObject> identifier,
+ final ReadOnlyTransaction readTrans) {
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+ try {
+ return readTrans.read(LogicalDatastoreType.OPERATIONAL, nodeInstanceId).get().isPresent();
+ }
+ catch (InterruptedException | ExecutionException e) {
+ LOG.error("Unexpected exception by reading Node ".concat(nodeInstanceId.toString()), e);
+ return false;
+ }
+ finally {
+ readTrans.close();
+ }
+ }
+
+ /**
+ * Method removes DataObject which is identified by InstanceIdentifier
+ * from device.
+ *
+ * @param InstanceIdentifier identifier - the whole path to DataObject
+ * @param DataObject remove - DataObject for removing
+ */
+ protected abstract void remove(final InstanceIdentifier<? extends DataObject> identifier,
final DataObject remove);
- protected abstract void update(
- final InstanceIdentifier<? extends DataObject> identifier,
+ /**
+ * Method updates the original DataObject to the update DataObject
+ * in device. Both are identified by same InstanceIdentifier
+ *
+ * @param InstanceIdentifier identifier - the whole path to DataObject
+ * @param DataObject original - original DataObject (for update)
+ * @param DataObject update - changed DataObject (contain updates)
+ */
+ protected abstract void update(final InstanceIdentifier<? extends DataObject> identifier,
final DataObject original, final DataObject update);
- protected abstract void add(
- final InstanceIdentifier<? extends DataObject> identifier,
+ /**
+ * Method adds the DataObject which is identified by InstanceIdentifier
+ * to device.
+ *
+ * @param InstanceIdentifier identifier - the whole path to new DataObject
+ * @param DataObject add - new DataObject
+ */
+ protected abstract void add(final InstanceIdentifier<? extends DataObject> identifier,
final DataObject add);
}
import org.opendaylight.controller.frm.flow.FlowProvider;
import org.opendaylight.controller.frm.group.GroupProvider;
import org.opendaylight.controller.frm.meter.MeterProvider;
+import org.opendaylight.controller.frm.reconil.FlowNodeReconcilProvider;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.sal.binding.api.AbstractBindingAwareProvider;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ProviderContext;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/**
+ * Forwarding Rules Manager Activator
+ *
+ * Activator manages all Providers ({@link FlowProvider}, {@link GroupProvider},
+ * {@link MeterProvider} and the {@link FlowNodeReconcilProvider}).
+ * It registers all listeners (DataChangeEvent, ReconcilNotification)
+ * in the Session Initialization phase.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ * *
+ */
public class FRMActivator extends AbstractBindingAwareProvider {
private final static Logger LOG = LoggerFactory.getLogger(FRMActivator.class);
- private static FlowProvider flowProvider = new FlowProvider();
- private static GroupProvider groupProvider = new GroupProvider();
- private static MeterProvider meterProvider = new MeterProvider();
+ private final FlowProvider flowProvider;
+ private final GroupProvider groupProvider;
+ private final MeterProvider meterProvider;
+ private final FlowNodeReconcilProvider flowNodeReconcilProvider;
+
+ public FRMActivator() {
+ this.flowProvider = new FlowProvider();
+ this.groupProvider = new GroupProvider();
+ this.meterProvider = new MeterProvider();
+ this.flowNodeReconcilProvider = new FlowNodeReconcilProvider();
+ }
@Override
public void onSessionInitiated(final ProviderContext session) {
- DataProviderService flowSalService = session.<DataProviderService>getSALService(DataProviderService.class);
- FRMActivator.flowProvider.setDataService(flowSalService);
- SalFlowService rpcFlowSalService = session.<SalFlowService>getRpcService(SalFlowService.class);
- FRMActivator.flowProvider.setSalFlowService(rpcFlowSalService);
- FRMActivator.flowProvider.start();
- DataProviderService groupSalService = session.<DataProviderService>getSALService(DataProviderService.class);
- FRMActivator.groupProvider.setDataService(groupSalService);
- SalGroupService rpcGroupSalService = session.<SalGroupService>getRpcService(SalGroupService.class);
- FRMActivator.groupProvider.setSalGroupService(rpcGroupSalService);
- FRMActivator.groupProvider.start();
- DataProviderService meterSalService = session.<DataProviderService>getSALService(DataProviderService.class);
- FRMActivator.meterProvider.setDataService(meterSalService);
- SalMeterService rpcMeterSalService = session.<SalMeterService>getRpcService(SalMeterService.class);
- FRMActivator.meterProvider.setSalMeterService(rpcMeterSalService);
- FRMActivator.meterProvider.start();
+ LOG.info("FRMActivator initialization.");
+ /* Flow */
+ try {
+ final DataBroker flowSalService = session.getSALService(DataBroker.class);
+ this.flowProvider.init(flowSalService);
+ this.flowProvider.start(session);
+ /* Group */
+ final DataBroker groupSalService = session.getSALService(DataBroker.class);
+ this.groupProvider.init(groupSalService);
+ this.groupProvider.start(session);
+ /* Meter */
+ final DataBroker meterSalService = session.getSALService(DataBroker.class);
+ this.meterProvider.init(meterSalService);
+ this.meterProvider.start(session);
+ /* FlowNode Reconciliation */
+ final DataBroker dbs = session.getSALService(DataBroker.class);
+ this.flowNodeReconcilProvider.init(dbs);
+ this.flowNodeReconcilProvider.start(session);
+
+ LOG.info("FRMActivator started successfully");
+ } catch (Exception e) {
+ String errMsg = "Unexpected error by starting FRMActivator";
+ LOG.error(errMsg, e);
+ throw new IllegalStateException(errMsg, e);
+ }
}
@Override
protected void stopImpl(final BundleContext context) {
try {
- FRMActivator.flowProvider.close();
- FRMActivator.groupProvider.close();
- FRMActivator.meterProvider.close();
- } catch (Throwable e) {
+ this.flowProvider.close();
+ this.groupProvider.close();
+ this.meterProvider.close();
+ this.flowNodeReconcilProvider.close();
+ } catch (Exception e) {
LOG.error("Unexpected error by stopping FRMActivator", e);
- throw new RuntimeException(e);
}
}
}
\ No newline at end of file
--- /dev/null
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm;
+
+import java.math.BigInteger;
+
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.AtomicLongMap;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * Singleton FlowCookieProducer contains a FlowCookie generator which is generated unique
+ * flowCookie identifier for every flow in same Table. That could help with quick
+ * identification of flow statistic because DataStore/CONFIGURATION could contains
+ * a lot of flows with same flowCookie. So we are replacing original flowCookie
+ * with unique and we are building final FlowCookieMap in DataStore/OPERATIONAL
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public enum FlowCookieProducer {
+
+ INSTANCE;
+
+ /* Flow_Cookie_Key and Flow_Ids MapHolder */
+ private static final AtomicLongMap<InstanceIdentifier<Table>> cookieKeys = AtomicLongMap.create();
+
+ /**
+ * Method returns the unique cookie for a node table.
+ * Flow Cookie Key signs List<FlowId> for a right flow statistic identification
+ * in the DataStore/operational.
+ * We need a List<FlowId> because system doesn't guarantee unique mapping
+ * from flow_cookie to flow_id. REST Operations doesn't used FRM yet, so
+ * cookie from user input could have a user input flow ID and an alien system ID
+ * which is generated by system.
+ *
+ * @param InstanceIdentifier<Table> tableIdentifier
+ * @return unique BigInteger flowCookie for a node table
+ */
+ public BigInteger getNewCookie(final InstanceIdentifier<Table> tableIdentifier) {
+ FlowCookieProducer.validationTableIdentifier(tableIdentifier);
+ if ( cookieKeys.containsKey(tableIdentifier)) {
+ /* new identifier always starts from ONE because
+ * ZERO is reserved for the NO_COOKIES flows */
+ return BigInteger.valueOf(cookieKeys.addAndGet(tableIdentifier, 1L));
+ } else {
+ return BigInteger.valueOf(cookieKeys.incrementAndGet(tableIdentifier));
+ }
+ }
+
+ /**
+ * Method cleans the node table flow_cookie_key for the disconnected Node.
+ *
+ * @param InstanceIdentifier<Table> tableIdentifier
+ */
+ public void clean(final InstanceIdentifier<Table> tableIdentifier) {
+ FlowCookieProducer.validationTableIdentifier(tableIdentifier);
+ cookieKeys.remove(tableIdentifier);
+ }
+
+ /*
+ * Help the TableIdentifer input validation method
+ */
+ private static void validationTableIdentifier(final InstanceIdentifier<Table> tableIdent) {
+ Preconditions.checkArgument(tableIdent != null, "Input validation exception: TableIdentifier can not be null !");
+ }
+}
*/
package org.opendaylight.controller.frm.flow;
+import java.math.BigInteger;
+
import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.frm.FlowCookieProducer;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInput;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.FlowTableRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.RemoveFlowInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.RemoveFlowInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.UpdateFlowInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.UpdateFlowInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.OriginalFlow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.OriginalFlowBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.UpdatedFlow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.flow.update.UpdatedFlowBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.Flow;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
/**
+ * Flow Change Listener
+ * add, update and remove {@link Flow} processing from {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
*
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public class FlowChangeListener extends AbstractChangeListener {
- private final static Logger LOG = LoggerFactory.getLogger(FlowChangeListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlowChangeListener.class);
- private final SalFlowService salFlowService;
-
- public SalFlowService getSalFlowService() {
- return this.salFlowService;
- }
+ private final FlowProvider provider;
- public FlowChangeListener(final SalFlowService manager) {
- this.salFlowService = manager;
+ public FlowChangeListener (final FlowProvider provider) {
+ this.provider = Preconditions.checkNotNull(provider, "FlowProvider can not be null !");
}
@Override
- protected void validate() throws IllegalStateException {
- FlowTransactionValidator.validate(this);
- }
+ protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject removeDataObj) {
- @Override
- protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
- if ((removeDataObj instanceof Flow)) {
-
- final Flow flow = ((Flow) removeDataObj);
- final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final RemoveFlowInputBuilder builder = new RemoveFlowInputBuilder(flow);
-
- builder.setFlowRef(new FlowRef(identifier));
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salFlowService.removeFlow((RemoveFlowInput) builder.build());
- LOG.debug("Transaction {} - Removed Flow has removed flow: {}", new Object[]{uri, removeDataObj});
- }
+ final Flow flow = ((Flow) removeDataObj);
+ final InstanceIdentifier<Table> tableIdent = identifier.firstIdentifierOf(Table.class);
+ final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+ final RemoveFlowInputBuilder builder = new RemoveFlowInputBuilder(flow);
+
+ // use empty cookie mask in order to delete flow even with generated cookie
+ builder.setCookieMask(new FlowCookie(BigInteger.ZERO));
+
+ builder.setFlowRef(new FlowRef(identifier));
+ builder.setNode(new NodeRef(nodeIdent));
+ builder.setFlowTable(new FlowTableRef(tableIdent));
+
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalFlowService().removeFlow(builder.build());
+ LOG.debug("Transaction {} - Removed Flow has removed flow: {}", new Object[]{uri, removeDataObj});
}
@Override
- protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
- if (original instanceof Flow && update instanceof Flow) {
+ protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject original, final DataObject update) {
+
+ final Flow originalFlow = ((Flow) original);
+ final Flow updatedFlow = ((Flow) update);
+ final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+ final UpdateFlowInputBuilder builder = new UpdateFlowInputBuilder();
- final Flow originalFlow = ((Flow) original);
- final Flow updatedFlow = ((Flow) update);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node>firstIdentifierOf(Node.class);
- final UpdateFlowInputBuilder builder = new UpdateFlowInputBuilder();
+ builder.setNode(new NodeRef(nodeIdent));
+ builder.setFlowRef(new FlowRef(identifier));
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setFlowRef(new FlowRef(identifier));
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
+ builder.setUpdatedFlow((new UpdatedFlowBuilder(updatedFlow)).build());
+ builder.setOriginalFlow((new OriginalFlowBuilder(originalFlow)).build());
- builder.setUpdatedFlow((UpdatedFlow) (new UpdatedFlowBuilder(updatedFlow)).build());
- builder.setOriginalFlow((OriginalFlow) (new OriginalFlowBuilder(originalFlow)).build());
+ this.provider.getSalFlowService().updateFlow(builder.build());
+ LOG.debug("Transaction {} - Update Flow has updated flow {} with {}", new Object[]{uri, original, update});
+ }
- this.salFlowService.updateFlow((UpdateFlowInput) builder.build());
- LOG.debug("Transaction {} - Update Flow has updated flow {} with {}", new Object[]{uri, original, update});
- }
+ @Override
+ protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject addDataObj) {
+
+ final Flow flow = ((Flow) addDataObj);
+ final InstanceIdentifier<Table> tableIdent = identifier.firstIdentifierOf(Table.class);
+ final NodeRef nodeRef = new NodeRef(identifier.firstIdentifierOf(Node.class));
+ final FlowCookie flowCookie = new FlowCookie(FlowCookieProducer.INSTANCE.getNewCookie(tableIdent));
+ final AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
+
+ builder.setNode(nodeRef);
+ builder.setFlowRef(new FlowRef(identifier));
+ builder.setFlowTable(new FlowTableRef(tableIdent));
+ builder.setCookie( flowCookie );
+
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalFlowService().addFlow(builder.build());
+ LOG.debug("Transaction {} - Add Flow has added flow: {}", new Object[]{uri, addDataObj});
}
@Override
- protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
- if ((addDataObj instanceof Flow)) {
-
- final Flow flow = ((Flow) addDataObj);
- final InstanceIdentifier<Table> tableInstanceId = identifier.<Table> firstIdentifierOf(Table.class);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final AddFlowInputBuilder builder = new AddFlowInputBuilder(flow);
-
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setFlowRef(new FlowRef(identifier));
- builder.setFlowTable(new FlowTableRef(tableInstanceId));
-
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salFlowService.addFlow((AddFlowInput) builder.build());
- LOG.debug("Transaction {} - Add Flow has added flow: {}", new Object[]{uri, addDataObj});
- }
+ protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject dataObj, final DataObject update) {
+
+ final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+ return update != null
+ ? (dataObj instanceof Flow && update instanceof Flow && isNodeAvailable(identifier, trans))
+ : (dataObj instanceof Flow && isNodeAvailable(identifier, trans));
}
}
*/
package org.opendaylight.controller.frm.flow;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
+/**
+ * Flow Provider registers the {@link FlowChangeListener} and it holds all needed
+ * services for {@link FlowChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
public class FlowProvider implements AutoCloseable {
- private final static Logger LOG = LoggerFactory.getLogger(FlowProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlowProvider.class);
private SalFlowService salFlowService;
- private DataProviderService dataService;
+ private DataBroker dataService;
/* DataChangeListener */
- private FlowChangeListener flowDataChangeListener;
- ListenerRegistration<DataChangeListener> flowDataChangeListenerRegistration;
+ private DataChangeListener flowDataChangeListener;
+ private ListenerRegistration<DataChangeListener> flowDataChangeListenerRegistration;
+
+ /**
+ * Provider Initialization Phase.
+ *
+ * @param dataService the {@link DataBroker} used to register the data change listener; must not be null
+ */
+ public void init (final DataBroker dataService) {
+ LOG.info("FRM Flow Config Provider initialization.");
+ this.dataService = Preconditions.checkNotNull(dataService, "DataProviderService can not be null !");
+ }
+
+ /**
+ * Listener Registration Phase
+ *
+ * @param rpcRegistry the {@link RpcConsumerRegistry} used to look up the {@link SalFlowService}; must not be null
+ */
+ public void start(final RpcConsumerRegistry rpcRegistry) {
+ Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry can not be null !");
+
+ this.salFlowService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalFlowService.class),
+ "RPC SalFlowService not found.");
- public void start() {
/* Build Path */
- InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
- InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
- InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
- InstanceIdentifierBuilder<Table> tableChild = augmentFlowCapNode.<Table> child(Table.class);
- InstanceIdentifierBuilder<Flow> flowChild = tableChild.<Flow> child(Flow.class);
- final InstanceIdentifier<? extends DataObject> flowDataObjectPath = flowChild.toInstance();
+ InstanceIdentifier<Flow> flowIdentifier = InstanceIdentifier.create(Nodes.class)
+ .child(Node.class).augmentation(FlowCapableNode.class).child(Table.class).child(Flow.class);
/* DataChangeListener registration */
- this.flowDataChangeListener = new FlowChangeListener(this.salFlowService);
- this.flowDataChangeListenerRegistration = this.dataService.registerDataChangeListener(flowDataObjectPath, flowDataChangeListener);
- LOG.info("Flow Config Provider started.");
- }
+ this.flowDataChangeListener = new FlowChangeListener(FlowProvider.this);
+ this.flowDataChangeListenerRegistration =
+ this.dataService.registerDataChangeListener(LogicalDatastoreType.CONFIGURATION,
+ flowIdentifier, flowDataChangeListener, DataChangeScope.SUBTREE);
- protected DataModificationTransaction startChange() {
- return this.dataService.beginTransaction();
+ LOG.info("FRM Flow Config Provider started.");
}
@Override
- public void close() throws Exception {
- if(flowDataChangeListenerRegistration != null){
- flowDataChangeListenerRegistration.close();
+ public void close() {
+ LOG.info("FRM Flow Config Provider stopped.");
+ if (flowDataChangeListenerRegistration != null) {
+ try {
+ flowDataChangeListenerRegistration.close();
+ } catch (Exception e) {
+ String errMsg = "Error by stop FRM Flow Config Provider.";
+ LOG.error(errMsg, e);
+ throw new IllegalStateException(errMsg, e);
+ } finally {
+ flowDataChangeListenerRegistration = null;
+ }
}
}
- public void setDataService(final DataProviderService dataService) {
- this.dataService = dataService;
+ public DataChangeListener getFlowDataChangeListener() {
+ return flowDataChangeListener;
+ }
+
+ public SalFlowService getSalFlowService() {
+ return salFlowService;
}
- public void setSalFlowService(final SalFlowService salFlowService) {
- this.salFlowService = salFlowService;
+ public DataBroker getDataService() {
+ return dataService;
}
}
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.flow;
-
-public class FlowTransactionValidator {
-
- public static void validate(FlowChangeListener transaction) throws IllegalStateException {
- // NOOP
- }
-}
package org.opendaylight.controller.frm.group;
import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.RemoveGroupInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.RemoveGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.UpdateGroupInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.UpdateGroupInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.OriginalGroup;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.OriginalGroupBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.UpdatedGroup;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.group.update.UpdatedGroupBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.GroupRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
/**
+ * Group Change Listener:
+ * processes add, update and remove of {@link Group} objects delivered via {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
*
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public class GroupChangeListener extends AbstractChangeListener {
- private final static Logger LOG = LoggerFactory.getLogger(GroupChangeListener.class);
-
- private final SalGroupService salGroupService;
+ private static final Logger LOG = LoggerFactory.getLogger(GroupChangeListener.class);
- public SalGroupService getSalGroupService() {
- return this.salGroupService;
- }
+ private final GroupProvider provider;
- public GroupChangeListener(final SalGroupService manager) {
- this.salGroupService = manager;
+ public GroupChangeListener(final GroupProvider provider) {
+ this.provider = Preconditions.checkNotNull(provider, "GroupProvider can not be null !");
}
@Override
- protected void validate() throws IllegalStateException {
- GroupTransactionValidator.validate(this);
+ protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject removeDataObj) {
+
+ final Group group = ((Group) removeDataObj);
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+ final RemoveGroupInputBuilder builder = new RemoveGroupInputBuilder(group);
+
+ builder.setNode(new NodeRef(nodeInstanceId));
+ builder.setGroupRef(new GroupRef(identifier));
+
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalGroupService().removeGroup(builder.build());
+ LOG.debug("Transaction {} - Remove Group has removed group: {}", new Object[]{uri, removeDataObj});
}
@Override
- protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
- if ((removeDataObj instanceof Group)) {
+ protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject original, final DataObject update) {
- final Group group = ((Group) removeDataObj);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final RemoveGroupInputBuilder builder = new RemoveGroupInputBuilder(group);
+ final Group originalGroup = ((Group) original);
+ final Group updatedGroup = ((Group) update);
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+ final UpdateGroupInputBuilder builder = new UpdateGroupInputBuilder();
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setGroupRef(new GroupRef(identifier));
+ builder.setNode(new NodeRef(nodeInstanceId));
+ builder.setGroupRef(new GroupRef(identifier));
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salGroupService.removeGroup((RemoveGroupInput) builder.build());
- LOG.debug("Transaction {} - Remove Group has removed group: {}", new Object[]{uri, removeDataObj});
- }
- }
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
- @Override
- protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
- if (original instanceof Group && update instanceof Group) {
+ builder.setUpdatedGroup((new UpdatedGroupBuilder(updatedGroup)).build());
+ builder.setOriginalGroup((new OriginalGroupBuilder(originalGroup)).build());
- final Group originalGroup = ((Group) original);
- final Group updatedGroup = ((Group) update);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final UpdateGroupInputBuilder builder = new UpdateGroupInputBuilder();
+ this.provider.getSalGroupService().updateGroup(builder.build());
+ LOG.debug("Transaction {} - Update Group has updated group {} with group {}", new Object[]{uri, original, update});
+ }
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setGroupRef(new GroupRef(identifier));
+ @Override
+ protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject addDataObj) {
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
+ final Group group = ((Group) addDataObj);
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
+ final AddGroupInputBuilder builder = new AddGroupInputBuilder(group);
- builder.setUpdatedGroup((UpdatedGroup) (new UpdatedGroupBuilder(updatedGroup)).build());
- builder.setOriginalGroup((OriginalGroup) (new OriginalGroupBuilder(originalGroup)).build());
+ builder.setNode(new NodeRef(nodeInstanceId));
+ builder.setGroupRef(new GroupRef(identifier));
- this.salGroupService.updateGroup((UpdateGroupInput) builder.build());
- LOG.debug("Transaction {} - Update Group has updated group {} with group {}", new Object[]{uri, original, update});
- }
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalGroupService().addGroup(builder.build());
+ LOG.debug("Transaction {} - Add Group has added group: {}", new Object[]{uri, addDataObj});
}
@Override
- protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
- if ((addDataObj instanceof Group)) {
- final Group group = ((Group) addDataObj);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final AddGroupInputBuilder builder = new AddGroupInputBuilder(group);
-
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setGroupRef(new GroupRef(identifier));
-
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salGroupService.addGroup((AddGroupInput) builder.build());
- LOG.debug("Transaction {} - Add Group has added group: {}", new Object[]{uri, addDataObj});
- }
+ protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject dataObj, final DataObject update) {
+
+ final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+ return update != null
+ ? (dataObj instanceof Group && update instanceof Group && isNodeAvailable(identifier, trans))
+ : (dataObj instanceof Group && isNodeAvailable(identifier, trans));
}
}
*/
package org.opendaylight.controller.frm.group;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
+/**
+ * Group Provider registers the {@link GroupChangeListener} and it holds all needed
+ * services for {@link GroupChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
public class GroupProvider implements AutoCloseable {
- private final static Logger LOG = LoggerFactory.getLogger(GroupProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(GroupProvider.class);
private SalGroupService salGroupService;
- private DataProviderService dataService;
+ private DataBroker dataService;
/* DataChangeListener */
- private GroupChangeListener groupDataChangeListener;
- ListenerRegistration<DataChangeListener> groupDataChangeListenerRegistration;
+ private DataChangeListener groupDataChangeListener;
+ private ListenerRegistration<DataChangeListener> groupDataChangeListenerRegistration;
+
+ /**
+ * Provider Initialization Phase.
+ *
+ * @param dataService the {@link DataBroker} used to register the data change listener; must not be null
+ */
+ public void init (final DataBroker dataService) {
+ LOG.info("FRM Group Config Provider initialization.");
+ this.dataService = Preconditions.checkNotNull(dataService, "DataService can not be null !");
+ }
+
+ /**
+ * Listener Registration Phase
+ *
+ * @param rpcRegistry the {@link RpcConsumerRegistry} used to look up the {@link SalGroupService}; must not be null
+ */
+ public void start(final RpcConsumerRegistry rpcRegistry) {
+ Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry can not be null !");
+
+ this.salGroupService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalGroupService.class),
+ "RPC SalGroupService not found.");
- public void start() {
/* Build Path */
- InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
- InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
- InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
- InstanceIdentifierBuilder<Group> groupChild = augmentFlowCapNode.<Group> child(Group.class);
- final InstanceIdentifier<? extends DataObject> groupDataObjectPath = groupChild.toInstance();
+ InstanceIdentifier<Group> groupIdentifier = InstanceIdentifier.create(Nodes.class)
+ .child(Node.class).augmentation(FlowCapableNode.class).child(Group.class);
/* DataChangeListener registration */
- this.groupDataChangeListener = new GroupChangeListener(this.salGroupService);
- this.groupDataChangeListenerRegistration = this.dataService.registerDataChangeListener(groupDataObjectPath, groupDataChangeListener);
- LOG.info("Group Config Provider started.");
- }
+ this.groupDataChangeListener = new GroupChangeListener(GroupProvider.this);
+ this.groupDataChangeListenerRegistration = this.dataService.registerDataChangeListener(
+ LogicalDatastoreType.CONFIGURATION, groupIdentifier, groupDataChangeListener, DataChangeScope.SUBTREE);
- protected DataModificationTransaction startChange() {
- return this.dataService.beginTransaction();
+ LOG.info("FRM Group Config Provider started.");
}
- public void close() throws Exception {
- if(groupDataChangeListenerRegistration != null){
- groupDataChangeListenerRegistration.close();
+ @Override
+ public void close() {
+ LOG.info("FRM Group Config Provider stopped.");
+ if (groupDataChangeListenerRegistration != null) {
+ try {
+ groupDataChangeListenerRegistration.close();
+ } catch (Exception e) {
+ String errMsg = "Error by stop FRM Group Config Provider.";
+ LOG.error(errMsg, e);
+ throw new IllegalStateException(errMsg, e);
+ } finally {
+ groupDataChangeListenerRegistration = null;
+ }
}
}
- public void setDataService(final DataProviderService dataService) {
- this.dataService = dataService;
+ public DataChangeListener getGroupDataChangeListener() {
+ return groupDataChangeListener;
+ }
+
+ public SalGroupService getSalGroupService() {
+ return salGroupService;
}
- public void setSalGroupService(final SalGroupService salGroupService) {
- this.salGroupService = salGroupService;
+ public DataBroker getDataService() {
+ return dataService;
}
}
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.group;
-
-public class GroupTransactionValidator {
-
- public static void validate(GroupChangeListener transaction) throws IllegalStateException {
- // NOOP
- }
-}
package org.opendaylight.controller.frm.meter;
import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.RemoveMeterInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.RemoveMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.UpdateMeterInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.UpdateMeterInputBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.OriginalMeter;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.OriginalMeterBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.UpdatedMeter;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.meter.update.UpdatedMeterBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.Meter;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.MeterRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
/**
+ * Meter Change Listener:
+ * processes add, update and remove of {@link Meter} objects delivered via {@link org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent}.
*
* @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
*
*/
public class MeterChangeListener extends AbstractChangeListener {
- private final static Logger LOG = LoggerFactory.getLogger(MeterChangeListener.class);
-
- private final SalMeterService salMeterService;
-
- public SalMeterService getSalMeterService() {
- return this.salMeterService;
- }
+ private static final Logger LOG = LoggerFactory.getLogger(MeterChangeListener.class);
- public MeterChangeListener(final SalMeterService manager) {
- this.salMeterService = manager;
- }
+ private final MeterProvider provider;
- @Override
- protected void validate() throws IllegalStateException {
- MeterTransactionValidator.validate(this);
+ public MeterChangeListener (final MeterProvider provider) {
+ this.provider = Preconditions.checkNotNull(provider, "MeterProvider can not be null !");
}
@Override
- protected void remove(InstanceIdentifier<? extends DataObject> identifier, DataObject removeDataObj) {
- if ((removeDataObj instanceof Meter)) {
+ protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject removeDataObj) {
- final Meter meter = ((Meter) removeDataObj);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final RemoveMeterInputBuilder builder = new RemoveMeterInputBuilder(meter);
+ final Meter meter = ((Meter) removeDataObj);
+ final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+ final RemoveMeterInputBuilder builder = new RemoveMeterInputBuilder(meter);
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setMeterRef(new MeterRef(identifier));
+ builder.setNode(new NodeRef(nodeIdent));
+ builder.setMeterRef(new MeterRef(identifier));
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salMeterService.removeMeter((RemoveMeterInput) builder.build());
- LOG.debug("Transaction {} - Remove Meter has removed meter: {}", new Object[]{uri, removeDataObj});
- }
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalMeterService().removeMeter(builder.build());
+ LOG.debug("Transaction {} - Remove Meter has removed meter: {}", new Object[]{uri, removeDataObj});
}
@Override
- protected void update(InstanceIdentifier<? extends DataObject> identifier, DataObject original, DataObject update) {
- if (original instanceof Meter && update instanceof Meter) {
+ // Translates a config-datastore Meter modification into an UpdateMeter RPC.
+ // The instanceof guard that used to live here moved into preconditionForChange(..),
+ // which the base listener evaluates before dispatching to update(..).
+ protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject original, final DataObject update) {
- final Meter originalMeter = ((Meter) original);
- final Meter updatedMeter = ((Meter) update);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final UpdateMeterInputBuilder builder = new UpdateMeterInputBuilder();
+ final Meter originalMeter = ((Meter) original);
+ final Meter updatedMeter = ((Meter) update);
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+ final UpdateMeterInputBuilder builder = new UpdateMeterInputBuilder();
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setMeterRef(new MeterRef(identifier));
+ builder.setNode(new NodeRef(nodeInstanceId));
+ builder.setMeterRef(new MeterRef(identifier));
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
+ // The transaction id ties this RPC to the originating data-change for tracing.
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
- builder.setUpdatedMeter((UpdatedMeter) (new UpdatedMeterBuilder(updatedMeter)).build());
- builder.setOriginalMeter((OriginalMeter) (new OriginalMeterBuilder(originalMeter)).build());
+ builder.setUpdatedMeter((new UpdatedMeterBuilder(updatedMeter)).build());
+ builder.setOriginalMeter((new OriginalMeterBuilder(originalMeter)).build());
+
+ // RPC service is now looked up through the provider instead of a direct field.
+ this.provider.getSalMeterService().updateMeter(builder.build());
+ LOG.debug("Transaction {} - Update Meter has updated meter {} with {}", new Object[]{uri, original, update});
- this.salMeterService.updateMeter((UpdateMeterInput) builder.build());
- LOG.debug("Transaction {} - Update Meter has updated meter {} with {}", new Object[]{uri, original, update});
- }
}
@Override
- protected void add(InstanceIdentifier<? extends DataObject> identifier, DataObject addDataObj) {
- if ((addDataObj instanceof Meter)) {
+ // Translates a newly created config-datastore Meter into an AddMeter RPC.
+ // Type checking is delegated to preconditionForChange(..), as for update(..).
+ protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject addDataObj) {
+
+ final Meter meter = ((Meter) addDataObj);
+ final InstanceIdentifier<Node> nodeInstanceId = identifier.firstIdentifierOf(Node.class);
+ final AddMeterInputBuilder builder = new AddMeterInputBuilder(meter);
- final Meter meter = ((Meter) addDataObj);
- final InstanceIdentifier<Node> nodeInstanceId = identifier.<Node> firstIdentifierOf(Node.class);
- final AddMeterInputBuilder builder = new AddMeterInputBuilder(meter);
+ builder.setNode(new NodeRef(nodeInstanceId));
+ builder.setMeterRef(new MeterRef(identifier));
- builder.setNode(new NodeRef(nodeInstanceId));
- builder.setMeterRef(new MeterRef(identifier));
+ Uri uri = new Uri(this.getTransactionId());
+ builder.setTransactionUri(uri);
+ this.provider.getSalMeterService().addMeter(builder.build());
+ LOG.debug("Transaction {} - Add Meter has added meter: {}", new Object[]{uri, addDataObj});
+ }
+
+ @Override
+ // Guard evaluated before add/update/remove: the changed object must be a Meter
+ // and its parent Node must be reachable in the datastore (checked via a fresh
+ // read-only transaction).
+ protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject dataObj, final DataObject update) {
- Uri uri = new Uri(this.getTransactionId());
- builder.setTransactionUri(uri);
- this.salMeterService.addMeter((AddMeterInput) builder.build());
- LOG.debug("Transaction {} - Add Meter has added meter: {}", new Object[]{uri, addDataObj});
- }
+ final ReadOnlyTransaction trans = this.provider.getDataService().newReadOnlyTransaction();
+ try {
+ return update != null
+ ? (dataObj instanceof Meter && update instanceof Meter && isNodeAvailable(identifier, trans))
+ : (dataObj instanceof Meter && isNodeAvailable(identifier, trans));
+ } finally {
+ // NOTE(review): without this close() the read-only transaction leaks on every
+ // precondition check; confirm isNodeAvailable(..) does not close it itself.
+ trans.close();
+ }
}
-}
\ No newline at end of file
+}
*/
package org.opendaylight.controller.frm.meter;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sal.binding.api.data.DataProviderService;
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.meters.Meter;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Preconditions;
+
+/**
+ * Meter Provider registers the {@link MeterChangeListener} and it holds all needed
+ * services for {@link MeterChangeListener}.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ */
public class MeterProvider implements AutoCloseable {
- private final static Logger LOG = LoggerFactory.getLogger(MeterProvider.class);
+ private static final Logger LOG = LoggerFactory.getLogger(MeterProvider.class);
- private DataProviderService dataService;
private SalMeterService salMeterService;
+ private DataBroker dataService;
/* DataChangeListener */
- private MeterChangeListener meterDataChangeListener;
- ListenerRegistration<DataChangeListener> meterDataChangeListenerRegistration;
+ private DataChangeListener meterDataChangeListener;
+ private ListenerRegistration<DataChangeListener> meterDataChangeListenerRegistration;
+
+ /**
+ * Provider Initialization Phase.
+ *
+ * @param dataService the {@link DataBroker} later used for listener registration; must not be null
+ */
+ public void init(final DataBroker dataService) {
+ LOG.info("FRM Meter Config Provider initialization.");
+ this.dataService = Preconditions.checkNotNull(dataService, "DataProviderService can not be null !");
+ }
+
+ /**
+ * Listener Registration Phase
+ *
+ * @param rpcRegistry registry used to resolve the {@link SalMeterService} RPC; must not be null
+ */
+ public void start(final RpcConsumerRegistry rpcRegistry) {
+ Preconditions.checkArgument(rpcRegistry != null, "RpcConsumerRegistry can not be null !");
+ this.salMeterService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalMeterService.class),
+ "RPC SalMeterService not found.");
- public void start() {
/* Build Path */
- InstanceIdentifierBuilder<Nodes> nodesBuilder = InstanceIdentifier.<Nodes> builder(Nodes.class);
- InstanceIdentifierBuilder<Node> nodeChild = nodesBuilder.<Node> child(Node.class);
- InstanceIdentifierBuilder<FlowCapableNode> augmentFlowCapNode = nodeChild.<FlowCapableNode> augmentation(FlowCapableNode.class);
- InstanceIdentifierBuilder<Meter> meterChild = augmentFlowCapNode.<Meter> child(Meter.class);
- final InstanceIdentifier<? extends DataObject> meterDataObjectPath = meterChild.toInstance();
+ InstanceIdentifier<Meter> meterIdentifier = InstanceIdentifier.create(Nodes.class)
+ .child(Node.class).augmentation(FlowCapableNode.class).child(Meter.class);
/* DataChangeListener registration */
- this.meterDataChangeListener = new MeterChangeListener(this.salMeterService);
- this.meterDataChangeListenerRegistration = this.dataService.registerDataChangeListener(meterDataObjectPath, meterDataChangeListener);
- LOG.info("Meter Config Provider started.");
- }
+ this.meterDataChangeListener = new MeterChangeListener(MeterProvider.this);
+ // SUBTREE scope: the listener sees changes to meters nested anywhere under the path.
+ this.meterDataChangeListenerRegistration =
+ this.dataService.registerDataChangeListener(LogicalDatastoreType.CONFIGURATION,
+ meterIdentifier, meterDataChangeListener, DataChangeScope.SUBTREE);
- protected DataModificationTransaction startChange() {
- return this.dataService.beginTransaction();
+ LOG.info("FRM Meter Config Provider started.");
}
- public void close() throws Exception {
- if(meterDataChangeListenerRegistration != null){
- meterDataChangeListenerRegistration.close();
+ @Override
+ public void close() {
+ LOG.info("FRM Meter Config Provider stopped.");
+ if (meterDataChangeListenerRegistration != null) {
+ try {
+ meterDataChangeListenerRegistration.close();
+ } catch (Exception e) {
+ // Rethrown so the container sees the failed shutdown; registration is
+ // nulled either way so close() stays idempotent.
+ String errMsg = "Error by stop FRM Meter Config Provider.";
+ LOG.error(errMsg, e);
+ throw new IllegalStateException(errMsg, e);
+ } finally {
+ meterDataChangeListenerRegistration = null;
+ }
}
}
- public void setDataService(final DataProviderService dataService) {
- this.dataService = dataService;
+ public DataChangeListener getMeterDataChangeListener() {
+ return meterDataChangeListener;
+ }
+
+ public DataBroker getDataService() {
+ return dataService;
}
- public void setSalMeterService(final SalMeterService salMeterService) {
- this.salMeterService = salMeterService;
+ public SalMeterService getSalMeterService() {
+ return salMeterService;
}
-}
\ No newline at end of file
+}
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.frm.meter;
-
-public class MeterTransactionValidator {
-
- public static void validate(MeterChangeListener transaction) throws IllegalStateException {
- // NOOP
- }
-}
--- /dev/null
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm.reconil;
+
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+
+import org.opendaylight.controller.frm.AbstractChangeListener;
+import org.opendaylight.controller.frm.FlowCookieProducer;
+import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.meters.Meter;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.AddFlowInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.FlowTableRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.AddGroupInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.GroupRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.types.rev131018.groups.Group;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.AddMeterInputBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.MeterRef;
+import org.opendaylight.yangtools.yang.binding.DataObject;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.ListenableFuture;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * FlowNode Reconciliation Listener
+ * Reconciliation for a new FlowNode
+ * Remove CookieMapKey for removed FlowNode
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public class FlowNodeReconcilListener extends AbstractChangeListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(FlowNodeReconcilListener.class);
+
+ private final FlowNodeReconcilProvider provider;
+
+ public FlowNodeReconcilListener(final FlowNodeReconcilProvider provider) {
+ this.provider = Preconditions.checkNotNull(provider, "Flow Node Reconcil Provider can not be null!");
+ }
+
+ @Override
+ // Created FlowCapableNodes are reconciled via add(..); removed ones have their
+ // flow-cookie cache cleaned via remove(..). Updates are ignored (BASE scope).
+ public void onDataChanged(AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+ /* FlowCapableNode DataObjects for reconciliation */
+ final Set<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> createdEntries =
+ changeEvent.getCreatedData().entrySet();
+ /* FlowCapableNode DataObjects for clean FlowCookieHolder */
+ final Set<InstanceIdentifier<? extends DataObject>> removeEntriesInstanceIdentifiers =
+ changeEvent.getRemovedPaths();
+ for (final Entry<InstanceIdentifier<? extends DataObject>, DataObject> createdEntry : createdEntries) {
+ InstanceIdentifier<? extends DataObject> entryKey = createdEntry.getKey();
+ DataObject entryValue = createdEntry.getValue();
+ if (preconditionForChange(entryKey, entryValue, null)) {
+ this.add(entryKey, entryValue);
+ }
+ }
+ for (final InstanceIdentifier<?> instanceId : removeEntriesInstanceIdentifiers) {
+ // Removed paths carry no value; the pre-removal object comes from original data.
+ Map<InstanceIdentifier<? extends DataObject>, DataObject> origConfigData =
+ changeEvent.getOriginalData();
+ final DataObject removeValue = origConfigData.get(instanceId);
+ if (preconditionForChange(instanceId, removeValue, null)) {
+ this.remove(instanceId, removeValue);
+ }
+ }
+ }
+
+ @Override
+ /* Cleaning FlowCookieManager holder for all node tables */
+ protected void remove(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject removeDataObj) {
+
+ final InstanceIdentifier<FlowCapableNode> flowNodeIdent =
+ identifier.firstIdentifierOf(FlowCapableNode.class);
+ final FlowCapableNode flowNode = ((FlowCapableNode) removeDataObj);
+
+ // NOTE(review): getTable() may return null for a node without tables -- confirm,
+ // otherwise this loop throws NPE.
+ for (Table flowTable : flowNode.getTable()) {
+ final InstanceIdentifier<Table> tableIdent =
+ flowNodeIdent.child(Table.class, flowTable.getKey());
+ FlowCookieProducer.INSTANCE.clean(tableIdent);
+ }
+ }
+
+ @Override
+ /* Reconciliation by connect new FlowCapableNode */
+ protected void add(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject addDataObj) {
+
+ // Re-read the CONFIG copy of the node; the event itself came from OPERATIONAL.
+ final InstanceIdentifier<FlowCapableNode> flowNodeIdent =
+ identifier.firstIdentifierOf(FlowCapableNode.class);
+ final Optional<FlowCapableNode> flowCapNode = this.readFlowCapableNode(flowNodeIdent);
+
+ if (flowCapNode.isPresent()) {
+ final InstanceIdentifier<Node> nodeIdent = identifier.firstIdentifierOf(Node.class);
+ final NodeRef nodeRef = new NodeRef(nodeIdent);
+ /* Groups - have to be first */
+ // NOTE(review): getGroup()/getMeter()/getTable()/getFlow() may each return null
+ // when the config node has none -- confirm before relying on these loops.
+ for (Group group : flowCapNode.get().getGroup()) {
+ final GroupRef groupRef = new GroupRef(flowNodeIdent.child(Group.class, group.getKey()));
+ final AddGroupInputBuilder groupBuilder = new AddGroupInputBuilder(group);
+ groupBuilder.setGroupRef(groupRef);
+ groupBuilder.setNode(nodeRef);
+ this.provider.getSalGroupService().addGroup(groupBuilder.build());
+ }
+ /* Meters */
+ for (Meter meter : flowCapNode.get().getMeter()) {
+ final MeterRef meterRef = new MeterRef(flowNodeIdent.child(Meter.class, meter.getKey()));
+ final AddMeterInputBuilder meterBuilder = new AddMeterInputBuilder(meter);
+ meterBuilder.setMeterRef(meterRef);
+ meterBuilder.setNode(nodeRef);
+ this.provider.getSalMeterService().addMeter(meterBuilder.build());
+ }
+ /* Flows */
+ for (Table flowTable : flowCapNode.get().getTable()) {
+ final InstanceIdentifier<Table> tableIdent = flowNodeIdent.child(Table.class, flowTable.getKey());
+ for (Flow flow : flowTable.getFlow()) {
+ // Each re-pushed flow gets a fresh cookie from the per-table producer.
+ final FlowCookie flowCookie = new FlowCookie(FlowCookieProducer.INSTANCE.getNewCookie(tableIdent));
+ final FlowRef flowRef = new FlowRef(tableIdent.child(Flow.class, flow.getKey()));
+ final FlowTableRef flowTableRef = new FlowTableRef(tableIdent);
+ final AddFlowInputBuilder flowBuilder = new AddFlowInputBuilder(flow);
+ flowBuilder.setCookie(flowCookie);
+ flowBuilder.setNode(nodeRef);
+ flowBuilder.setFlowTable(flowTableRef);
+ flowBuilder.setFlowRef(flowRef);
+ this.provider.getSalFlowService().addFlow(flowBuilder.build());
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void update(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject original, DataObject update) {
+ // NOOP - Listener is registered for DataChangeScope.BASE only
+ }
+
+ @Override
+ protected boolean preconditionForChange(final InstanceIdentifier<? extends DataObject> identifier,
+ final DataObject dataObj, final DataObject update) {
+ return (dataObj instanceof FlowCapableNode);
+ }
+
+ // Blocking read of the FlowCapableNode from the CONFIGURATION datastore;
+ // returns Optional.absent() on any read failure.
+ private Optional<FlowCapableNode> readFlowCapableNode(final InstanceIdentifier<FlowCapableNode> flowNodeIdent) {
+ ReadOnlyTransaction readTrans = this.provider.getDataService().newReadOnlyTransaction();
+ try {
+ ListenableFuture<Optional<FlowCapableNode>> confFlowNode =
+ readTrans.read(LogicalDatastoreType.CONFIGURATION, flowNodeIdent);
+ if (confFlowNode.get().isPresent()) {
+ return Optional.<FlowCapableNode> of(confFlowNode.get().get());
+ } else {
+ return Optional.absent();
+ }
+ }
+ catch (InterruptedException | ExecutionException e) {
+ // NOTE(review): prefer parameterized logging over String.concat here.
+ LOG.error("Unexpected exception by reading flow ".concat(flowNodeIdent.toString()), e);
+ return Optional.absent();
+ }
+ finally {
+ readTrans.close();
+ }
+ }
+}
--- /dev/null
+/**
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.frm.reconil;
+
+import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.sal.binding.api.RpcConsumerRegistry;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.service.rev130819.SalFlowService;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.service.rev130918.SalGroupService;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.service.rev130918.SalMeterService;
+import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * forwardingrules-manager
+ * org.opendaylight.controller.frm
+ *
+ * FlowNode Reconciliation Provider registers the FlowNodeReconcilListener
+ * and it holds all needed services for FlowNodeReconcilListener.
+ *
+ * @author <a href="mailto:vdemcak@cisco.com">Vaclav Demcak</a>
+ *
+ * Created: Jun 13, 2014
+ */
+public class FlowNodeReconcilProvider implements AutoCloseable {
+
+ private static final Logger LOG = LoggerFactory.getLogger(FlowNodeReconcilProvider.class);
+
+ private SalFlowService salFlowService;
+ private SalMeterService salMeterService;
+ private SalGroupService salGroupService;
+ private DataBroker dataService;
+
+ /* DataChangeListener */
+ private DataChangeListener flowNodeReconcilListener;
+ private ListenerRegistration<DataChangeListener> flowNodeReconcilListenerRegistration;
+
+ // Initialization phase: only stores the broker; listener registration happens in start().
+ public void init (final DataBroker dataService) {
+ LOG.info("FRM Flow Node Config Reconcil Provider initialization.");
+
+ this.dataService = Preconditions.checkNotNull(dataService, "DataProviderService can not be null !");
+ }
+
+ // Registration phase: resolves the flow/meter/group RPC services and registers
+ // the reconciliation listener on the OPERATIONAL datastore with BASE scope
+ // (i.e. it only sees FlowCapableNode appear/disappear, not nested changes).
+ public void start( final RpcConsumerRegistry rpcRegistry ) {
+ // NOTE(review): message typo "RpcConcumerRegistry" -> "RpcConsumerRegistry".
+ Preconditions.checkArgument(rpcRegistry != null, "RpcConcumerRegistry can not be null !");
+
+ this.salFlowService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalFlowService.class),
+ "RPC SalFlowService not found.");
+ this.salMeterService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalMeterService.class),
+ "RPC SalMeterService not found.");
+ this.salGroupService = Preconditions.checkNotNull(rpcRegistry.getRpcService(SalGroupService.class),
+ "RPC SalGroupService not found.");
+
+ /* Build Path */
+ InstanceIdentifier<FlowCapableNode> flowCapableNodeIdent =
+ InstanceIdentifier.create(Nodes.class).child(Node.class).augmentation(FlowCapableNode.class);
+
+ /* ReconcilNotificationListener registration */
+ this.flowNodeReconcilListener = new FlowNodeReconcilListener(FlowNodeReconcilProvider.this);
+ this.flowNodeReconcilListenerRegistration = this.dataService.registerDataChangeListener(
+ LogicalDatastoreType.OPERATIONAL, flowCapableNodeIdent, flowNodeReconcilListener, DataChangeScope.BASE);
+ LOG.info("FRM Flow Node Config Reconcil Provider started.");
+ }
+
+ @Override
+ public void close() {
+ LOG.info("FRM Flow Node Config Reconcil Provider stopped.");
+ if (flowNodeReconcilListenerRegistration != null) {
+ try {
+ flowNodeReconcilListenerRegistration.close();
+ } catch (Exception e) {
+ // Rethrown so the container notices the failed shutdown; the registration
+ // reference is cleared either way, keeping close() idempotent.
+ String errMsg = "Error by stop FRM Flow Node Config Reconcil Provider.";
+ LOG.error(errMsg, e);
+ throw new IllegalStateException(errMsg, e);
+ } finally {
+ flowNodeReconcilListenerRegistration = null;
+ }
+ }
+ }
+
+ public DataChangeListener getFlowNodeReconcilListener() {
+ return flowNodeReconcilListener;
+ }
+
+ public DataBroker getDataService() {
+ return dataService;
+ }
+
+ public SalFlowService getSalFlowService() {
+ return salFlowService;
+ }
+
+ public SalMeterService getSalMeterService() {
+ return salMeterService;
+ }
+
+ public SalGroupService getSalGroupService() {
+ return salGroupService;
+ }
+}
uses flow-node-connector;
}
+ augment "/inv:nodes/inv:node/table" {
+ ext:augment-identifier "flow-cookie-mapping";
+ list flow-cookie-map {
+ key "cookie";
+ leaf cookie {
+ type flow:flow-cookie;
+ }
+ leaf-list flow-ids {
+ type flow-id;
+ }
+ }
+ }
}
@Override
public void modifiedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
- // TODO Auto-generated method stub
-
+ removedService(reference, service);
+ addingService(reference);
}
@Override
public void removedService(ServiceReference<BindingAwareBroker> reference, BindingAwareBroker service) {
- // TODO Auto-generated method stub
+ broker = context.getService(reference);
+ mdActivationPool.execute(new Runnable() {
+ @Override
+ public void run() {
+ onBrokerRemoved(broker, context);
+ }
+ });
}
};
protected abstract void onBrokerAvailable(BindingAwareBroker broker, BundleContext context);
protected void onBrokerRemoved(BindingAwareBroker broker, BundleContext context) {
-
+ stopImpl(context);
}
}
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>yang-parser-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>netconf-util</artifactId>
+ </dependency>
<dependency>
<groupId>xmlunit</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.1</version>
</dependency>
+
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
</dependencies>
</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorPath;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.dispatch.BoundedMailbox;
+import akka.dispatch.MailboxType;
+import akka.dispatch.MessageQueue;
+import akka.dispatch.ProducesMessageQueue;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Preconditions;
+import com.typesafe.config.Config;
+import org.opendaylight.controller.common.reporting.MetricsReporter;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+
+// Bounded Akka mailbox that additionally registers a "queue-size" Gauge for the
+// owning actor in the shared MetricsReporter registry (exposed via JMX).
+public class MeteredBoundedMailbox implements MailboxType, ProducesMessageQueue<BoundedMailbox.MessageQueue> {
+
+ private MeteredMessageQueue queue;
+ private Integer capacity;
+ private FiniteDuration pushTimeOut;
+ private ActorPath actorPath;
+ private MetricsReporter reporter;
+
+ // NOTE(review): these are per-instance; conventionally they should be static final.
+ private final String QUEUE_SIZE = "queue-size";
+ // NOTE(review): used below as NANOSECONDS, so the fallback push timeout is 10 ns --
+ // that looks far too small; confirm the intended unit.
+ private final Long DEFAULT_TIMEOUT = 10L;
+
+ // Constructor signature is mandated by Akka's MailboxType contract.
+ public MeteredBoundedMailbox(ActorSystem.Settings settings, Config config) {
+ Preconditions.checkArgument( config.hasPath("mailbox-capacity"), "Missing configuration [mailbox-capacity]" );
+ this.capacity = config.getInt("mailbox-capacity");
+ Preconditions.checkArgument( this.capacity > 0, "mailbox-capacity must be > 0");
+
+ Long timeout = -1L;
+ if ( config.hasPath("mailbox-push-timeout-time") ){
+ timeout = config.getDuration("mailbox-push-timeout-time", TimeUnit.NANOSECONDS);
+ } else {
+ timeout = DEFAULT_TIMEOUT;
+ }
+ Preconditions.checkArgument( timeout > 0, "mailbox-push-timeout-time must be > 0");
+ this.pushTimeOut = new FiniteDuration(timeout, TimeUnit.NANOSECONDS);
+
+ reporter = MetricsReporter.getInstance();
+ }
+
+
+ @Override
+ public MessageQueue create(final scala.Option<ActorRef> owner, scala.Option<ActorSystem> system) {
+ this.queue = new MeteredMessageQueue(this.capacity, this.pushTimeOut);
+ monitorQueueSize(owner, this.queue);
+ return this.queue;
+ }
+
+ // Registers a queue-size gauge for the owner actor; no-ops when there is no
+ // owner or a gauge for this actor path is already registered.
+ private void monitorQueueSize(scala.Option<ActorRef> owner, final MeteredMessageQueue monitoredQueue) {
+ if (owner.isEmpty()) {
+ return; //there's no actor to monitor
+ }
+ actorPath = owner.get().path();
+ MetricRegistry registry = reporter.getMetricsRegistry();
+
+ String actorName = registry.name(actorPath.toString(), QUEUE_SIZE);
+
+ if (registry.getMetrics().containsKey(actorName))
+ return; //already registered
+
+ reporter.getMetricsRegistry().register(actorName,
+ new Gauge<Integer>() {
+ @Override
+ public Integer getValue() {
+ return monitoredQueue.size();
+ }
+ });
+ }
+
+
+ // Thin subclass only to give the bounded queue a public constructor/name.
+ public static class MeteredMessageQueue extends BoundedMailbox.MessageQueue {
+
+ public MeteredMessageQueue(int capacity, FiniteDuration pushTimeOut) {
+ super(capacity, pushTimeOut);
+ }
+ }
+
+}
+
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.reporting;
+
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.MetricRegistry;
+
+/**
+ * Maintains metrics registry that is provided to reporters.
+ * At the moment only one reporter exists {@code JmxReporter}.
+ * More reporters can be added.
+ * <p/>
+ * The consumers of this class will only be interested in the {@code MetricRegistry}
+ * where metrics for that consumer gets stored.
+ */
+public class MetricsReporter implements AutoCloseable{
+
+ // NOTE(review): instance fields written in CONSTANT_CASE; conventionally these
+ // would be static final or renamed to lowerCamelCase.
+ private final MetricRegistry METRICS_REGISTRY = new MetricRegistry();
+ private final String DOMAIN = "org.opendaylight.controller";
+
+ // NOTE(review): public field -- consider making this private and exposing a getter.
+ public final JmxReporter jmxReporter = JmxReporter.forRegistry(METRICS_REGISTRY).inDomain(DOMAIN).build();
+
+ // Eagerly-created singleton; JMX reporting starts as soon as the class loads.
+ private static MetricsReporter inst = new MetricsReporter();
+
+ private MetricsReporter(){
+ jmxReporter.start();
+ }
+
+ public static MetricsReporter getInstance(){
+ return inst;
+ }
+
+ public MetricRegistry getMetricsRegistry(){
+ return METRICS_REGISTRY;
+ }
+
+ @Override
+ public void close() throws Exception {
+ // Stops JMX reporting; the singleton is not recreated afterwards.
+ jmxReporter.close();
+ }
+}
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import org.opendaylight.yangtools.yang.common.QName;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Function;
import com.google.common.base.Objects;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Optional;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.DeadLetter;
+import akka.actor.Props;
+import akka.actor.UntypedActor;
+import akka.japi.Creator;
+import akka.testkit.JavaTestKit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class MeteredBoundedMailboxTest {
+
+ private static ActorSystem actorSystem;
+ // Held by the test thread to park the actor, so messages pile up in its mailbox.
+ private final ReentrantLock lock = new ReentrantLock();
+
+ @Before
+ public void setUp() throws Exception {
+ actorSystem = ActorSystem.create("testsystem");
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (actorSystem != null)
+ actorSystem.shutdown();
+ }
+
+ @Test
+ public void test_WhenQueueIsFull_ShouldSendMsgToDeadLetter() throws InterruptedException {
+ final JavaTestKit mockReceiver = new JavaTestKit(actorSystem);
+ actorSystem.eventStream().subscribe(mockReceiver.getRef(), DeadLetter.class);
+
+
+ final FiniteDuration TEN_SEC = new FiniteDuration(10, TimeUnit.SECONDS);
+ // Mailbox config comes from testsystem.bounded-mailbox in the test reference.conf.
+ String boundedMailBox = actorSystem.name() + ".bounded-mailbox";
+ ActorRef pingPongActor = actorSystem.actorOf(PingPongActor.props(lock).withMailbox(boundedMailBox),
+ "pingpongactor");
+
+ actorSystem.mailboxes().settings();
+ lock.lock();
+ //queue capacity = 10
+ //need to send 12 messages; 1 message is dequeued and actor waits on lock,
+ //2nd to 11th messages are put on the queue
+ //12th message is sent to dead letter.
+ for (int i=0;i<12;i++){
+ pingPongActor.tell("ping", mockReceiver.getRef());
+ }
+
+ mockReceiver.expectMsgClass(TEN_SEC, DeadLetter.class);
+
+ lock.unlock();
+
+ // Drains the 11 "pong" replies; NOTE(review): 'eleven' is otherwise unused.
+ Object[] eleven = mockReceiver.receiveN(11, TEN_SEC);
+ }
+
+ /**
+ * For testing
+ */
+ public static class PingPongActor extends UntypedActor{
+
+ ReentrantLock lock;
+
+ private PingPongActor(ReentrantLock lock){
+ this.lock = lock;
+ }
+
+ public static Props props(final ReentrantLock lock){
+ return Props.create(new Creator<PingPongActor>(){
+ @Override
+ public PingPongActor create() throws Exception {
+ return new PingPongActor(lock);
+ }
+ });
+ }
+
+ @Override
+ public void onReceive(Object message) throws Exception {
+ // Blocks on the test-held lock and never unlocks: relies on ReentrantLock
+ // reentrancy when the same dispatcher thread keeps processing this actor.
+ // NOTE(review): if the dispatcher hands the actor to another thread the
+ // test would deadlock -- confirm this is acceptable for a test-only actor.
+ lock.lock();
+ if ("ping".equals(message))
+ getSender().tell("pong", getSelf());
+ }
+ }
+}
\ No newline at end of file
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.collect.ImmutableList;
+testsystem {
+
+ bounded-mailbox {
+ mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+ mailbox-capacity = 10
+ mailbox-push-timeout-time = 100ms
+ }
+}
\ No newline at end of file
--- /dev/null
+testsystem {
+
+ bounded-mailbox {
+ mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+ mailbox-capacity = 1000
+ mailbox-push-timeout-time = 10ms
+ }
+}
\ No newline at end of file
<version>1.1-SNAPSHOT</version>
</dependency>
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>
<Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
<Export-package></Export-package>
<Private-Package></Private-Package>
- <Import-Package>!*snappy;!org.jboss.*;*</Import-Package>
+ <Import-Package>!*snappy;!org.jboss.*;!com.jcraft.*;*</Import-Package>
<Embed-Dependency>
sal-clustering-commons;
sal-akka-raft;
+ *metrics*;
!sal*;
!*config-api*;
!*testkit*;
Logging.getLogger(getContext().system(), this);
- public AbstractUntypedActor(){
+ public AbstractUntypedActor() {
LOG.debug("Actor created {}", getSelf());
getContext().
system().
@Override public void onReceive(Object message) throws Exception {
LOG.debug("Received message {}", message.getClass().getSimpleName());
handleReceive(message);
- LOG.debug("Done handling message {}", message.getClass().getSimpleName());
+ LOG.debug("Done handling message {}",
+ message.getClass().getSimpleName());
}
protected abstract void handleReceive(Object message) throws Exception;
- protected void ignoreMessage(Object message){
+ protected void ignoreMessage(Object message) {
LOG.debug("Unhandled message {} ", message);
}
- protected void unknownMessage(Object message) throws Exception{
+ protected void unknownMessage(Object message) throws Exception {
+ LOG.debug("Received unhandled message {}", message);
unhandled(message);
}
}
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
+
import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.util.PropertyUtils;
-import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
*/
public class DistributedDataStore implements DOMStore, SchemaContextListener, AutoCloseable {
- private static final Logger
- LOG = LoggerFactory.getLogger(DistributedDataStore.class);
-
- private static final String EXECUTOR_MAX_POOL_SIZE_PROP =
- "mdsal.dist-datastore-executor-pool.size";
- private static final int DEFAULT_EXECUTOR_MAX_POOL_SIZE = 10;
-
- private static final String EXECUTOR_MAX_QUEUE_SIZE_PROP =
- "mdsal.dist-datastore-executor-queue.size";
- private static final int DEFAULT_EXECUTOR_MAX_QUEUE_SIZE = 5000;
+ private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
private final ActorContext actorContext;
private SchemaContext schemaContext;
- /**
- * Executor used to run FutureTask's
- *
- * This is typically used when we need to make a request to an actor and
- * wait for it's response and the consumer needs to be provided a Future.
- */
- private final ListeningExecutorService executor =
- MoreExecutors.listeningDecorator(
- SpecialExecutors.newBlockingBoundedFastThreadPool(
- PropertyUtils.getIntSystemProperty(
- EXECUTOR_MAX_POOL_SIZE_PROP,
- DEFAULT_EXECUTOR_MAX_POOL_SIZE),
- PropertyUtils.getIntSystemProperty(
- EXECUTOR_MAX_QUEUE_SIZE_PROP,
- DEFAULT_EXECUTOR_MAX_QUEUE_SIZE), "DistDataStore"));
-
- public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster, Configuration configuration) {
+ public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster,
+ Configuration configuration, InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
Preconditions.checkNotNull(actorSystem, "actorSystem should not be null");
Preconditions.checkNotNull(type, "type should not be null");
Preconditions.checkNotNull(cluster, "cluster should not be null");
LOG.info("Creating ShardManager : {}", shardManagerId);
this.actorContext = new ActorContext(actorSystem, actorSystem
- .actorOf(ShardManager.props(type, cluster, configuration),
+ .actorOf(ShardManager.props(type, cluster, configuration, dataStoreProperties),
shardManagerId ), cluster, configuration);
}
}
+ @SuppressWarnings("unchecked")
@Override
- public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
+ public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>>
+ ListenerRegistration<L> registerChangeListener(
YangInstanceIdentifier path, L listener,
AsyncDataBroker.DataChangeScope scope) {
Preconditions.checkNotNull(path, "path should not be null");
Preconditions.checkNotNull(listener, "listener should not be null");
-
LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
Object result = actorContext.executeLocalShardOperation(shardName,
- new RegisterChangeListener(path, dataChangeListenerActor.path(),
- scope),
- ActorContext.ASK_DURATION
- );
+ new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+ ActorContext.ASK_DURATION);
if (result != null) {
RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
LOG.debug(
"No local shard for shardName {} was found so returning a noop registration",
shardName);
+
return new NoOpDataChangeListenerRegistration(listener);
}
-
-
-
-
@Override
public DOMStoreTransactionChain createTransactionChain() {
- return new TransactionChainProxy(actorContext, executor, schemaContext);
+ return new TransactionChainProxy(actorContext, schemaContext);
}
@Override
public DOMStoreReadTransaction newReadOnlyTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY,
- executor, schemaContext);
+ schemaContext);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.WRITE_ONLY,
- executor, schemaContext);
+ schemaContext);
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_WRITE,
- executor, schemaContext);
+ schemaContext);
}
@Override public void onGlobalContextUpdated(SchemaContext schemaContext) {
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorSystem;
+
import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
public class DistributedDataStoreFactory {
- public static DistributedDataStore createInstance(String name, SchemaService schemaService){
+ public static DistributedDataStore createInstance(String name, SchemaService schemaService,
+ InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
+
ActorSystem actorSystem = ActorSystemFactory.getInstance();
Configuration config = new ConfigurationImpl("module-shards.conf", "modules.conf");
final DistributedDataStore dataStore =
- new DistributedDataStore(actorSystem, name, new ClusterWrapperImpl(actorSystem),config );
- ShardStrategyFactory.setConfiguration(config);
- schemaService
- .registerSchemaContextListener(dataStore);
+ new DistributedDataStore(actorSystem, name, new ClusterWrapperImpl(actorSystem),
+ config, dataStoreProperties );
+ ShardStrategyFactory.setConfiguration(config);
+ schemaService.registerSchemaContextListener(dataStore);
return dataStore;
-
}
}
import akka.serialization.Serialization;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardTransactionIdentifier;
import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
private final List<ActorSelection> dataChangeListeners = new ArrayList<>();
- private Shard(ShardIdentifier name, Map<ShardIdentifier, String> peerAddresses) {
+ private Shard(ShardIdentifier name, Map<ShardIdentifier, String> peerAddresses,
+ InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
super(name.toString(), mapPeerAddresses(peerAddresses), Optional.of(configParams));
this.name = name;
LOG.info("Shard created : {} persistent : {}", name, persistent);
- store = InMemoryDOMDataStoreFactory.create(name.toString(), null);
+ store = InMemoryDOMDataStoreFactory.create(name.toString(), null, dataStoreProperties);
shardMBean = ShardMBeanFactory.getShardStatsMBean(name.toString());
}
- private static Map<String, String> mapPeerAddresses(Map<ShardIdentifier, String> peerAddresses){
- Map<String , String> map = new HashMap<>();
+ private static Map<String, String> mapPeerAddresses(
+ Map<ShardIdentifier, String> peerAddresses) {
+ Map<String, String> map = new HashMap<>();
- for(Map.Entry<ShardIdentifier, String> entry : peerAddresses.entrySet()){
+ for (Map.Entry<ShardIdentifier, String> entry : peerAddresses
+ .entrySet()) {
map.put(entry.getKey().toString(), entry.getValue());
}
public static Props props(final ShardIdentifier name,
- final Map<ShardIdentifier, String> peerAddresses) {
+ final Map<ShardIdentifier, String> peerAddresses,
+ final InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
Preconditions.checkNotNull(name, "name should not be null");
- Preconditions.checkNotNull(peerAddresses, "peerAddresses should not be null");
+ Preconditions
+ .checkNotNull(peerAddresses, "peerAddresses should not be null");
return Props.create(new Creator<Shard>() {
@Override
public Shard create() throws Exception {
- return new Shard(name, peerAddresses);
+ return new Shard(name, peerAddresses, dataStoreProperties);
}
});
}
} else if (message instanceof PeerAddressResolved) {
PeerAddressResolved resolved = (PeerAddressResolved) message;
- setPeerAddress(resolved.getPeerId().toString(), resolved.getPeerAddress());
+ setPeerAddress(resolved.getPeerId().toString(),
+ resolved.getPeerAddress());
} else {
super.onReceiveCommand(message);
}
}
private ActorRef createTypedTransactionActor(
- CreateTransaction createTransaction, ShardTransactionIdentifier transactionId) {
+ CreateTransaction createTransaction,
+ ShardTransactionIdentifier transactionId) {
if (createTransaction.getTransactionType()
== TransactionProxy.TransactionType.READ_ONLY.ordinal()) {
.props(store.newWriteOnlyTransaction(), getSelf(),
schemaContext), transactionId.toString());
} else {
- // FIXME: This does not seem right
throw new IllegalArgumentException(
- "CreateTransaction message has unidentified transaction type="
+ "Shard="+name + ":CreateTransaction message has unidentified transaction type="
+ createTransaction.getTransactionType());
}
}
private void createTransaction(CreateTransaction createTransaction) {
- ShardTransactionIdentifier transactionId = ShardTransactionIdentifier.builder().remoteTransactionId(createTransaction.getTransactionId()).build();
+ ShardTransactionIdentifier transactionId =
+ ShardTransactionIdentifier.builder()
+ .remoteTransactionId(createTransaction.getTransactionId())
+ .build();
LOG.debug("Creating transaction : {} ", transactionId);
ActorRef transactionActor =
createTypedTransactionActor(createTransaction, transactionId);
getSender()
.tell(new CreateTransactionReply(
- Serialization.serializedActorPath(transactionActor),
- createTransaction.getTransactionId()).toSerializable(),
+ Serialization.serializedActorPath(transactionActor),
+ createTransaction.getTransactionId()).toSerializable(),
getSelf());
}
final ListenableFuture<Void> future = cohort.commit();
final ActorRef self = getSelf();
- future.addListener(new Runnable() {
- @Override
- public void run() {
- try {
- future.get();
- sender
- .tell(new CommitTransactionReply().toSerializable(),
- self);
- shardMBean.incrementCommittedTransactionCount();
- shardMBean.setLastCommittedTransactionTime(new Date());
- } catch (InterruptedException | ExecutionException e) {
- shardMBean.incrementFailedTransactionsCount();
- sender.tell(new akka.actor.Status.Failure(e),self);
- }
+
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ public void onSuccess(Void v) {
+ sender.tell(new CommitTransactionReply().toSerializable(),self);
+ shardMBean.incrementCommittedTransactionCount();
+ shardMBean.setLastCommittedTransactionTime(new Date());
}
- }, getContext().dispatcher());
+
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during commit");
+ shardMBean.incrementFailedTransactionsCount();
+ sender.tell(new akka.actor.Status.Failure(t), self);
+ }
+ });
+
}
private void handleForwardedCommit(ForwardedCommitTransaction message) {
LOG.debug(
"registerDataChangeListener sending reply, listenerRegistrationPath = {} "
- , listenerRegistration.path().toString());
+ , listenerRegistration.path().toString());
getSender()
.tell(new RegisterChangeListenerReply(listenerRegistration.path()),
// Update stats
ReplicatedLogEntry lastLogEntry = getLastLogEntry();
- if(lastLogEntry != null){
+ if (lastLogEntry != null) {
shardMBean.setLastLogIndex(lastLogEntry.getIndex());
shardMBean.setLastLogTerm(lastLogEntry.getTerm());
}
import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryNotFound;
import org.opendaylight.controller.cluster.datastore.messages.UpdateSchemaContext;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
+
import scala.concurrent.duration.Duration;
import java.util.ArrayList;
private ShardManagerInfoMBean mBean;
+ private final InMemoryDOMDataStoreConfigProperties dataStoreProperties;
+
/**
* @param type defines the kind of data that goes into shards created by this shard manager. Examples of type would be
* configuration or operational
*/
- private ShardManager(String type, ClusterWrapper cluster, Configuration configuration) {
+ private ShardManager(String type, ClusterWrapper cluster, Configuration configuration,
+ InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
this.type = Preconditions.checkNotNull(type, "type should not be null");
this.cluster = Preconditions.checkNotNull(cluster, "cluster should not be null");
this.configuration = Preconditions.checkNotNull(configuration, "configuration should not be null");
+ this.dataStoreProperties = dataStoreProperties;
// Subscribe this actor to cluster member events
cluster.subscribeToMemberEvents(getSelf());
public static Props props(final String type,
final ClusterWrapper cluster,
- final Configuration configuration) {
+ final Configuration configuration,
+ final InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
Preconditions.checkNotNull(type, "type should not be null");
Preconditions.checkNotNull(cluster, "cluster should not be null");
@Override
public ShardManager create() throws Exception {
- return new ShardManager(type, cluster, configuration);
+ return new ShardManager(type, cluster, configuration, dataStoreProperties);
}
});
}
ShardIdentifier shardId = getShardIdentifier(memberName, shardName);
Map<ShardIdentifier, String> peerAddresses = getPeerAddresses(shardName);
ActorRef actor = getContext()
- .actorOf(Shard.props(shardId, peerAddresses),
+ .actorOf(Shard.props(shardId, peerAddresses, dataStoreProperties),
shardId.toString());
localShardActorNames.add(shardId.toString());
localShards.put(shardName, new ShardInformation(shardName, actor, peerAddresses));
@Override
public SupervisorStrategy supervisorStrategy() {
+
return new OneForOneStrategy(10, Duration.create("1 minute"),
new Function<Throwable, SupervisorStrategy.Directive>() {
@Override
public SupervisorStrategy.Directive apply(Throwable t) {
+ StringBuilder sb = new StringBuilder();
+ for(StackTraceElement element : t.getStackTrace()) {
+ sb.append("\n\tat ")
+ .append(element.toString());
+ }
+ LOG.warning("Supervisor Strategy of resume applied {}",sb.toString());
return SupervisorStrategy.resume();
}
}
} else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
} else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction,DeleteData.fromSerizalizable(message));
+ deleteData(transaction,DeleteData.fromSerializable(message));
} else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction,new ReadyTransaction());
} else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
} else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
} else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction,DeleteData.fromSerizalizable(message));
+ deleteData(transaction,DeleteData.fromSerializable(message));
} else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction,new ReadyTransaction());
}else {
import akka.event.Logging;
import akka.event.LoggingAdapter;
import akka.japi.Creator;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
-import java.util.concurrent.ExecutionException;
-
public class ThreePhaseCommitCohort extends AbstractUntypedActor {
private final DOMStoreThreePhaseCommitCohort cohort;
private final ActorRef shardActor;
@Override
public void handleReceive(Object message) throws Exception {
- if (message.getClass().equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
+ if (message.getClass()
+ .equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
canCommit(new CanCommitTransaction());
- } else if (message.getClass().equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
+ } else if (message.getClass()
+ .equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
preCommit(new PreCommitTransaction());
- } else if (message.getClass().equals(CommitTransaction.SERIALIZABLE_CLASS)) {
+ } else if (message.getClass()
+ .equals(CommitTransaction.SERIALIZABLE_CLASS)) {
commit(new CommitTransaction());
- } else if (message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) {
+ } else if (message.getClass()
+ .equals(AbortTransaction.SERIALIZABLE_CLASS)) {
abort(new AbortTransaction());
} else {
unknownMessage(message);
final ActorRef sender = getSender();
final ActorRef self = getSelf();
- future.addListener(new Runnable() {
- @Override
- public void run() {
- try {
- future.get();
- sender.tell(new AbortTransactionReply().toSerializable(), self);
- } catch (InterruptedException | ExecutionException e) {
- log.error(e, "An exception happened when aborting");
- }
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ public void onSuccess(Void v) {
+ sender
+ .tell(new AbortTransactionReply().toSerializable(),
+ self);
+ }
+
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during abort");
+ sender
+ .tell(new akka.actor.Status.Failure(t), self);
}
- }, getContext().dispatcher());
+ });
}
private void commit(CommitTransaction message) {
final ListenableFuture<Void> future = cohort.preCommit();
final ActorRef sender = getSender();
final ActorRef self = getSelf();
+ Futures.addCallback(future, new FutureCallback<Void>() {
+ public void onSuccess(Void v) {
+ sender
+ .tell(new PreCommitTransactionReply().toSerializable(),
+ self);
+ }
- future.addListener(new Runnable() {
- @Override
- public void run() {
- try {
- future.get();
- sender.tell(new PreCommitTransactionReply().toSerializable(), self);
- } catch (InterruptedException | ExecutionException e) {
- log.error(e, "An exception happened when preCommitting");
- }
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during pre-commit");
+ sender
+ .tell(new akka.actor.Status.Failure(t), self);
}
- }, getContext().dispatcher());
+ });
}
final ListenableFuture<Boolean> future = cohort.canCommit();
final ActorRef sender = getSender();
final ActorRef self = getSelf();
+ Futures.addCallback(future, new FutureCallback<Boolean>() {
+ public void onSuccess(Boolean canCommit) {
+ sender.tell(new CanCommitTransactionReply(canCommit)
+ .toSerializable(), self);
+ }
- future.addListener(new Runnable() {
- @Override
- public void run() {
- try {
- Boolean canCommit = future.get();
- sender.tell(new CanCommitTransactionReply(canCommit).toSerializable(), self);
- } catch (InterruptedException | ExecutionException e) {
- log.error(e, "An exception happened when checking canCommit");
- }
+ public void onFailure(Throwable t) {
+ LOG.error(t, "An exception happened during canCommit");
+ sender
+ .tell(new akka.actor.Status.Failure(t), self);
}
- }, getContext().dispatcher());
+ });
+
}
}
import akka.actor.ActorPath;
import akka.actor.ActorSelection;
+import akka.dispatch.Futures;
+import akka.dispatch.OnComplete;
+import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.SettableFuture;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import scala.concurrent.Future;
+
import java.util.Collections;
import java.util.List;
-import java.util.concurrent.Callable;
/**
* ThreePhaseCommitCohortProxy represents a set of remote cohort proxies
*/
-public class ThreePhaseCommitCohortProxy implements
- DOMStoreThreePhaseCommitCohort{
+public class ThreePhaseCommitCohortProxy implements DOMStoreThreePhaseCommitCohort{
- private static final Logger
- LOG = LoggerFactory.getLogger(DistributedDataStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
private final ActorContext actorContext;
private final List<ActorPath> cohortPaths;
- private final ListeningExecutorService executor;
private final String transactionId;
-
- public ThreePhaseCommitCohortProxy(ActorContext actorContext,
- List<ActorPath> cohortPaths,
- String transactionId,
- ListeningExecutorService executor) {
-
+ public ThreePhaseCommitCohortProxy(ActorContext actorContext, List<ActorPath> cohortPaths,
+ String transactionId) {
this.actorContext = actorContext;
this.cohortPaths = cohortPaths;
this.transactionId = transactionId;
- this.executor = executor;
}
- @Override public ListenableFuture<Boolean> canCommit() {
+ @Override
+ public ListenableFuture<Boolean> canCommit() {
LOG.debug("txn {} canCommit", transactionId);
- Callable<Boolean> call = new Callable<Boolean>() {
+ Future<Iterable<Object>> combinedFuture =
+ invokeCohorts(new CanCommitTransaction().toSerializable());
+
+ final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+ combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
@Override
- public Boolean call() throws Exception {
- for(ActorPath actorPath : cohortPaths){
-
- Object message = new CanCommitTransaction().toSerializable();
- LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
- ActorSelection cohort = actorContext.actorSelection(actorPath);
-
- try {
- Object response =
- actorContext.executeRemoteOperation(cohort,
- message,
- ActorContext.ASK_DURATION);
-
- if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
- CanCommitTransactionReply reply =
- CanCommitTransactionReply.fromSerializable(response);
- if (!reply.getCanCommit()) {
- return false;
- }
+ public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(failure);
+ return;
+ }
+
+ boolean result = true;
+ for(Object response: responses) {
+ if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
+ CanCommitTransactionReply reply =
+ CanCommitTransactionReply.fromSerializable(response);
+ if (!reply.getCanCommit()) {
+ result = false;
+ break;
}
- } catch(RuntimeException e){
- // FIXME : Need to properly handle this
- LOG.error("Unexpected Exception", e);
- return false;
+ } else {
+ LOG.error("Unexpected response type {}", response.getClass());
+ // String.format uses %s placeholders; "{}" is SLF4J syntax and would
+ // appear literally in the exception message.
+ returnFuture.setException(new IllegalArgumentException(
+ String.format("Unexpected response type %s", response.getClass())));
+ return;
+ }
}
- return true;
+ returnFuture.set(Boolean.valueOf(result));
}
- };
+ }, actorContext.getActorSystem().dispatcher());
+
+ return returnFuture;
+ }
+
+ private Future<Iterable<Object>> invokeCohorts(Object message) {
+ List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
+ for(ActorPath actorPath : cohortPaths) {
+
+ LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
- return executor.submit(call);
+ ActorSelection cohort = actorContext.actorSelection(actorPath);
+
+ futureList.add(actorContext.executeRemoteOperationAsync(cohort, message,
+ ActorContext.ASK_DURATION));
+ }
+
+ return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
}
- @Override public ListenableFuture<Void> preCommit() {
+ @Override
+ public ListenableFuture<Void> preCommit() {
LOG.debug("txn {} preCommit", transactionId);
- return voidOperation(new PreCommitTransaction().toSerializable(), PreCommitTransactionReply.SERIALIZABLE_CLASS);
+ return voidOperation(new PreCommitTransaction().toSerializable(),
+ PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
}
- @Override public ListenableFuture<Void> abort() {
+ @Override
+ public ListenableFuture<Void> abort() {
LOG.debug("txn {} abort", transactionId);
- return voidOperation(new AbortTransaction().toSerializable(), AbortTransactionReply.SERIALIZABLE_CLASS);
+
+ // Note - we pass false for propagateException. In the front-end data broker, this method
+ // is called when one of the 3 phases fails with an exception. We'd rather have that
+ // original exception propagated to the client. If our abort fails and we propagate the
+ // exception then that exception will supersede and suppress the original exception. But
+ // it's the original exception that is the root cause and of more interest to the client.
+
+ return voidOperation(new AbortTransaction().toSerializable(),
+ AbortTransactionReply.SERIALIZABLE_CLASS, false);
}
- @Override public ListenableFuture<Void> commit() {
+ @Override
+ public ListenableFuture<Void> commit() {
LOG.debug("txn {} commit", transactionId);
- return voidOperation(new CommitTransaction().toSerializable(), CommitTransactionReply.SERIALIZABLE_CLASS);
+ return voidOperation(new CommitTransaction().toSerializable(),
+ CommitTransactionReply.SERIALIZABLE_CLASS, true);
}
- private ListenableFuture<Void> voidOperation(final Object message, final Class expectedResponseClass){
- Callable<Void> call = new Callable<Void>() {
-
- @Override public Void call() throws Exception {
- for(ActorPath actorPath : cohortPaths){
- ActorSelection cohort = actorContext.actorSelection(actorPath);
-
- LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
- try {
- Object response =
- actorContext.executeRemoteOperation(cohort,
- message,
- ActorContext.ASK_DURATION);
-
- if (response != null && !response.getClass()
- .equals(expectedResponseClass)) {
- throw new RuntimeException(
- String.format(
- "did not get the expected response \n\t\t expected : %s \n\t\t actual : %s",
- expectedResponseClass.toString(),
- response.getClass().toString())
- );
+ private ListenableFuture<Void> voidOperation(final Object message,
+ final Class<?> expectedResponseClass, final boolean propagateException) {
+
+ Future<Iterable<Object>> combinedFuture = invokeCohorts(message);
+
+ final SettableFuture<Void> returnFuture = SettableFuture.create();
+
+ combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
+ @Override
+ public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+
+ Throwable exceptionToPropagate = failure;
+ if(exceptionToPropagate == null) {
+ for(Object response: responses) {
+ if(!response.getClass().equals(expectedResponseClass)) {
+ // String.format needs %s; the SLF4J-style "{}" placeholder is never
+ // substituted by String.format.
+ exceptionToPropagate = new IllegalArgumentException(
+ String.format("Unexpected response type %s",
+ response.getClass()));
+ break;
+ }
- } catch(TimeoutException e){
- LOG.error(String.format("A timeout occurred when processing operation : %s", message));
}
}
- return null;
+
+ if(exceptionToPropagate != null) {
+ if(propagateException) {
+ // We don't log the exception here to avoid redundant logging since we're
+ // propagating to the caller in MD-SAL core who will log it.
+ returnFuture.setException(exceptionToPropagate);
+ } else {
+ // Since the caller doesn't want us to propagate the exception we'll also
+ // not log it normally. But it's usually not good to totally silence
+ // exceptions so we'll log it to debug level.
+ LOG.debug(String.format("%s failed", message.getClass().getSimpleName()),
+ exceptionToPropagate);
+ returnFuture.set(null);
+ }
+ } else {
+ returnFuture.set(null);
+ }
}
- };
+ }, actorContext.getActorSystem().dispatcher());
- return executor.submit(call);
+ return returnFuture;
}
public List<ActorPath> getCohortPaths() {
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import com.google.common.util.concurrent.ListeningExecutorService;
-
/**
* TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard
*/
public class TransactionChainProxy implements DOMStoreTransactionChain{
private final ActorContext actorContext;
- private final ListeningExecutorService transactionExecutor;
private final SchemaContext schemaContext;
- public TransactionChainProxy(ActorContext actorContext, ListeningExecutorService transactionExecutor,
- SchemaContext schemaContext) {
+ public TransactionChainProxy(ActorContext actorContext, SchemaContext schemaContext) {
this.actorContext = actorContext;
- this.transactionExecutor = transactionExecutor;
this.schemaContext = schemaContext;
}
@Override
public DOMStoreReadTransaction newReadOnlyTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.READ_ONLY, schemaContext);
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.WRITE_ONLY, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_WRITE, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.READ_WRITE, schemaContext);
}
@Override
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.OnComplete;
+
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import com.google.common.util.concurrent.SettableFuture;
+
import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import scala.concurrent.Future;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
/**
private final ActorContext actorContext;
private final Map<String, TransactionContext> remoteTransactionPaths = new HashMap<>();
private final TransactionIdentifier identifier;
- private final ListeningExecutorService executor;
private final SchemaContext schemaContext;
+ private boolean inReadyState;
- public TransactionProxy(
- ActorContext actorContext,
- TransactionType transactionType,
- ListeningExecutorService executor,
- SchemaContext schemaContext
- ) {
+ public TransactionProxy(ActorContext actorContext, TransactionType transactionType,
+ SchemaContext schemaContext) {
this.actorContext = Preconditions.checkNotNull(actorContext, "actorContext should not be null");
this.transactionType = Preconditions.checkNotNull(transactionType, "transactionType should not be null");
- this.executor = Preconditions.checkNotNull(executor, "executor should not be null");
this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null");
String memberName = actorContext.getCurrentMemberName();
if(memberName == null){
memberName = "UNKNOWN-MEMBER";
}
- this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(counter.getAndIncrement()).build();
+
+ this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(
+ counter.getAndIncrement()).build();
LOG.debug("Created txn {}", identifier);
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read(
final YangInstanceIdentifier path) {
+ Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+ "Read operation on write-only transaction is not allowed");
+
LOG.debug("txn {} read {}", identifier, path);
createTransactionIfMissing(actorContext, path);
return transactionContext(path).readData(path);
}
- @Override public CheckedFuture<Boolean, ReadFailedException> exists(
- YangInstanceIdentifier path) {
+ @Override
+ public CheckedFuture<Boolean, ReadFailedException> exists(YangInstanceIdentifier path) {
+
+ Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+ "Exists operation on write-only transaction is not allowed");
+
LOG.debug("txn {} exists {}", identifier, path);
createTransactionIfMissing(actorContext, path);
return transactionContext(path).dataExists(path);
}
+ private void checkModificationState() {
+ Preconditions.checkState(transactionType != TransactionType.READ_ONLY,
+ "Modification operation on read-only transaction is not allowed");
+        Preconditions.checkState(!inReadyState,
+                "Transaction is sealed - further modifications are not allowed");
+ }
+
@Override
public void write(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ checkModificationState();
+
LOG.debug("txn {} write {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public void merge(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ checkModificationState();
+
LOG.debug("txn {} merge {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public void delete(YangInstanceIdentifier path) {
+ checkModificationState();
+
LOG.debug("txn {} delete {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public DOMStoreThreePhaseCommitCohort ready() {
+
+ checkModificationState();
+
+ inReadyState = true;
+
List<ActorPath> cohortPaths = new ArrayList<>();
- LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier, remoteTransactionPaths.size());
+ LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier,
+ remoteTransactionPaths.size());
for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
- LOG.debug("txn {} Readying transaction for shard {}", identifier, transactionContext.getShardName());
+ LOG.debug("txn {} Readying transaction for shard {}", identifier,
+ transactionContext.getShardName());
Object result = transactionContext.readyTransaction();
if(result.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)){
- ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(actorContext.getActorSystem(),result);
- String resolvedCohortPath = transactionContext
- .getResolvedCohortPath(reply.getCohortPath().toString());
+ ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
+ actorContext.getActorSystem(),result);
+ String resolvedCohortPath = transactionContext.getResolvedCohortPath(
+ reply.getCohortPath().toString());
cohortPaths.add(actorContext.actorFor(resolvedCohortPath));
+ } else {
+ LOG.error("Was expecting {} but got {}", ReadyTransactionReply.SERIALIZABLE_CLASS,
+ result.getClass());
}
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString(), executor);
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString());
}
@Override
Object response = actorContext.executeShardOperation(shardName,
new CreateTransaction(identifier.toString(),this.transactionType.ordinal() ).toSerializable(),
ActorContext.ASK_DURATION);
- if (response.getClass()
- .equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
+ if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
CreateTransactionReply reply =
CreateTransactionReply.fromSerializable(response);
transactionActor);
remoteTransactionPaths.put(shardName, transactionContext);
+ } else {
+ LOG.error("Was expecting {} but got {}", CreateTransactionReply.SERIALIZABLE_CLASS,
+ response.getClass());
}
- } catch(TimeoutException | PrimaryNotFoundException e){
+ } catch(Exception e){
LOG.error("txn {} Creating NoOpTransaction because of : {}", identifier, e.getMessage());
- remoteTransactionPaths.put(shardName,
- new NoOpTransactionContext(shardName));
+ remoteTransactionPaths.put(shardName, new NoOpTransactionContext(shardName, e));
}
}
this.actor = actor;
}
- @Override public String getShardName() {
+ @Override
+ public String getShardName() {
return shardName;
}
return actor;
}
- @Override public String getResolvedCohortPath(String cohortPath) {
+ @Override
+ public String getResolvedCohortPath(String cohortPath) {
return actorContext.resolvePath(actorPath, cohortPath);
}
- @Override public void closeTransaction() {
- getActor().tell(
- new CloseTransaction().toSerializable(), null);
+ @Override
+ public void closeTransaction() {
+ actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
}
- @Override public Object readyTransaction() {
+ @Override
+ public Object readyTransaction() {
return actorContext.executeRemoteOperation(getActor(),
- new ReadyTransaction().toSerializable(),
- ActorContext.ASK_DURATION
- );
-
+ new ReadyTransaction().toSerializable(), ActorContext.ASK_DURATION);
}
- @Override public void deleteData(YangInstanceIdentifier path) {
- getActor().tell(new DeleteData(path).toSerializable(), null);
+ @Override
+ public void deleteData(YangInstanceIdentifier path) {
+ actorContext.sendRemoteOperationAsync(getActor(), new DeleteData(path).toSerializable() );
}
- @Override public void mergeData(YangInstanceIdentifier path,
- NormalizedNode<?, ?> data) {
- getActor()
- .tell(new MergeData(path, data, schemaContext).toSerializable(),
- null);
+ @Override
+ public void mergeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ actorContext.sendRemoteOperationAsync(getActor(),
+ new MergeData(path, data, schemaContext).toSerializable());
}
@Override
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
final YangInstanceIdentifier path) {
- Callable<Optional<NormalizedNode<?, ?>>> call =
- new Callable<Optional<NormalizedNode<?, ?>>>() {
-
- @Override public Optional<NormalizedNode<?, ?>> call()
- throws Exception {
- Object response = actorContext
- .executeRemoteOperation(getActor(),
- new ReadData(path).toSerializable(),
- ActorContext.ASK_DURATION);
- if (response.getClass()
- .equals(ReadDataReply.SERIALIZABLE_CLASS)) {
- ReadDataReply reply = ReadDataReply
- .fromSerializable(schemaContext, path,
- response);
+ final SettableFuture<Optional<NormalizedNode<?, ?>>> returnFuture = SettableFuture.create();
+
+ OnComplete<Object> onComplete = new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable failure, Object response) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(new ReadFailedException(
+ "Error reading data for path " + path, failure));
+ } else {
+ if (response.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
+ ReadDataReply reply = ReadDataReply.fromSerializable(schemaContext,
+ path, response);
if (reply.getNormalizedNode() == null) {
- return Optional.absent();
+ returnFuture.set(Optional.<NormalizedNode<?, ?>>absent());
+ } else {
+ returnFuture.set(Optional.<NormalizedNode<?, ?>>of(
+ reply.getNormalizedNode()));
}
- return Optional.<NormalizedNode<?, ?>>of(
- reply.getNormalizedNode());
+ } else {
+ returnFuture.setException(new ReadFailedException(
+ "Invalid response reading data for path " + path));
}
-
- throw new ReadFailedException("Read Failed " + path);
}
- };
+ }
+ };
- return MappingCheckedFuture
- .create(executor.submit(call), ReadFailedException.MAPPER);
- }
+ Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ new ReadData(path).toSerializable(), ActorContext.ASK_DURATION);
+ future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
- @Override public void writeData(YangInstanceIdentifier path,
- NormalizedNode<?, ?> data) {
- getActor()
- .tell(new WriteData(path, data, schemaContext).toSerializable(),
- null);
+ return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
}
- @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
- final YangInstanceIdentifier path) {
-
- Callable<Boolean> call = new Callable<Boolean>() {
-
- @Override public Boolean call() throws Exception {
- Object o = actorContext.executeRemoteOperation(getActor(),
- new DataExists(path).toSerializable(),
- ActorContext.ASK_DURATION
- );
-
+ @Override
+ public void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ actorContext.sendRemoteOperationAsync(getActor(),
+ new WriteData(path, data, schemaContext).toSerializable());
+ }
- if (DataExistsReply.SERIALIZABLE_CLASS
- .equals(o.getClass())) {
- return DataExistsReply.fromSerializable(o).exists();
+ @Override
+ public CheckedFuture<Boolean, ReadFailedException> dataExists(
+ final YangInstanceIdentifier path) {
+
+ final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+ OnComplete<Object> onComplete = new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable failure, Object response) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(new ReadFailedException(
+ "Error checking exists for path " + path, failure));
+ } else {
+ if (response.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) {
+ returnFuture.set(Boolean.valueOf(DataExistsReply.
+ fromSerializable(response).exists()));
+ } else {
+ returnFuture.setException(new ReadFailedException(
+ "Invalid response checking exists for path " + path));
+ }
}
-
- throw new ReadFailedException("Exists Failed " + path);
}
};
- return MappingCheckedFuture
- .create(executor.submit(call), ReadFailedException.MAPPER);
+
+ Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ new DataExists(path).toSerializable(), ActorContext.ASK_DURATION);
+ future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
+
+ return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
}
}
LOG = LoggerFactory.getLogger(NoOpTransactionContext.class);
private final String shardName;
+ private final Exception failure;
private ActorRef cohort;
- public NoOpTransactionContext(String shardName){
+ public NoOpTransactionContext(String shardName, Exception failure){
this.shardName = shardName;
+ this.failure = failure;
}
- @Override public String getShardName() {
+
+ @Override
+ public String getShardName() {
return shardName;
}
- @Override public String getResolvedCohortPath(String cohortPath) {
+ @Override
+ public String getResolvedCohortPath(String cohortPath) {
return cohort.path().toString();
}
- @Override public void closeTransaction() {
+ @Override
+ public void closeTransaction() {
LOG.warn("txn {} closeTransaction called", identifier);
}
return new ReadyTransactionReply(cohort.path()).toSerializable();
}
- @Override public void deleteData(YangInstanceIdentifier path) {
+ @Override
+ public void deleteData(YangInstanceIdentifier path) {
LOG.warn("txt {} deleteData called path = {}", identifier, path);
}
- @Override public void mergeData(YangInstanceIdentifier path,
+ @Override
+ public void mergeData(YangInstanceIdentifier path,
NormalizedNode<?, ?> data) {
LOG.warn("txn {} mergeData called path = {}", identifier, path);
}
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
YangInstanceIdentifier path) {
LOG.warn("txn {} readData called path = {}", identifier, path);
- return Futures.immediateCheckedFuture(
- Optional.<NormalizedNode<?, ?>>absent());
+ return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+ "Error reading data for path " + path, failure));
}
@Override public void writeData(YangInstanceIdentifier path,
@Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
YangInstanceIdentifier path) {
LOG.warn("txn {} dataExists called path = {}", identifier, path);
-
- // Returning false instead of an exception to keep this aligned with
- // read
- return Futures.immediateCheckedFuture(false);
+ return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+ "Error checking exists for path " + path, failure));
}
}
.setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path)).build();
}
- public static DeleteData fromSerizalizable(Object serializable){
+ public static DeleteData fromSerializable(Object serializable){
ShardTransactionMessages.DeleteData o = (ShardTransactionMessages.DeleteData) serializable;
return new DeleteData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
}
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
import akka.util.Timeout;
+
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.Configuration;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
private final ClusterWrapper clusterWrapper;
private final Configuration configuration;
- private SchemaContext schemaContext = null;
-
public ActorContext(ActorSystem actorSystem, ActorRef shardManager,
ClusterWrapper clusterWrapper,
Configuration configuration) {
}
}
+ /**
+ * Execute an operation on a remote actor asynchronously.
+ *
+ * @param actor the ActorSelection
+ * @param message the message to send
+     * @param duration the maximum amount of time to send the message
+ * @return a Future containing the eventual result
+ */
+ public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message,
+ FiniteDuration duration) {
+
+ LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
+
+ return ask(actor, message, new Timeout(duration));
+ }
+
+ /**
+ * Sends an operation to be executed by a remote actor asynchronously without waiting for a
+ * reply (essentially set and forget).
+ *
+ * @param actor the ActorSelection
+ * @param message the message to send
+ */
+ public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
+ actor.tell(message, ActorRef.noSender());
+ }
+
/**
* Execute an operation on the primary for a given shard
* <p>
package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
import org.opendaylight.controller.cluster.datastore.DistributedDataStoreFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
public class DistributedConfigDataStoreProviderModule extends
org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedConfigDataStoreProviderModule {
@Override
public java.lang.AutoCloseable createInstance() {
- return DistributedDataStoreFactory
- .createInstance("config", getConfigSchemaServiceDependency());
+ return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(),
+ InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
+ getMaxShardDataChangeExecutorQueueSize(),
+ getMaxShardDataChangeListenerQueueSize()));
}
}
package org.opendaylight.controller.config.yang.config.distributed_datastore_provider;
import org.opendaylight.controller.cluster.datastore.DistributedDataStoreFactory;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
public class DistributedOperationalDataStoreProviderModule extends
org.opendaylight.controller.config.yang.config.distributed_datastore_provider.AbstractDistributedOperationalDataStoreProviderModule {
@Override
public java.lang.AutoCloseable createInstance() {
- return DistributedDataStoreFactory
- .createInstance("operational", getOperationalSchemaServiceDependency());
+ return DistributedDataStoreFactory.createInstance("operational",
+ getOperationalSchemaServiceDependency(),
+ InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
+ getMaxShardDataChangeExecutorQueueSize(),
+ getMaxShardDataChangeListenerQueueSize()));
}
}
case distributed-config-datastore-provider {
when "/config:modules/config:module/config:type = 'distributed-config-datastore-provider'";
container config-schema-service {
- uses config:service-ref {
- refine type {
- mandatory false;
- config:required-identity sal:schema-service;
- }
- }
- }
+ uses config:service-ref {
+ refine type {
+ mandatory false;
+ config:required-identity sal:schema-service;
+ }
+ }
+ }
+
+ leaf max-shard-data-change-executor-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for each shard's data store data change notification executor.";
+ }
+
+ leaf max-shard-data-change-executor-pool-size {
+ default 20;
+ type uint16;
+ description "The maximum thread pool size for each shard's data store data change notification executor.";
+ }
+
+ leaf max-shard-data-change-listener-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for each shard's data store data change listeners.";
+ }
}
}
// Augments the 'configuration' choice node under modules/module.
- augment "/config:modules/config:module/config:configuration" {
- case distributed-operational-datastore-provider {
- when "/config:modules/config:module/config:type = 'distributed-operational-datastore-provider'";
+ augment "/config:modules/config:module/config:configuration" {
+ case distributed-operational-datastore-provider {
+ when "/config:modules/config:module/config:type = 'distributed-operational-datastore-provider'";
container operational-schema-service {
- uses config:service-ref {
- refine type {
- mandatory false;
- config:required-identity sal:schema-service;
- }
- }
- }
+ uses config:service-ref {
+ refine type {
+ mandatory false;
+ config:required-identity sal:schema-service;
+ }
+ }
+ }
+
+ leaf max-shard-data-change-executor-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for each shard's data store data change notification executor.";
+ }
+
+ leaf max-shard-data-change-executor-pool-size {
+ default 20;
+ type uint16;
+ description "The maximum thread pool size for each shard's data store data change notification executor.";
+ }
+
+ leaf max-shard-data-change-listener-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for each shard's data store data change listeners.";
+ }
}
}
}
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+ final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
final ActorRef shard = getSystem().actorOf(props);
new Within(duration("5 seconds")) {
protected void run() {
try {
final DistributedDataStore distributedDataStore =
- new DistributedDataStore(getSystem(), "config", new MockClusterWrapper(), configuration);
+ new DistributedDataStore(getSystem(), "config", new MockClusterWrapper(), configuration, null);
distributedDataStore.onGlobalContextUpdated(TestModel.createTestContext());
try {
final DistributedDataStore distributedDataStore =
new DistributedDataStore(getSystem(), "config",
- new MockClusterWrapper(), configuration);
+ new MockClusterWrapper(), configuration, null);
distributedDataStore.onGlobalContextUpdated(
SchemaContextHelper.full());
ActorSystem actorSystem = mock(ActorSystem.class);
new DistributedDataStore(actorSystem, "config",
- mock(ClusterWrapper.class), mock(Configuration.class));
+ mock(ClusterWrapper.class), mock(Configuration.class), null);
verify(actorSystem).actorOf(any(Props.class), eq("shardmanager-config"));
}
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", new MockClusterWrapper(),
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", new MockClusterWrapper(),
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", new MockClusterWrapper(),
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", mockClusterWrapper,
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", new MockClusterWrapper(),
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
new JavaTestKit(system) {{
final Props props = ShardManager
.props("config", new MockClusterWrapper(),
- new MockConfiguration());
+ new MockConfiguration(), null);
final TestActorRef<ShardManager> subject =
TestActorRef.create(system, props);
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+ final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
final ActorRef subject =
getSystem().actorOf(props, "testCreateTransactionChain");
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+ final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
final ActorRef subject =
getSystem().actorOf(props, "testRegisterChangeListener");
ShardIdentifier.builder().memberName("member-1")
.shardName("inventory").type("config").build();
- final Props props = Shard.props(identifier, Collections.EMPTY_MAP);
+ final Props props = Shard.props(identifier, Collections.EMPTY_MAP, null);
final ActorRef subject =
getSystem().actorOf(props, "testCreateTransaction");
.shardName("inventory").type("config").build();
peerAddresses.put(identifier, null);
- final Props props = Shard.props(identifier, peerAddresses);
+ final Props props = Shard.props(identifier, peerAddresses, null);
final ActorRef subject =
getSystem().actorOf(props, "testPeerAddressResolved");
/**
* Covers negative test cases
+ *
* @author Basheeruddin Ahmed <syedbahm@cisco.com>
*/
public class ShardTransactionFailureTest extends AbstractActorTest {
private static final ShardIdentifier SHARD_IDENTIFIER =
ShardIdentifier.builder().memberName("member-1")
- .shardName("inventory").type("config").build();
+ .shardName("inventory").type("operational").build();
static {
store.onGlobalContextUpdated(testSchemaContext);
throws Throwable {
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadOnlyTransaction(), shard,
TestModel.createTestContext());
throws Throwable {
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard,
TestModel.createTestContext());
throws Throwable {
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard,
TestModel.createTestContext());
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
TestModel.createTestContext());
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard,
TestModel.createTestContext());
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard,
TestModel.createTestContext());
final ActorRef shard =
- getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard,
TestModel.createTestContext());
}
-
-
}
@Test
public void testOnReceiveReadData() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext);
final ActorRef subject = getSystem().actorOf(props, "testReadData");
@Test
public void testOnReceiveReadDataWhenDataNotFound() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props( store.newReadOnlyTransaction(), shard, testSchemaContext);
final ActorRef subject = getSystem().actorOf(props, "testReadDataWhenDataNotFound");
@Test
public void testOnReceiveDataExistsPositive() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext);
final ActorRef subject = getSystem().actorOf(props, "testDataExistsPositive");
@Test
public void testOnReceiveDataExistsNegative() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER,
+ Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadOnlyTransaction(), shard, testSchemaContext);
final ActorRef subject = getSystem().actorOf(props, "testDataExistsNegative");
@Test
public void testOnReceiveWriteData() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newWriteOnlyTransaction(), shard, TestModel.createTestContext());
final ActorRef subject =
@Test
public void testOnReceiveMergeData() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard, testSchemaContext);
final ActorRef subject =
@Test
public void testOnReceiveDeleteData() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props( store.newWriteOnlyTransaction(), shard, TestModel.createTestContext());
final ActorRef subject =
@Test
public void testOnReceiveReadyTransaction() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props( store.newReadWriteTransaction(), shard, TestModel.createTestContext());
final ActorRef subject =
@Test
public void testOnReceiveCloseTransaction() throws Exception {
new JavaTestKit(getSystem()) {{
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadWriteTransaction(), shard, TestModel.createTestContext());
final ActorRef subject =
public void testNegativePerformingWriteOperationOnReadTransaction() throws Exception {
try {
- final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP));
+ final ActorRef shard = getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, null));
final Props props =
ShardTransaction.props(store.newReadOnlyTransaction(), shard, TestModel.createTestContext());
final TestActorRef subject = TestActorRef.apply(props,getSystem());
--- /dev/null
+/*
+ *
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ *
+ */
+
+package org.opendaylight.controller.cluster.datastore;
+
+import akka.actor.ActorRef;
+import akka.actor.Props;
+import akka.testkit.TestActorRef;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.ForwardedCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.modification.CompositeModification;
+import org.opendaylight.controller.cluster.datastore.modification.Modification;
+import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
+import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
+import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
+import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
+import org.opendaylight.controller.protobuff.messages.persistent.PersistentMessages;
+import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.when;
+
+
+public class ThreePhaseCommitCohortFailureTest extends AbstractActorTest {
+
+ private static ListeningExecutorService storeExecutor =
+ MoreExecutors.listeningDecorator(MoreExecutors.sameThreadExecutor());
+
+ private static final InMemoryDOMDataStore store =
+ new InMemoryDOMDataStore("OPER", storeExecutor,
+ MoreExecutors.sameThreadExecutor());
+
+ private static final SchemaContext testSchemaContext =
+ TestModel.createTestContext();
+
+ private static final ShardIdentifier SHARD_IDENTIFIER =
+ ShardIdentifier.builder().memberName("member-1")
+ .shardName("inventory").type("config").build();
+
+ static {
+ store.onGlobalContextUpdated(testSchemaContext);
+ }
+
+ private FiniteDuration ASK_RESULT_DURATION = Duration.create(5000, TimeUnit.MILLISECONDS);
+
+
+ @Test(expected = TestException.class)
+ public void testNegativeAbortResultsInException() throws Exception {
+
+ final ActorRef shard =
+ getSystem()
+ .actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
+ final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
+ .mock(DOMStoreThreePhaseCommitCohort.class);
+ final CompositeModification mockComposite =
+ Mockito.mock(CompositeModification.class);
+ final Props props =
+ ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite);
+
+ final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
+ .create(getSystem(), props,
+ "testNegativeAbortResultsInException");
+
+ when(mockCohort.abort()).thenReturn(
+ Futures.<Void>immediateFailedFuture(new TestException()));
+
+ Future<Object> future =
+ akka.pattern.Patterns.ask(subject,
+ ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder()
+ .build(), 3000);
+ assertTrue(future.isCompleted());
+
+ Await.result(future, ASK_RESULT_DURATION);
+
+
+
+ }
+
+
+ @Test(expected = OptimisticLockFailedException.class)
+ public void testNegativeCanCommitResultsInException() throws Exception {
+
+ final ActorRef shard =
+ getSystem()
+ .actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
+ final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
+ .mock(DOMStoreThreePhaseCommitCohort.class);
+ final CompositeModification mockComposite =
+ Mockito.mock(CompositeModification.class);
+ final Props props =
+ ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite);
+
+ final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
+ .create(getSystem(), props,
+ "testNegativeCanCommitResultsInException");
+
+ when(mockCohort.canCommit()).thenReturn(
+ Futures
+ .<Boolean>immediateFailedFuture(
+ new OptimisticLockFailedException("some exception")));
+
+ Future<Object> future =
+ akka.pattern.Patterns.ask(subject,
+ ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder()
+ .build(), 3000);
+
+
+ Await.result(future, ASK_RESULT_DURATION);
+
+ }
+
+
+ @Test(expected = TestException.class)
+ public void testNegativePreCommitResultsInException() throws Exception {
+
+ final ActorRef shard =
+ getSystem()
+ .actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null));
+ final DOMStoreThreePhaseCommitCohort mockCohort = Mockito
+ .mock(DOMStoreThreePhaseCommitCohort.class);
+ final CompositeModification mockComposite =
+ Mockito.mock(CompositeModification.class);
+ final Props props =
+ ThreePhaseCommitCohort.props(mockCohort, shard, mockComposite);
+
+ final TestActorRef<ThreePhaseCommitCohort> subject = TestActorRef
+ .create(getSystem(), props,
+ "testNegativePreCommitResultsInException");
+
+ when(mockCohort.preCommit()).thenReturn(
+ Futures
+ .<Void>immediateFailedFuture(
+ new TestException()));
+
+ Future<Object> future =
+ akka.pattern.Patterns.ask(subject,
+ ThreePhaseCommitCohortMessages.PreCommitTransaction.newBuilder()
+ .build(), 3000);
+
+ Await.result(future, ASK_RESULT_DURATION);
+
+ }
+
+ @Test(expected = TestException.class)
+ public void testNegativeCommitResultsInException() throws Exception {
+
+ final TestActorRef<Shard> subject = TestActorRef
+ .create(getSystem(),
+ Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP,null),
+ "testNegativeCommitResultsInException");
+
+ final ActorRef shardTransaction =
+ getSystem().actorOf(
+ ShardTransaction.props(store.newReadWriteTransaction(), subject,
+ TestModel.createTestContext()));
+
+ ShardTransactionMessages.WriteData writeData =
+ ShardTransactionMessages.WriteData.newBuilder()
+ .setInstanceIdentifierPathArguments(
+ NormalizedNodeMessages.InstanceIdentifier.newBuilder()
+ .build()).setNormalizedNode(
+ NormalizedNodeMessages.Node.newBuilder().build()
+
+ ).build();
+
+ //This is done so that Modification list is updated which is used during commit
+ Future future =
+ akka.pattern.Patterns.ask(shardTransaction, writeData, 3000);
+
+ //ready transaction creates the cohort so that we get into the
+ //block wherein commit is done
+ ShardTransactionMessages.ReadyTransaction readyTransaction =
+ ShardTransactionMessages.ReadyTransaction.newBuilder().build();
+
+ future =
+ akka.pattern.Patterns.ask(shardTransaction, readyTransaction, 3000);
+
+ //but when the message is sent it will have the MockCommit object
+ //so that we can simulate throwing of exception
+ ForwardedCommitTransaction mockForwardCommitTransaction =
+ Mockito.mock(ForwardedCommitTransaction.class);
+ DOMStoreThreePhaseCommitCohort mockThreePhaseCommitTransaction =
+ Mockito.mock(DOMStoreThreePhaseCommitCohort.class);
+ when(mockForwardCommitTransaction.getCohort())
+ .thenReturn(mockThreePhaseCommitTransaction);
+ when(mockThreePhaseCommitTransaction.commit()).thenReturn(Futures
+ .<Void>immediateFailedFuture(
+ new TestException()));
+ Modification mockModification = Mockito.mock(
+ Modification.class);
+ when(mockForwardCommitTransaction.getModification())
+ .thenReturn(mockModification);
+
+ when(mockModification.toSerializable()).thenReturn(
+ PersistentMessages.CompositeModification.newBuilder().build());
+
+ future =
+ akka.pattern.Patterns.ask(subject,
+ mockForwardCommitTransaction
+ , 3000);
+ Await.result(future, ASK_RESULT_DURATION);
+
+
+ }
+
+ private class TestException extends Exception {
+ }
+
+
+}
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorRef;
+import akka.actor.ActorPath;
+import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.Futures;
+import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.isA;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.times;
-import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.stubbing.Stubber;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.SerializableMessage;
+import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
+import scala.concurrent.duration.FiniteDuration;
-import java.util.Arrays;
-import java.util.concurrent.Executors;
-
-import static org.junit.Assert.assertNotNull;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
public class ThreePhaseCommitCohortProxyTest extends AbstractActorTest {
- private ThreePhaseCommitCohortProxy proxy;
- private Props props;
- private ActorRef actorRef;
- private MockActorContext actorContext;
- private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(
- Executors.newSingleThreadExecutor());
+ @Mock
+ private ActorContext actorContext;
@Before
- public void setUp(){
- props = Props.create(MessageCollectorActor.class);
- actorRef = getSystem().actorOf(props);
- actorContext = new MockActorContext(this.getSystem());
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
- proxy =
- new ThreePhaseCommitCohortProxy(actorContext,
- Arrays.asList(actorRef.path()), "txn-1", executor);
+ doReturn(getSystem()).when(actorContext).getActorSystem();
+ }
+ private ThreePhaseCommitCohortProxy setupProxy(int nCohorts) {
+ List<ActorPath> cohorts = Lists.newArrayList();
+ for(int i = 1; i <= nCohorts; i++) {
+ ActorPath path = getSystem().actorOf(Props.create(MessageCollectorActor.class)).path();
+ cohorts.add(path);
+ doReturn(mock(ActorSelection.class)).when(actorContext).actorSelection(path);
+ }
+
+ return new ThreePhaseCommitCohortProxy(actorContext, cohorts, "txn-1");
}
- @After
- public void tearDown() {
- executor.shutdownNow();
+ private void setupMockActorContext(Class<?> requestType, Object... responses) {
+ Stubber stubber = doReturn(responses[0] instanceof Throwable ? Futures
+ .failed((Throwable) responses[0]) : Futures
+ .successful(((SerializableMessage) responses[0]).toSerializable()));
+
+ for(int i = 1; i < responses.length; i++) {
+ stubber = stubber.doReturn(responses[i] instanceof Throwable ? Futures
+ .failed((Throwable) responses[i]) : Futures
+ .successful(((SerializableMessage) responses[i]).toSerializable()));
+ }
+
+ stubber.when(actorContext).executeRemoteOperationAsync(any(ActorSelection.class),
+ isA(requestType), any(FiniteDuration.class));
+ }
+
+ private void verifyCohortInvocations(int nCohorts, Class<?> requestType) {
+ verify(actorContext, times(nCohorts)).executeRemoteOperationAsync(
+ any(ActorSelection.class), isA(requestType), any(FiniteDuration.class));
+ }
+
+ @Test
+ public void testCanCommitWithOneCohort() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true));
+
+ ListenableFuture<Boolean> future = proxy.canCommit();
+
+ assertEquals("canCommit", true, future.get());
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(false));
+
+ future = proxy.canCommit();
+
+ assertEquals("canCommit", false, future.get());
+
+ verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
}
@Test
- public void testCanCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new CanCommitTransactionReply(true).toSerializable());
+ public void testCanCommitWithMultipleCohorts() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true), new CanCommitTransactionReply(true));
ListenableFuture<Boolean> future = proxy.canCommit();
- Assert.assertTrue(future.get().booleanValue());
+ assertEquals("canCommit", true, future.get());
+ verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test
+ public void testCanCommitWithMultipleCohortsAndOneFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(3);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true), new CanCommitTransactionReply(false),
+ new CanCommitTransactionReply(true));
+
+ ListenableFuture<Boolean> future = proxy.canCommit();
+
+ assertEquals("canCommit", false, future.get());
+
+ verifyCohortInvocations(3, CanCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCanCommitWithExceptionFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+ proxy.canCommit().get();
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCanCommitWithInvalidResponseType() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply());
+
+ proxy.canCommit().get();
}
@Test
public void testPreCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new PreCommitTransactionReply().toSerializable());
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
- ListenableFuture<Void> future = proxy.preCommit();
+ setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply());
- future.get();
+ proxy.preCommit().get();
+ verifyCohortInvocations(1, PreCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testPreCommitWithFailure() throws Exception {
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply(), new RuntimeException("mock"));
+
+ proxy.preCommit().get();
}
@Test
public void testAbort() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new AbortTransactionReply().toSerializable());
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
- ListenableFuture<Void> future = proxy.abort();
+ setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new AbortTransactionReply());
- future.get();
+ proxy.abort().get();
+ verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test
+ public void testAbortWithFailure() throws Exception {
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+ // The exception should not get propagated.
+ proxy.abort().get();
+
+ verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
}
@Test
public void testCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new CommitTransactionReply().toSerializable());
- ListenableFuture<Void> future = proxy.commit();
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+ new CommitTransactionReply());
+
+ proxy.commit().get();
+
+ verifyCohortInvocations(2, CommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCommitWithFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
- future.get();
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+ new RuntimeException("mock"));
+
+ proxy.commit().get();
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCommitWithInvalidResponseType() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new PreCommitTransactionReply());
+
+ proxy.commit().get();
}
@Test
- public void testGetCohortPaths() throws Exception {
- assertNotNull(proxy.getCohortPaths());
+ public void testGetCohortPaths() {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ List<ActorPath> paths = proxy.getCohortPaths();
+ assertNotNull("getCohortPaths returned null", paths);
+ assertEquals("getCohortPaths size", 2, paths.size());
}
}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+import akka.actor.ActorPath;
import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.Futures;
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import junit.framework.Assert;
-import org.junit.After;
+
import org.junit.Before;
import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
+
+import org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.DataExists;
import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
import org.opendaylight.controller.cluster.datastore.messages.MergeData;
-import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
+import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.DefaultShardStrategy;
import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
-import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
-import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-import java.util.List;
-import java.util.concurrent.Executors;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
-import static junit.framework.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.argThat;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.isA;
+
+@SuppressWarnings("resource")
public class TransactionProxyTest extends AbstractActorTest {
+ @SuppressWarnings("serial")
+ static class TestException extends RuntimeException {
+ }
+
+ static interface Invoker {
+ void invoke(TransactionProxy proxy) throws Exception;
+ }
+
private final Configuration configuration = new MockConfiguration();
- private final ActorContext testContext =
- new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)), new MockClusterWrapper(), configuration );
+ @Mock
+ private ActorContext mockActorContext;
- private final ListeningExecutorService transactionExecutor =
- MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+ private SchemaContext schemaContext;
+
+ String memberName = "mock-member";
@Before
public void setUp(){
- ShardStrategyFactory.setConfiguration(configuration);
- }
+ MockitoAnnotations.initMocks(this);
- @After
- public void tearDown() {
- transactionExecutor.shutdownNow();
- }
+ schemaContext = TestModel.createTestContext();
- @Test
- public void testRead() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ doReturn(getSystem()).when(mockActorContext).getActorSystem();
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ ShardStrategyFactory.setConfiguration(configuration);
+ }
+ private CreateTransaction eqCreateTransaction(final String memberName,
+ final TransactionType type) {
+ ArgumentMatcher<CreateTransaction> matcher = new ArgumentMatcher<CreateTransaction>() {
+ @Override
+ public boolean matches(Object argument) {
+ CreateTransaction obj = CreateTransaction.fromSerializable(argument);
+ return obj.getTransactionId().startsWith(memberName) &&
+ obj.getTransactionType() == type.ordinal();
+ }
+ };
+
+ return argThat(matcher);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ private DataExists eqDataExists() {
+ ArgumentMatcher<DataExists> matcher = new ArgumentMatcher<DataExists>() {
+ @Override
+ public boolean matches(Object argument) {
+ DataExists obj = DataExists.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
+ return argThat(matcher);
+ }
- actorContext.setExecuteRemoteOperationResponse(
- new ReadDataReply(TestModel.createTestContext(), null)
- .toSerializable());
+ private ReadData eqReadData() {
+ ArgumentMatcher<ReadData> matcher = new ArgumentMatcher<ReadData>() {
+ @Override
+ public boolean matches(Object argument) {
+ ReadData obj = ReadData.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
+ return argThat(matcher);
+ }
- Optional<NormalizedNode<?, ?>> normalizedNodeOptional = read.get();
+ private WriteData eqWriteData(final NormalizedNode<?, ?> nodeToWrite) {
+ ArgumentMatcher<WriteData> matcher = new ArgumentMatcher<WriteData>() {
+ @Override
+ public boolean matches(Object argument) {
+ WriteData obj = WriteData.fromSerializable(argument, schemaContext);
+ return obj.getPath().equals(TestModel.TEST_PATH) &&
+ obj.getData().equals(nodeToWrite);
+ }
+ };
+
+ return argThat(matcher);
+ }
- Assert.assertFalse(normalizedNodeOptional.isPresent());
+ private MergeData eqMergeData(final NormalizedNode<?, ?> nodeToWrite) {
+ ArgumentMatcher<MergeData> matcher = new ArgumentMatcher<MergeData>() {
+ @Override
+ public boolean matches(Object argument) {
+ MergeData obj = MergeData.fromSerializable(argument, schemaContext);
+ return obj.getPath().equals(TestModel.TEST_PATH) &&
+ obj.getData().equals(nodeToWrite);
+ }
+ };
+
+ return argThat(matcher);
+ }
- actorContext.setExecuteRemoteOperationResponse(new ReadDataReply(
- TestModel.createTestContext(),ImmutableNodes.containerNode(TestModel.TEST_QNAME)).toSerializable());
+ private DeleteData eqDeleteData() {
+ ArgumentMatcher<DeleteData> matcher = new ArgumentMatcher<DeleteData>() {
+ @Override
+ public boolean matches(Object argument) {
+ DeleteData obj = DeleteData.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
- read = transactionProxy.read(TestModel.TEST_PATH);
+ return argThat(matcher);
+ }
- normalizedNodeOptional = read.get();
+ private Object readyTxReply(ActorPath path) {
+ return new ReadyTransactionReply(path).toSerializable();
+ }
- Assert.assertTrue(normalizedNodeOptional.isPresent());
+ private Future<Object> readDataReply(NormalizedNode<?, ?> data) {
+ return Futures.successful(new ReadDataReply(schemaContext, data)
+ .toSerializable());
}
- @Test
- public void testExists() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ private Future<Object> dataExistsReply(boolean exists) {
+ return Futures.successful(new DataExistsReply(exists).toSerializable());
+ }
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ private ActorSelection actorSelection(ActorRef actorRef) {
+ return getSystem().actorSelection(actorRef.path());
+ }
+ private FiniteDuration anyDuration() {
+ return any(FiniteDuration.class);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ private CreateTransactionReply createTransactionReply(ActorRef actorRef){
+ return CreateTransactionReply.newBuilder()
+ .setTransactionActorPath(actorRef.path().toString())
+ .setTransactionId("txn-1").build();
+ }
+ private ActorRef setupActorContextWithInitialCreateTransaction(TransactionType type) {
+ ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
+ doReturn(getSystem().actorSelection(actorRef.path())).
+ when(mockActorContext).actorSelection(actorRef.path().toString());
+ doReturn(memberName).when(mockActorContext).getCurrentMemberName();
+ doReturn(createTransactionReply(actorRef)).when(mockActorContext).
+ executeShardOperation(eq(DefaultShardStrategy.DEFAULT_SHARD),
+ eqCreateTransaction(memberName, type), anyDuration());
+ doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(
+ anyString(), eq(actorRef.path().toString()));
+ doReturn(actorRef.path()).when(mockActorContext).actorFor(actorRef.path().toString());
+
+ return actorRef;
+ }
- actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(false).toSerializable());
+ @Test
+ public void testRead() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
- CheckedFuture<Boolean, ReadFailedException> exists =
- transactionProxy.exists(TestModel.TEST_PATH);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- Assert.assertFalse(exists.checkedGet());
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(true).toSerializable());
+ Optional<NormalizedNode<?, ?>> readOptional = transactionProxy.read(
+ TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
- exists = transactionProxy.exists(TestModel.TEST_PATH);
+ assertEquals("NormalizedNode isPresent", false, readOptional.isPresent());
- Assert.assertTrue(exists.checkedGet());
+ NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- actorContext.setExecuteRemoteOperationResponse("bad message");
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- exists = transactionProxy.exists(TestModel.TEST_PATH);
+ readOptional = transactionProxy.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
- try {
- exists.checkedGet();
- fail();
- } catch(ReadFailedException e){
- }
+ assertEquals("NormalizedNode isPresent", true, readOptional.isPresent());
+ assertEquals("Response NormalizedNode", expectedNode, readOptional.get());
}
@Test(expected = ReadFailedException.class)
public void testReadWhenAnInvalidMessageIsSentInReply() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ doReturn(Futures.successful(new Object())).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException>
- read = transactionProxy.read(TestModel.TEST_PATH);
-
- read.checkedGet();
+ transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
}
- @Test
- public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
-
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new PrimaryNotFoundException("test"));
+ @Test(expected = TestException.class)
+ public void testReadWithAsyncRemoteOperatonFailure() throws Throwable {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ doThrow(new TestException()).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
-
- Assert.assertFalse(read.get().isPresent());
-
+ try {
+ transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+                // Expected - rethrow the cause, which this test expects to be TestException.
+ throw e.getCause();
+ }
}
+ private void testExceptionOnInitialCreateTransaction(Exception exToThrow, Invoker invoker)
+ throws Throwable {
- @Test
- public void testReadWhenATimeoutExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
+ doThrow(exToThrow).when(mockActorContext).executeShardOperation(
+ anyString(), any(), anyDuration());
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new TimeoutException("test", new Exception("reason")));
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ try {
+ invoker.invoke(transactionProxy);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+                // Expected - rethrow the cause so the caller can verify the original exception type.
+ throw e.getCause();
+ }
+ }
+ private void testReadWithExceptionOnInitialCreateTransaction(Exception exToThrow) throws Throwable {
+ testExceptionOnInitialCreateTransaction(exToThrow, new Invoker() {
+ @Override
+ public void invoke(TransactionProxy proxy) throws Exception {
+ proxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
+ });
+ }
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
+ @Test(expected = PrimaryNotFoundException.class)
+ public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"));
+ }
- Assert.assertFalse(read.get().isPresent());
+ @Test(expected = TimeoutException.class)
+ public void testReadWhenATimeoutExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new TimeoutException("test",
+ new Exception("reason")));
+ }
+ @Test(expected = TestException.class)
+ public void testReadWhenAnyOtherExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new TestException());
}
@Test
- public void testReadWhenAAnyOtherExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
-
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new NullPointerException());
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ public void testExists() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- try {
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
- fail("A null pointer exception was expected");
- } catch(NullPointerException e){
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
- }
- }
+ Boolean exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
+ assertEquals("Exists response", false, exists);
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
- @Test
- public void testWrite() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ assertEquals("Exists response", true, exists);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ @Test(expected = PrimaryNotFoundException.class)
+ public void testExistsWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+ testExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"), new Invoker() {
+ @Override
+ public void invoke(TransactionProxy proxy) throws Exception {
+ proxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
+ });
+ }
- transactionProxy.write(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+ @Test(expected = ReadFailedException.class)
+ public void testExistsWhenAnInvalidMessageIsSentInReply() throws Exception {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
+ doReturn(Futures.successful(new Object())).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
- Assert.assertNotNull(messages);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- Assert.assertTrue(messages instanceof List);
+ transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
- List<Object> listMessages = (List<Object>) messages;
+ @Test(expected = TestException.class)
+ public void testExistsWithAsyncRemoteOperatonFailure() throws Throwable {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- Assert.assertEquals(1, listMessages.size());
+ doThrow(new TestException()).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
- Assert.assertEquals(WriteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
- }
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- private Object createPrimaryFound(ActorRef actorRef) {
- return new PrimaryFound(actorRef.path().toString()).toSerializable();
+ try {
+ transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+                // Expected - rethrow the cause, which this test expects to be TestException.
+ throw e.getCause();
+ }
}
@Test
- public void testMerge() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ public void testWrite() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- transactionProxy.merge(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+ transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
+ }
- Assert.assertNotNull(messages);
+ @Test
+ public void testMerge() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- Assert.assertTrue(messages instanceof List);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
- List<Object> listMessages = (List<Object>) messages;
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- Assert.assertEquals(1, listMessages.size());
+ transactionProxy.merge(TestModel.TEST_PATH, nodeToWrite);
- Assert.assertEquals(MergeData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
}
@Test
public void testDelete() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
transactionProxy.delete(TestModel.TEST_PATH);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
-
- Assert.assertNotNull(messages);
-
- Assert.assertTrue(messages instanceof List);
-
- List<Object> listMessages = (List<Object>) messages;
-
- Assert.assertEquals(1, listMessages.size());
-
- Assert.assertEquals(DeleteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDeleteData());
}
+ @SuppressWarnings("unchecked")
@Test
public void testReady() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef doNothingActorRef = getSystem().actorOf(props);
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(doNothingActorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(doNothingActorRef));
- actorContext.setExecuteRemoteOperationResponse(new ReadyTransactionReply(doNothingActorRef.path()).toSerializable());
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperation(
+ eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_WRITE, schemaContext);
transactionProxy.read(TestModel.TEST_PATH);
DOMStoreThreePhaseCommitCohort ready = transactionProxy.ready();
- Assert.assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
+ assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- Assert.assertTrue("No cohort paths returned", proxy.getCohortPaths().size() > 0);
-
+ assertEquals("getCohortPaths", Arrays.asList(actorRef.path()), proxy.getCohortPaths());
}
@Test
- public void testGetIdentifier(){
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef doNothingActorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteShardOperationResponse( createTransactionReply(doNothingActorRef) );
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
-
- Assert.assertNotNull(transactionProxy.getIdentifier());
+ public void testGetIdentifier() {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ TransactionProxy.TransactionType.READ_ONLY, schemaContext);
+
+ Object id = transactionProxy.getIdentifier();
+ assertNotNull("getIdentifier returned null", id);
+ assertTrue("Invalid identifier: " + id, id.toString().startsWith(memberName));
}
+ @SuppressWarnings("unchecked")
@Test
- public void testClose(){
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ public void testClose() throws Exception{
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_WRITE, schemaContext);
transactionProxy.read(TestModel.TEST_PATH);
transactionProxy.close();
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
-
- Assert.assertNotNull(messages);
-
- Assert.assertTrue(messages instanceof List);
-
- List<Object> listMessages = (List<Object>) messages;
-
- Assert.assertEquals(1, listMessages.size());
-
- Assert.assertTrue(listMessages.get(0).getClass().equals(CloseTransaction.SERIALIZABLE_CLASS));
- }
-
- private CreateTransactionReply createTransactionReply(ActorRef actorRef){
- return CreateTransactionReply.newBuilder()
- .setTransactionActorPath(actorRef.path().toString())
- .setTransactionId("txn-1")
- .build();
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), isA(CloseTransaction.SERIALIZABLE_CLASS));
}
}
package org.opendaylight.controller.cluster.datastore.utils;
+import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.japi.Creator;
import akka.testkit.JavaTestKit;
+
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.AbstractActorTest;
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardNotFound;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
}
private static Props props(final boolean found, final ActorRef actorRef){
- return Props.create(new Creator<MockShardManager>() {
+ return Props.create(new MockShardManagerCreator(found, actorRef) );
+ }
- @Override public MockShardManager create()
- throws Exception {
- return new MockShardManager(found,
- actorRef);
- }
- });
+ @SuppressWarnings("serial")
+ private static class MockShardManagerCreator implements Creator<MockShardManager> {
+ final boolean found;
+ final ActorRef actorRef;
+
+ MockShardManagerCreator(boolean found, ActorRef actorRef) {
+ this.found = found;
+ this.actorRef = actorRef;
+ }
+
+ @Override
+ public MockShardManager create() throws Exception {
+ return new MockShardManager(found, actorRef);
+ }
}
}
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardManagerActorRef = getSystem()
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardManagerActorRef = getSystem()
}};
}
+
+ @Test
+ public void testExecuteRemoteOperation() {
+ new JavaTestKit(getSystem()) {{
+
+ new Within(duration("3 seconds")) {
+ @Override
+ protected void run() {
+
+ ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+ ActorRef shardManagerActorRef = getSystem()
+ .actorOf(MockShardManager.props(true, shardActorRef));
+
+ ActorContext actorContext =
+ new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+ mock(Configuration.class));
+
+ ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+ Object out = actorContext.executeRemoteOperation(actor, "hello", duration("3 seconds"));
+
+ assertEquals("hello", out);
+
+ expectNoMsg();
+ }
+ };
+ }};
+ }
+
+ @Test
+ public void testExecuteRemoteOperationAsync() {
+ new JavaTestKit(getSystem()) {{
+
+ new Within(duration("3 seconds")) {
+ @Override
+ protected void run() {
+
+ ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+ ActorRef shardManagerActorRef = getSystem()
+ .actorOf(MockShardManager.props(true, shardActorRef));
+
+ ActorContext actorContext =
+ new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+ mock(Configuration.class));
+
+ ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+ Future<Object> future = actorContext.executeRemoteOperationAsync(actor, "hello",
+ Duration.create(3, TimeUnit.SECONDS));
+
+ try {
+ Object result = Await.result(future, Duration.create(3, TimeUnit.SECONDS));
+ assertEquals("Result", "hello", result);
+ } catch(Exception e) {
+ throw new AssertionError(e);
+ }
+
+ expectNoMsg();
+ }
+ };
+ }};
+ }
}
package org.opendaylight.controller.cluster.datastore.utils;
-
+import static org.junit.Assert.assertNotNull;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
public class MockActorContext extends ActorContext {
- private Object executeShardOperationResponse;
- private Object executeRemoteOperationResponse;
- private Object executeLocalOperationResponse;
- private Object executeLocalShardOperationResponse;
+ private volatile Object executeShardOperationResponse;
+ private volatile Object executeRemoteOperationResponse;
+ private volatile Object executeLocalOperationResponse;
+ private volatile Object executeLocalShardOperationResponse;
+ private volatile Exception executeRemoteOperationFailure;
+ private volatile Object inputMessage;
public MockActorContext(ActorSystem actorSystem) {
super(actorSystem, null, new MockClusterWrapper(), new MockConfiguration());
executeRemoteOperationResponse = response;
}
+ public void setExecuteRemoteOperationFailure(Exception executeRemoteOperationFailure) {
+ this.executeRemoteOperationFailure = executeRemoteOperationFailure;
+ }
+
public void setExecuteLocalOperationResponse(
Object executeLocalOperationResponse) {
this.executeLocalOperationResponse = executeLocalOperationResponse;
this.executeLocalShardOperationResponse = executeLocalShardOperationResponse;
}
- @Override public Object executeLocalOperation(ActorRef actor,
+ @SuppressWarnings("unchecked")
+ public <T> T getInputMessage(Class<T> expType) throws Exception {
+ assertNotNull("Input message was null", inputMessage);
+ return (T) expType.getMethod("fromSerializable", Object.class).invoke(null, inputMessage);
+ }
+
+ @Override
+ public Object executeLocalOperation(ActorRef actor,
Object message, FiniteDuration duration) {
return this.executeLocalOperationResponse;
}
- @Override public Object executeLocalShardOperation(String shardName,
+ @Override
+ public Object executeLocalShardOperation(String shardName,
Object message, FiniteDuration duration) {
return this.executeLocalShardOperationResponse;
}
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
import org.opendaylight.yangtools.util.concurrent.DeadlockDetectingListeningExecutorService;
import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
-import org.opendaylight.yangtools.util.PropertyUtils;
-
import com.google.common.collect.ImmutableMap;
/**
public final class DomInmemoryDataBrokerModule extends
org.opendaylight.controller.config.yang.md.sal.dom.impl.AbstractDomInmemoryDataBrokerModule {
- private static final String FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE_PROP =
- "mdsal.datastore-future-callback-queue.size";
- private static final int DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE = 1000;
-
- private static final String FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE_PROP =
- "mdsal.datastore-future-callback-pool.size";
- private static final int DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE = 20;
- private static final String COMMIT_EXECUTOR_MAX_QUEUE_SIZE_PROP =
- "mdsal.datastore-commit-queue.size";
- private static final int DEFAULT_COMMIT_EXECUTOR_MAX_QUEUE_SIZE = 5000;
-
public DomInmemoryDataBrokerModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
super(identifier, dependencyResolver);
* system it's running on.
*/
ExecutorService commitExecutor = SpecialExecutors.newBoundedSingleThreadExecutor(
- PropertyUtils.getIntSystemProperty(
- COMMIT_EXECUTOR_MAX_QUEUE_SIZE_PROP,
- DEFAULT_COMMIT_EXECUTOR_MAX_QUEUE_SIZE), "WriteTxCommit");
+ getMaxDataBrokerCommitQueueSize(), "WriteTxCommit");
/*
* We use an executor for commit ListenableFuture callbacks that favors reusing available
* reached, subsequent submitted tasks will block the caller.
*/
Executor listenableFutureExecutor = SpecialExecutors.newBlockingBoundedCachedThreadPool(
- PropertyUtils.getIntSystemProperty(
- FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE_PROP,
- DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_POOL_SIZE),
- PropertyUtils.getIntSystemProperty(
- FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE_PROP,
- DEFAULT_FUTURE_CALLBACK_EXECUTOR_MAX_QUEUE_SIZE), "CommitFutures");
+ getMaxDataBrokerFutureCallbackPoolSize(), getMaxDataBrokerFutureCallbackQueueSize(),
+ "CommitFutures");
DOMDataBrokerImpl newDataBroker = new DOMDataBrokerImpl(datastores,
new DeadlockDetectingListeningExecutorService(commitExecutor,
}
}
}
+
+ leaf max-data-broker-future-callback-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for the data broker's commit future callback executor.";
+ }
+
+ leaf max-data-broker-future-callback-pool-size {
+ default 20;
+ type uint16;
+ description "The maximum thread pool size for the data broker's commit future callback executor.";
+ }
+
+ leaf max-data-broker-commit-queue-size {
+ default 5000;
+ type uint16;
+ description "The maximum queue size for the data broker's commit executor.";
+ }
}
}
package org.opendaylight.controller.config.yang.inmemory_datastore_provider;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
public class InMemoryConfigDataStoreProviderModule extends org.opendaylight.controller.config.yang.inmemory_datastore_provider.AbstractInMemoryConfigDataStoreProviderModule {
@Override
public java.lang.AutoCloseable createInstance() {
- return InMemoryDOMDataStoreFactory.create("DOM-CFG", getSchemaServiceDependency());
+ return InMemoryDOMDataStoreFactory.create("DOM-CFG", getSchemaServiceDependency(),
+ InMemoryDOMDataStoreConfigProperties.create(getMaxDataChangeExecutorPoolSize(),
+ getMaxDataChangeExecutorQueueSize(), getMaxDataChangeListenerQueueSize()));
}
}
package org.opendaylight.controller.config.yang.inmemory_datastore_provider;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreConfigProperties;
import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStoreFactory;
public class InMemoryOperationalDataStoreProviderModule extends org.opendaylight.controller.config.yang.inmemory_datastore_provider.AbstractInMemoryOperationalDataStoreProviderModule {
@Override
public java.lang.AutoCloseable createInstance() {
- return InMemoryDOMDataStoreFactory.create("DOM-OPER", getOperationalSchemaServiceDependency());
+ return InMemoryDOMDataStoreFactory.create("DOM-OPER", getOperationalSchemaServiceDependency(),
+ InMemoryDOMDataStoreConfigProperties.create(getMaxDataChangeExecutorPoolSize(),
+ getMaxDataChangeExecutorQueueSize(), getMaxDataChangeListenerQueueSize()));
}
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.base.Preconditions;
+
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.yangtools.util.concurrent.NotificationManager;
import org.slf4j.LoggerFactory;
class ChangeListenerNotifyTask implements Runnable {
-
private static final Logger LOG = LoggerFactory.getLogger(ChangeListenerNotifyTask.class);
- private final Iterable<? extends DataChangeListenerRegistration<?>> listeners;
- private final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event;
-
@SuppressWarnings("rawtypes")
- private final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent>
- notificationMgr;
+ private final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent> notificationMgr;
+ private final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event;
+ private final DataChangeListenerRegistration<?> listener;
@SuppressWarnings("rawtypes")
- public ChangeListenerNotifyTask(final Iterable<? extends DataChangeListenerRegistration<?>> listeners,
+ public ChangeListenerNotifyTask(final DataChangeListenerRegistration<?> listener,
final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event,
final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent> notificationMgr) {
- this.listeners = listeners;
- this.event = event;
- this.notificationMgr = notificationMgr;
+ this.notificationMgr = Preconditions.checkNotNull(notificationMgr);
+ this.listener = Preconditions.checkNotNull(listener);
+ this.event = Preconditions.checkNotNull(event);
}
@Override
public void run() {
-
- for (DataChangeListenerRegistration<?> listener : listeners) {
- notificationMgr.submitNotification(listener.getInstance(), event);
+ final AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> l = listener.getInstance();
+ if (l == null) {
+ LOG.trace("Skipping event delivery to unregistered listener {}", l);
+ return;
}
+ LOG.trace("Listener {} event {}", l, event);
+
+        // FIXME: this event has already been queued once for this task; submitting it to the
+        // notification manager queues it a second time. Consider collapsing the two queues.
+ notificationMgr.submitNotification(l, event);
}
@Override
public String toString() {
- return "ChangeListenerNotifyTask [listeners=" + listeners + ", event=" + event + "]";
+ return "ChangeListenerNotifyTask [listener=" + listener + ", event=" + event + "]";
}
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.base.Preconditions;
+
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import com.google.common.base.Preconditions;
-
public final class DOMImmutableDataChangeEvent implements
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
updated.put(path, after);
return this;
}
+
+ public boolean isEmpty() {
+ return created.isEmpty() && removed.isEmpty() && updated.isEmpty();
+ }
}
private static final class RemoveEventFactory implements SimpleEventFactory {
import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.md.sal.dom.store.impl.SnapshotBackedWriteTransaction.TransactionReadyPrototype;
import org.opendaylight.yangtools.util.ExecutorServiceUtil;
-import org.opendaylight.yangtools.util.PropertyUtils;
import org.opendaylight.yangtools.util.concurrent.NotificationManager;
import org.opendaylight.yangtools.util.concurrent.QueuedNotificationManager;
import org.opendaylight.yangtools.yang.data.api.schema.tree.ConflictingModificationAppliedException;
import javax.annotation.concurrent.GuardedBy;
-import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
}
};
- private static final String DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE_PROP =
- "mdsal.datastore-dcl-notification-queue.size";
-
- private static final int DEFAULT_DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE = 1000;
-
private final DataTree dataTree = InMemoryDataTreeFactory.getInstance().create();
private final ListenerTree listenerTree = ListenerTree.create();
private final AtomicLong txCounter = new AtomicLong(0);
public InMemoryDOMDataStore(final String name, final ListeningExecutorService listeningExecutor,
final ExecutorService dataChangeListenerExecutor) {
+ this(name, listeningExecutor, dataChangeListenerExecutor,
+ InMemoryDOMDataStoreConfigProperties.DEFAULT_MAX_DATA_CHANGE_LISTENER_QUEUE_SIZE);
+ }
+
+ public InMemoryDOMDataStore(final String name, final ListeningExecutorService listeningExecutor,
+ final ExecutorService dataChangeListenerExecutor, int maxDataChangeListenerQueueSize) {
this.name = Preconditions.checkNotNull(name);
this.listeningExecutor = Preconditions.checkNotNull(listeningExecutor);
this.dataChangeListenerExecutor = Preconditions.checkNotNull(dataChangeListenerExecutor);
- int maxDCLQueueSize = PropertyUtils.getIntSystemProperty(
- DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE_PROP, DEFAULT_DCL_NOTIFICATION_MGR_MAX_QUEUE_SIZE );
-
dataChangeListenerNotificationManager =
new QueuedNotificationManager<>(this.dataChangeListenerExecutor,
- DCL_NOTIFICATION_MGR_INVOKER, maxDCLQueueSize, "DataChangeListenerQueueMgr");
+ DCL_NOTIFICATION_MGR_INVOKER, maxDataChangeListenerQueueSize,
+ "DataChangeListenerQueueMgr");
}
@Override
.addCreated(path, data) //
.build();
- new ChangeListenerNotifyTask(Collections.singletonList(reg), event,
+ new ChangeListenerNotifyTask(reg, event,
dataChangeListenerNotificationManager).run();
}
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+/**
+ * Holds configuration properties when creating an {@link InMemoryDOMDataStore} instance via the
+ * {@link InMemoryDOMDataStoreFactory}.
+ *
+ * @author Thomas Pantelis
+ * @see InMemoryDOMDataStoreFactory
+ */
+public class InMemoryDOMDataStoreConfigProperties {
+
+ public static final int DEFAULT_MAX_DATA_CHANGE_EXECUTOR_QUEUE_SIZE = 1000;
+ public static final int DEFAULT_MAX_DATA_CHANGE_EXECUTOR_POOL_SIZE = 20;
+ public static final int DEFAULT_MAX_DATA_CHANGE_LISTENER_QUEUE_SIZE = 1000;
+
+ private static final InMemoryDOMDataStoreConfigProperties DEFAULT =
+ create(DEFAULT_MAX_DATA_CHANGE_EXECUTOR_POOL_SIZE,
+ DEFAULT_MAX_DATA_CHANGE_EXECUTOR_QUEUE_SIZE,
+ DEFAULT_MAX_DATA_CHANGE_LISTENER_QUEUE_SIZE);
+
+ private final int maxDataChangeExecutorQueueSize;
+ private final int maxDataChangeExecutorPoolSize;
+ private final int maxDataChangeListenerQueueSize;
+
+ /**
+ * Constructs an instance with the given property values.
+ *
+ * @param maxDataChangeExecutorPoolSize
+ * maximum thread pool size for the data change notification executor.
+ * @param maxDataChangeExecutorQueueSize
+ * maximum queue size for the data change notification executor.
+ * @param maxDataChangeListenerQueueSize
+ * maximum queue size for the data change listeners.
+ */
+ public static InMemoryDOMDataStoreConfigProperties create(int maxDataChangeExecutorPoolSize,
+ int maxDataChangeExecutorQueueSize, int maxDataChangeListenerQueueSize) {
+ return new InMemoryDOMDataStoreConfigProperties(maxDataChangeExecutorPoolSize,
+ maxDataChangeExecutorQueueSize, maxDataChangeListenerQueueSize);
+ }
+
+ /**
+ * Returns the InMemoryDOMDataStoreConfigProperties instance with default values.
+ */
+ public static InMemoryDOMDataStoreConfigProperties getDefault() {
+ return DEFAULT;
+ }
+
+ private InMemoryDOMDataStoreConfigProperties(int maxDataChangeExecutorPoolSize,
+ int maxDataChangeExecutorQueueSize, int maxDataChangeListenerQueueSize) {
+ this.maxDataChangeExecutorQueueSize = maxDataChangeExecutorQueueSize;
+ this.maxDataChangeExecutorPoolSize = maxDataChangeExecutorPoolSize;
+ this.maxDataChangeListenerQueueSize = maxDataChangeListenerQueueSize;
+ }
+
+ /**
+ * Returns the maximum queue size for the data change notification executor.
+ */
+ public int getMaxDataChangeExecutorQueueSize() {
+ return maxDataChangeExecutorQueueSize;
+ }
+
+ /**
+ * Returns the maximum thread pool size for the data change notification executor.
+ */
+ public int getMaxDataChangeExecutorPoolSize() {
+ return maxDataChangeExecutorPoolSize;
+ }
+
+ /**
+ * Returns the maximum queue size for the data change listeners.
+ */
+ public int getMaxDataChangeListenerQueueSize() {
+ return maxDataChangeListenerQueueSize;
+ }
+}
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
-import org.opendaylight.yangtools.util.PropertyUtils;
import com.google.common.util.concurrent.MoreExecutors;
/**
*/
public final class InMemoryDOMDataStoreFactory {
- private static final String DCL_EXECUTOR_MAX_QUEUE_SIZE_PROP =
- "mdsal.datastore-dcl-notification-queue.size";
- private static final int DEFAULT_DCL_EXECUTOR_MAX_QUEUE_SIZE = 1000;
-
- private static final String DCL_EXECUTOR_MAX_POOL_SIZE_PROP =
- "mdsal.datastore-dcl-notification-pool.size";
- private static final int DEFAULT_DCL_EXECUTOR_MAX_POOL_SIZE = 20;
-
private InMemoryDOMDataStoreFactory() {
}
+ public static InMemoryDOMDataStore create(final String name,
+ @Nullable final SchemaService schemaService) {
+ return create(name, schemaService, null);
+ }
+
/**
* Creates an InMemoryDOMDataStore instance.
*
* @param name the name of the data store
* @param schemaService the SchemaService to which to register the data store.
+ * @param properties configuration properties for the InMemoryDOMDataStore instance. If null,
+ * default property values are used.
* @return an InMemoryDOMDataStore instance
*/
public static InMemoryDOMDataStore create(final String name,
- @Nullable final SchemaService schemaService) {
+ @Nullable final SchemaService schemaService,
+ @Nullable final InMemoryDOMDataStoreConfigProperties properties) {
+
+ InMemoryDOMDataStoreConfigProperties actualProperties = properties;
+ if(actualProperties == null) {
+ actualProperties = InMemoryDOMDataStoreConfigProperties.getDefault();
+ }
// For DataChangeListener notifications we use an executor that provides the fastest
// task execution time to get higher throughput as DataChangeListeners typically provide
// much of the business logic for a data model. If the executor queue size limit is reached,
// subsequent submitted notifications will block the calling thread.
- int dclExecutorMaxQueueSize = PropertyUtils.getIntSystemProperty(
- DCL_EXECUTOR_MAX_QUEUE_SIZE_PROP, DEFAULT_DCL_EXECUTOR_MAX_QUEUE_SIZE);
- int dclExecutorMaxPoolSize = PropertyUtils.getIntSystemProperty(
- DCL_EXECUTOR_MAX_POOL_SIZE_PROP, DEFAULT_DCL_EXECUTOR_MAX_POOL_SIZE);
+ int dclExecutorMaxQueueSize = actualProperties.getMaxDataChangeExecutorQueueSize();
+ int dclExecutorMaxPoolSize = actualProperties.getMaxDataChangeExecutorPoolSize();
ExecutorService dataChangeListenerExecutor = SpecialExecutors.newBlockingBoundedFastThreadPool(
dclExecutorMaxPoolSize, dclExecutorMaxQueueSize, name + "-DCL" );
InMemoryDOMDataStore dataStore = new InMemoryDOMDataStore(name,
MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()),
- dataChangeListenerExecutor);
+ dataChangeListenerExecutor, actualProperties.getMaxDataChangeListenerQueueSize());
if(schemaService != null) {
schemaService.registerSchemaContextListener(dataStore);
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
-import static org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.builder;
-
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
+import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
+import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
import java.util.Map.Entry;
-import java.util.Set;
import java.util.concurrent.Callable;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.Builder;
import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.SimpleEventFactory;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Node;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Walker;
import org.opendaylight.yangtools.util.concurrent.NotificationManager;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNodeContainer;
*/
final class ResolveDataChangeEventsTask implements Callable<Iterable<ChangeListenerNotifyTask>> {
private static final Logger LOG = LoggerFactory.getLogger(ResolveDataChangeEventsTask.class);
- private static final DOMImmutableDataChangeEvent NO_CHANGE = builder(DataChangeScope.BASE).build();
- private final Multimap<ListenerTree.Node, DOMImmutableDataChangeEvent> events = HashMultimap.create();
+ @SuppressWarnings("rawtypes")
+ private final NotificationManager<AsyncDataChangeListener, AsyncDataChangeEvent> notificationMgr;
private final DataTreeCandidate candidate;
private final ListenerTree listenerRoot;
- @SuppressWarnings("rawtypes")
- private final NotificationManager<AsyncDataChangeListener, AsyncDataChangeEvent> notificationMgr;
+ private Multimap<DataChangeListenerRegistration<?>, DOMImmutableDataChangeEvent> collectedEvents;
@SuppressWarnings("rawtypes")
public ResolveDataChangeEventsTask(final DataTreeCandidate candidate, final ListenerTree listenerTree,
* order to delivery data change events.
*/
@Override
- public Iterable<ChangeListenerNotifyTask> call() {
+ public synchronized Iterable<ChangeListenerNotifyTask> call() {
try (final Walker w = listenerRoot.getWalker()) {
- resolveAnyChangeEvent(candidate.getRootPath(), Collections.singleton(w.getRootNode()), candidate.getRootNode());
- return createNotificationTasks();
- }
- }
-
- /**
- *
- * Walks map of listeners to data change events, creates notification
- * delivery tasks.
- *
- * Walks map of registered and affected listeners and creates notification
- * tasks from set of listeners and events to be delivered.
- *
- * If set of listeners has more then one event (applicable to wildcarded
- * listeners), merges all data change events into one, final which contains
- * all separate updates.
- *
- * Dispatch between merge variant and reuse variant of notification task is
- * done in
- * {@link #addNotificationTask(com.google.common.collect.ImmutableList.Builder, Node, java.util.Collection)}
- *
- * @return Collection of notification tasks.
- */
- private Collection<ChangeListenerNotifyTask> createNotificationTasks() {
- ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder = ImmutableList.builder();
- for (Entry<ListenerTree.Node, Collection<DOMImmutableDataChangeEvent>> entry : events.asMap().entrySet()) {
- addNotificationTask(taskListBuilder, entry.getKey(), entry.getValue());
- }
- return taskListBuilder.build();
- }
-
- /**
- * Adds notification task to task list.
- *
- * If entry collection contains one event, this event is reused and added to
- * notification tasks for listeners (see
- * {@link #addNotificationTaskByScope(com.google.common.collect.ImmutableList.Builder, Node, DOMImmutableDataChangeEvent)}
- * . Otherwise events are merged by scope and distributed between listeners
- * to particular scope. See
- * {@link #addNotificationTasksAndMergeEvents(com.google.common.collect.ImmutableList.Builder, Node, java.util.Collection)}
- * .
- *
- * @param taskListBuilder
- * @param listeners
- * @param entries
- */
- private void addNotificationTask(final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder,
- final ListenerTree.Node listeners, final Collection<DOMImmutableDataChangeEvent> entries) {
-
- if (!entries.isEmpty()) {
- if (entries.size() == 1) {
- addNotificationTaskByScope(taskListBuilder, listeners, Iterables.getOnlyElement(entries));
- } else {
- addNotificationTasksAndMergeEvents(taskListBuilder, listeners, entries);
- }
- }
- }
+ // Defensive: reset internal state
+ collectedEvents = ArrayListMultimap.create();
- /**
- *
- * Add notification deliveries task to the listener.
- *
- *
- * @param taskListBuilder
- * @param listeners
- * @param event
- */
- private void addNotificationTaskByScope(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final ListenerTree.Node listeners,
- final DOMImmutableDataChangeEvent event) {
- DataChangeScope eventScope = event.getScope();
- for (DataChangeListenerRegistration<?> listenerReg : listeners.getListeners()) {
- DataChangeScope listenerScope = listenerReg.getScope();
- List<DataChangeListenerRegistration<?>> listenerSet = Collections
- .<DataChangeListenerRegistration<?>> singletonList(listenerReg);
- if (eventScope == DataChangeScope.BASE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- } else if (eventScope == DataChangeScope.ONE && listenerScope != DataChangeScope.BASE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- } else if (eventScope == DataChangeScope.SUBTREE && listenerScope == DataChangeScope.SUBTREE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- }
- }
- }
+ // Run through the tree
+ final ResolveDataChangeState s = ResolveDataChangeState.initial(candidate.getRootPath(), w.getRootNode());
+ resolveAnyChangeEvent(s, candidate.getRootNode());
- /**
- *
- * Add notification tasks with merged event
- *
- * Separate Events by scope and creates merged notification tasks for each
- * and every scope which is present.
- *
- * Adds merged events to task list based on scope requested by client.
- *
- * @param taskListBuilder
- * @param listeners
- * @param entries
- */
- private void addNotificationTasksAndMergeEvents(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final ListenerTree.Node listeners,
- final Collection<DOMImmutableDataChangeEvent> entries) {
-
- final Builder baseBuilder = builder(DataChangeScope.BASE);
- final Builder oneBuilder = builder(DataChangeScope.ONE);
- final Builder subtreeBuilder = builder(DataChangeScope.SUBTREE);
-
- boolean baseModified = false;
- boolean oneModified = false;
- boolean subtreeModified = false;
- for (final DOMImmutableDataChangeEvent entry : entries) {
- switch (entry.getScope()) {
- // Absence of breaks is intentional here. Subtree contains base and
- // one, one also contains base
- case BASE:
- baseBuilder.merge(entry);
- baseModified = true;
- case ONE:
- oneBuilder.merge(entry);
- oneModified = true;
- case SUBTREE:
- subtreeBuilder.merge(entry);
- subtreeModified = true;
+ /*
+ * Convert to tasks, but be mindful of multiple values -- those indicate multiple
+ * wildcard matches, which need to be merged.
+ */
+ final Collection<ChangeListenerNotifyTask> ret = new ArrayList<>();
+ for (Entry<DataChangeListenerRegistration<?>, Collection<DOMImmutableDataChangeEvent>> e : collectedEvents.asMap().entrySet()) {
+ final Collection<DOMImmutableDataChangeEvent> col = e.getValue();
+ final DOMImmutableDataChangeEvent event;
+
+ if (col.size() != 1) {
+ final Builder b = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE);
+ for (DOMImmutableDataChangeEvent i : col) {
+ b.merge(i);
+ }
+
+ event = b.build();
+ LOG.trace("Merged events {} into event {}", col, event);
+ } else {
+ event = col.iterator().next();
+ }
+
+ ret.add(new ChangeListenerNotifyTask(e.getKey(), event, notificationMgr));
}
- }
- if (baseModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, baseBuilder.build());
- }
- if (oneModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, oneBuilder.build());
- }
- if (subtreeModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, subtreeBuilder.build());
- }
- }
-
- private void addNotificationTaskExclusively(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final Node listeners,
- final DOMImmutableDataChangeEvent event) {
- for (DataChangeListenerRegistration<?> listener : listeners.getListeners()) {
- if (listener.getScope() == event.getScope()) {
- Set<DataChangeListenerRegistration<?>> listenerSet = Collections
- .<DataChangeListenerRegistration<?>> singleton(listener);
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- }
+ // FIXME: these are tasks whose only job is to submit further tasks; consider flattening this indirection.
+ LOG.debug("Created tasks {}", ret);
+ return ret;
}
}
* - Original (before) state of current node
* @param after
* - After state of current node
- * @return Data Change Event of this node and all it's children
+ * @return True if the subtree changed, false otherwise
*/
- private DOMImmutableDataChangeEvent resolveAnyChangeEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final DataTreeCandidateNode node) {
-
+ private boolean resolveAnyChangeEvent(final ResolveDataChangeState state, final DataTreeCandidateNode node) {
if (node.getModificationType() != ModificationType.UNMODIFIED &&
!node.getDataAfter().isPresent() && !node.getDataBefore().isPresent()) {
LOG.debug("Modification at {} has type {}, but no before- and after-data. Assuming unchanged.",
- path, node.getModificationType());
- return NO_CHANGE;
+ state.getPath(), node.getModificationType());
+ return false;
}
// no before and after state is present
switch (node.getModificationType()) {
case SUBTREE_MODIFIED:
- return resolveSubtreeChangeEvent(path, listeners, node);
+ return resolveSubtreeChangeEvent(state, node);
case MERGE:
case WRITE:
Preconditions.checkArgument(node.getDataAfter().isPresent(),
- "Modification at {} has type {} but no after-data", path, node.getModificationType());
- if (node.getDataBefore().isPresent()) {
- return resolveReplacedEvent(path, listeners, node.getDataBefore().get(), node.getDataAfter().get());
- } else {
- return resolveCreateEvent(path, listeners, node.getDataAfter().get());
+ "Modification at {} has type {} but no after-data", state.getPath(), node.getModificationType());
+ if (!node.getDataBefore().isPresent()) {
+ resolveCreateEvent(state, node.getDataAfter().get());
+ return true;
}
+
+ return resolveReplacedEvent(state, node.getDataBefore().get(), node.getDataAfter().get());
case DELETE:
Preconditions.checkArgument(node.getDataBefore().isPresent(),
- "Modification at {} has type {} but no before-data", path, node.getModificationType());
- return resolveDeleteEvent(path, listeners, node.getDataBefore().get());
+ "Modification at {} has type {} but no before-data", state.getPath(), node.getModificationType());
+ resolveDeleteEvent(state, node.getDataBefore().get());
+ return true;
case UNMODIFIED:
- return NO_CHANGE;
+ return false;
}
- throw new IllegalStateException(String.format("Unhandled node state %s at %s", node.getModificationType(), path));
+ throw new IllegalStateException(String.format("Unhandled node state %s at %s", node.getModificationType(), state.getPath()));
}
- private DOMImmutableDataChangeEvent resolveReplacedEvent(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<?, ?> beforeData,
- final NormalizedNode<?, ?> afterData) {
-
- // FIXME: BUG-1493: check the listeners to prune unneeded changes:
- // for subtrees, we have to do all
- // for one, we need to expand children
- // for base, we just report replacement
+ private boolean resolveReplacedEvent(final ResolveDataChangeState state,
+ final NormalizedNode<?, ?> beforeData, final NormalizedNode<?, ?> afterData) {
if (beforeData instanceof NormalizedNodeContainer<?, ?, ?>) {
- // Node is container (contains child) and we have interested
- // listeners registered for it, that means we need to do
- // resolution of changes on children level and can not
- // shortcut resolution.
- LOG.trace("Resolving subtree replace event for {} before {}, after {}",path,beforeData,afterData);
+ /*
+ * The node is a container (it may have children) and we have
+ * interested listeners registered for it; that means we need to
+ * resolve changes at the child level and cannot shortcut the
+ * resolution.
+ */
+ LOG.trace("Resolving subtree replace event for {} before {}, after {}", state.getPath(), beforeData, afterData);
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> beforeCont = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) beforeData;
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> afterCont = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) afterData;
- return resolveNodeContainerReplaced(path, listeners, beforeCont, afterCont);
- } else if (!beforeData.equals(afterData)) {
- // Node is Leaf type (does not contain child nodes)
- // so normal equals method is sufficient for determining change.
- LOG.trace("Resolving leaf replace event for {} , before {}, after {}",path,beforeData,afterData);
- DOMImmutableDataChangeEvent event = builder(DataChangeScope.BASE).setBefore(beforeData).setAfter(afterData)
- .addUpdated(path, beforeData, afterData).build();
- addPartialTask(listeners, event);
- return event;
- } else {
- return NO_CHANGE;
+ return resolveNodeContainerReplaced(state, beforeCont, afterCont);
}
+
+ // Node is a Leaf type (does not contain child nodes)
+ // so normal equals method is sufficient for determining change.
+ if (beforeData.equals(afterData)) {
+ LOG.trace("Skipping equal leaf {}", state.getPath());
+ return false;
+ }
+
+ LOG.trace("Resolving leaf replace event for {} , before {}, after {}", state.getPath(), beforeData, afterData);
+ DOMImmutableDataChangeEvent event = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE).addUpdated(state.getPath(), beforeData, afterData).build();
+ state.addEvent(event);
+ state.collectEvents(beforeData, afterData, collectedEvents);
+ return true;
}
- private DOMImmutableDataChangeEvent resolveNodeContainerReplaced(final YangInstanceIdentifier path,
- final Collection<Node> listeners,
+ private boolean resolveNodeContainerReplaced(final ResolveDataChangeState state,
final NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> beforeCont,
final NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> afterCont) {
- final List<DOMImmutableDataChangeEvent> childChanges = new LinkedList<>();
+ if (!state.needsProcessing()) {
+ LOG.trace("Not processing replaced container {}", state.getPath());
+ return true;
+ }
// We look at all children from before and compare it with after state.
+ boolean childChanged = false;
for (NormalizedNode<PathArgument, ?> beforeChild : beforeCont.getValue()) {
final PathArgument childId = beforeChild.getIdentifier();
- YangInstanceIdentifier childPath = path.node(childId);
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- Optional<NormalizedNode<PathArgument, ?>> afterChild = afterCont.getChild(childId);
- DOMImmutableDataChangeEvent childChange = resolveNodeContainerChildUpdated(childPath, childListeners,
- beforeChild, afterChild);
- // If change is empty (equals to NO_CHANGE)
- if (childChange != NO_CHANGE) {
- childChanges.add(childChange);
+ if (resolveNodeContainerChildUpdated(state.child(childId), beforeChild, afterCont.getChild(childId))) {
+ childChanged = true;
}
}
* created.
*/
if (!beforeCont.getChild(childId).isPresent()) {
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- YangInstanceIdentifier childPath = path.node(childId);
- childChanges.add(resolveSameEventRecursivelly(childPath , childListeners, afterChild,
- DOMImmutableDataChangeEvent.getCreateEventFactory()));
+ resolveSameEventRecursivelly(state.child(childId), afterChild, DOMImmutableDataChangeEvent.getCreateEventFactory());
+ childChanged = true;
}
}
- if (childChanges.isEmpty()) {
- return NO_CHANGE;
- }
- Builder eventBuilder = builder(DataChangeScope.BASE) //
- .setBefore(beforeCont) //
- .setAfter(afterCont)
- .addUpdated(path, beforeCont, afterCont);
- for (DOMImmutableDataChangeEvent childChange : childChanges) {
- eventBuilder.merge(childChange);
+ if (childChanged) {
+ DOMImmutableDataChangeEvent event = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE)
+ .addUpdated(state.getPath(), beforeCont, afterCont).build();
+ state.addEvent(event);
}
- DOMImmutableDataChangeEvent replaceEvent = eventBuilder.build();
- addPartialTask(listeners, replaceEvent);
- return replaceEvent;
+ state.collectEvents(beforeCont, afterCont, collectedEvents);
+ return childChanged;
}
- private DOMImmutableDataChangeEvent resolveNodeContainerChildUpdated(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<PathArgument, ?> before,
- final Optional<NormalizedNode<PathArgument, ?>> after) {
-
+ private boolean resolveNodeContainerChildUpdated(final ResolveDataChangeState state,
+ final NormalizedNode<PathArgument, ?> before, final Optional<NormalizedNode<PathArgument, ?>> after) {
if (after.isPresent()) {
// REPLACE or SUBTREE Modified
- return resolveReplacedEvent(path, listeners, before, after.get());
-
- } else {
- // AFTER state is not present - child was deleted.
- return resolveSameEventRecursivelly(path, listeners, before,
- DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ return resolveReplacedEvent(state, before, after.get());
}
+
+ // AFTER state is not present - child was deleted.
+ resolveSameEventRecursivelly(state, before, DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ return true;
}
/**
* Resolves create events deep down the interest listener tree.
*
- *
* @param path
* @param listeners
* @param afterState
* @return
*/
- private DOMImmutableDataChangeEvent resolveCreateEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final NormalizedNode<?, ?> afterState) {
+ private void resolveCreateEvent(final ResolveDataChangeState state, final NormalizedNode<?, ?> afterState) {
@SuppressWarnings({ "unchecked", "rawtypes" })
final NormalizedNode<PathArgument, ?> node = (NormalizedNode) afterState;
- return resolveSameEventRecursivelly(path, listeners, node, DOMImmutableDataChangeEvent.getCreateEventFactory());
+ resolveSameEventRecursivelly(state, node, DOMImmutableDataChangeEvent.getCreateEventFactory());
}
- private DOMImmutableDataChangeEvent resolveDeleteEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final NormalizedNode<?, ?> beforeState) {
-
+ private void resolveDeleteEvent(final ResolveDataChangeState state, final NormalizedNode<?, ?> beforeState) {
@SuppressWarnings({ "unchecked", "rawtypes" })
final NormalizedNode<PathArgument, ?> node = (NormalizedNode) beforeState;
- return resolveSameEventRecursivelly(path, listeners, node, DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ resolveSameEventRecursivelly(state, node, DOMImmutableDataChangeEvent.getRemoveEventFactory());
}
- private DOMImmutableDataChangeEvent resolveSameEventRecursivelly(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<PathArgument, ?> node,
- final SimpleEventFactory eventFactory) {
- final DOMImmutableDataChangeEvent event = eventFactory.create(path, node);
- DOMImmutableDataChangeEvent propagateEvent = event;
+ private void resolveSameEventRecursivelly(final ResolveDataChangeState state,
+ final NormalizedNode<PathArgument, ?> node, final SimpleEventFactory eventFactory) {
+ if (!state.needsProcessing()) {
+ LOG.trace("Skipping child {}", state.getPath());
+ return;
+ }
+
// We have listeners for this node or it's children, so we will try
// to do additional processing
if (node instanceof NormalizedNodeContainer<?, ?, ?>) {
- LOG.trace("Resolving subtree recursive event for {}, type {}", path, eventFactory);
-
- Builder eventBuilder = builder(DataChangeScope.BASE);
- eventBuilder.merge(event);
- eventBuilder.setBefore(event.getOriginalSubtree());
- eventBuilder.setAfter(event.getUpdatedSubtree());
+ LOG.trace("Resolving subtree recursive event for {}, type {}", state.getPath(), eventFactory);
// Node has children, so we will try to resolve it's children
// changes.
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> container = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) node;
for (NormalizedNode<PathArgument, ?> child : container.getValue()) {
- PathArgument childId = child.getIdentifier();
+ final PathArgument childId = child.getIdentifier();
+
LOG.trace("Resolving event for child {}", childId);
- Collection<Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- eventBuilder.merge(resolveSameEventRecursivelly(path.node(childId), childListeners, child, eventFactory));
+ resolveSameEventRecursivelly(state.child(childId), child, eventFactory);
}
- propagateEvent = eventBuilder.build();
}
- if (!listeners.isEmpty()) {
- addPartialTask(listeners, propagateEvent);
- }
- return propagateEvent;
- }
- private DOMImmutableDataChangeEvent resolveSubtreeChangeEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final DataTreeCandidateNode modification) {
+ final DOMImmutableDataChangeEvent event = eventFactory.create(state.getPath(), node);
+ LOG.trace("Adding event {} at path {}", event, state.getPath());
+ state.addEvent(event);
+ state.collectEvents(event.getOriginalSubtree(), event.getUpdatedSubtree(), collectedEvents);
+ }
- Preconditions.checkArgument(modification.getDataBefore().isPresent(), "Subtree change with before-data not present at path %s", path);
- Preconditions.checkArgument(modification.getDataAfter().isPresent(), "Subtree change with after-data not present at path %s", path);
+ private boolean resolveSubtreeChangeEvent(final ResolveDataChangeState state, final DataTreeCandidateNode modification) {
+ Preconditions.checkArgument(modification.getDataBefore().isPresent(), "Subtree change with before-data not present at path %s", state.getPath());
+ Preconditions.checkArgument(modification.getDataAfter().isPresent(), "Subtree change with after-data not present at path %s", state.getPath());
- Builder one = builder(DataChangeScope.ONE).
- setBefore(modification.getDataBefore().get()).
- setAfter(modification.getDataAfter().get());
- Builder subtree = builder(DataChangeScope.SUBTREE).
- setBefore(modification.getDataBefore().get()).
- setAfter(modification.getDataAfter().get());
- boolean oneModified = false;
+ DataChangeScope scope = null;
for (DataTreeCandidateNode childMod : modification.getChildNodes()) {
- PathArgument childId = childMod.getIdentifier();
- YangInstanceIdentifier childPath = path.node(childId);
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
-
+ final ResolveDataChangeState childState = state.child(childMod.getIdentifier());
switch (childMod.getModificationType()) {
case WRITE:
case MERGE:
case DELETE:
- one.merge(resolveAnyChangeEvent(childPath, childListeners, childMod));
- oneModified = true;
+ if (resolveAnyChangeEvent(childState, childMod)) {
+ scope = DataChangeScope.ONE;
+ }
break;
case SUBTREE_MODIFIED:
- subtree.merge(resolveSubtreeChangeEvent(childPath, childListeners, childMod));
+ if (resolveSubtreeChangeEvent(childState, childMod) && scope == null) {
+ scope = DataChangeScope.SUBTREE;
+ }
break;
case UNMODIFIED:
// no-op
break;
}
}
- final DOMImmutableDataChangeEvent oneChangeEvent;
- if(oneModified) {
- one.addUpdated(path, modification.getDataBefore().get(), modification.getDataAfter().get());
- oneChangeEvent = one.build();
- subtree.merge(oneChangeEvent);
- } else {
- oneChangeEvent = null;
- subtree.addUpdated(path, modification.getDataBefore().get(), modification.getDataAfter().get());
- }
- DOMImmutableDataChangeEvent subtreeEvent = subtree.build();
- if (!listeners.isEmpty()) {
- if(oneChangeEvent != null) {
- addPartialTask(listeners, oneChangeEvent);
- }
- addPartialTask(listeners, subtreeEvent);
- }
- return subtreeEvent;
- }
- private DOMImmutableDataChangeEvent addPartialTask(final Collection<ListenerTree.Node> listeners,
- final DOMImmutableDataChangeEvent event) {
- for (ListenerTree.Node listenerNode : listeners) {
- if (!listenerNode.getListeners().isEmpty()) {
- LOG.trace("Adding event {} for listeners {}",event,listenerNode);
- events.put(listenerNode, event);
- }
- }
- return event;
- }
+ final NormalizedNode<?, ?> before = modification.getDataBefore().get();
+ final NormalizedNode<?, ?> after = modification.getDataAfter().get();
- private static Collection<ListenerTree.Node> getListenerChildrenWildcarded(final Collection<ListenerTree.Node> parentNodes,
- final PathArgument child) {
- if (parentNodes.isEmpty()) {
- return Collections.emptyList();
- }
- com.google.common.collect.ImmutableList.Builder<ListenerTree.Node> result = ImmutableList.builder();
- if (child instanceof NodeWithValue || child instanceof NodeIdentifierWithPredicates) {
- NodeIdentifier wildcardedIdentifier = new NodeIdentifier(child.getNodeType());
- addChildrenNodesToBuilder(result, parentNodes, wildcardedIdentifier);
+ if (scope != null) {
+ DOMImmutableDataChangeEvent one = DOMImmutableDataChangeEvent.builder(scope).addUpdated(state.getPath(), before, after).build();
+ state.addEvent(one);
}
- addChildrenNodesToBuilder(result, parentNodes, child);
- return result.build();
- }
- private static void addChildrenNodesToBuilder(final ImmutableList.Builder<ListenerTree.Node> result,
- final Collection<ListenerTree.Node> parentNodes, final PathArgument childIdentifier) {
- for (ListenerTree.Node node : parentNodes) {
- Optional<ListenerTree.Node> child = node.getChild(childIdentifier);
- if (child.isPresent()) {
- result.add(child.get());
- }
- }
+ state.collectEvents(before, after, collectedEvents);
+ return scope != null;
}
@SuppressWarnings("rawtypes")
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Multimap;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.Builder;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Node;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Recursion state used in {@link ResolveDataChangeEventsTask}. Instances of this
+ * class track which listeners are affected by a particular change node. It takes
+ * care of properly inheriting SUBTREE/ONE listeners and also provides a means to
+ * understand when actual processing need not occur.
+ */
+final class ResolveDataChangeState {
+ private static final Logger LOG = LoggerFactory.getLogger(ResolveDataChangeState.class);
+ /**
+ * Inherited from all parents
+ */
+ private final Iterable<Builder> inheritedSub;
+ /**
+ * Inherited from immediate parent
+ */
+ private final Iterable<Builder> inheritedOne;
+ private final YangInstanceIdentifier nodeId;
+ private final Collection<Node> nodes;
+
+ private final Map<DataChangeListenerRegistration<?>, Builder> subBuilders = new HashMap<>();
+ private final Map<DataChangeListenerRegistration<?>, Builder> oneBuilders = new HashMap<>();
+ private final Map<DataChangeListenerRegistration<?>, Builder> baseBuilders = new HashMap<>();
+
+ private ResolveDataChangeState(final YangInstanceIdentifier nodeId,
+ final Iterable<Builder> inheritedSub, final Iterable<Builder> inheritedOne,
+ final Collection<Node> nodes) {
+ this.nodeId = Preconditions.checkNotNull(nodeId);
+ this.nodes = Preconditions.checkNotNull(nodes);
+ this.inheritedSub = Preconditions.checkNotNull(inheritedSub);
+ this.inheritedOne = Preconditions.checkNotNull(inheritedOne);
+
+ /*
+ * Collect the nodes which need to be propagated from us to the child.
+ */
+ for (Node n : nodes) {
+ for (DataChangeListenerRegistration<?> l : n.getListeners()) {
+ final Builder b = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE);
+ switch (l.getScope()) {
+ case BASE:
+ baseBuilders.put(l, b);
+ break;
+ case ONE:
+ oneBuilders.put(l, b);
+ break;
+ case SUBTREE:
+ subBuilders.put(l, b);
+ break;
+ }
+ }
+ }
+ }
+
+    /**
+     * Create an initial state handle at a particular root node.
+     *
+     * @param rootId root instance identifier
+     * @param root root node
+     * @return State handle rooted at the supplied node
+     */
+ public static ResolveDataChangeState initial(final YangInstanceIdentifier rootId, final Node root) {
+ return new ResolveDataChangeState(rootId, Collections.<Builder>emptyList(),
+ Collections.<Builder>emptyList(), Collections.singletonList(root));
+ }
+
+ /**
+ * Create a state handle for iterating over a particular child.
+ *
+ * @param childId ID of the child
+ * @return State handle
+ */
+ public ResolveDataChangeState child(final PathArgument childId) {
+ return new ResolveDataChangeState(nodeId.node(childId),
+ Iterables.concat(inheritedSub, subBuilders.values()),
+ oneBuilders.values(), getListenerChildrenWildcarded(nodes, childId));
+ }
+
+ /**
+ * Get the current path
+ *
+ * @return Current path.
+ */
+ public YangInstanceIdentifier getPath() {
+ return nodeId;
+ }
+
+ /**
+ * Check if this child needs processing.
+ *
+ * @return True if processing needs to occur, false otherwise.
+ */
+ public boolean needsProcessing() {
+ // May have underlying listeners, so we need to process
+ if (!nodes.isEmpty()) {
+ return true;
+ }
+ // Have SUBTREE listeners
+ if (!Iterables.isEmpty(inheritedSub)) {
+ return true;
+ }
+ // Have ONE listeners
+ if (!Iterables.isEmpty(inheritedOne)) {
+ return true;
+ }
+
+ // FIXME: do we need anything else? If not, flip this to 'false'
+ return true;
+ }
+
+ /**
+ * Add an event to all current listeners.
+ *
+ * @param event
+ */
+ public void addEvent(final DOMImmutableDataChangeEvent event) {
+ // Subtree builders get always notified
+ for (Builder b : subBuilders.values()) {
+ b.merge(event);
+ }
+ for (Builder b : inheritedSub) {
+ b.merge(event);
+ }
+
+ if (event.getScope() == DataChangeScope.ONE || event.getScope() == DataChangeScope.BASE) {
+ for (Builder b : oneBuilders.values()) {
+ b.merge(event);
+ }
+ }
+
+ if (event.getScope() == DataChangeScope.BASE) {
+ for (Builder b : inheritedOne) {
+ b.merge(event);
+ }
+ for (Builder b : baseBuilders.values()) {
+ b.merge(event);
+ }
+ }
+ }
+
+    /**
+     * Gather all non-empty events into the provided map.
+     *
+     * All three listener-scope builder maps (BASE, ONE, SUBTREE) are drained
+     * identically: any builder which has accumulated changes is finalized with
+     * the supplied before/after images and attributed to its registration.
+     *
+     * @param before before-image
+     * @param after after-image
+     * @param map target map
+     */
+    public void collectEvents(final NormalizedNode<?, ?> before, final NormalizedNode<?, ?> after,
+            final Multimap<DataChangeListenerRegistration<?>, DOMImmutableDataChangeEvent> map) {
+        collectBuilderEvents(baseBuilders, before, after, map);
+        collectBuilderEvents(oneBuilders, before, after, map);
+        collectBuilderEvents(subBuilders, before, after, map);
+
+        LOG.trace("Collected events {}", map);
+    }
+
+    /**
+     * Finalize and collect every non-empty builder in a single scope map.
+     */
+    private static void collectBuilderEvents(final Map<DataChangeListenerRegistration<?>, Builder> builders,
+            final NormalizedNode<?, ?> before, final NormalizedNode<?, ?> after,
+            final Multimap<DataChangeListenerRegistration<?>, DOMImmutableDataChangeEvent> map) {
+        for (Entry<DataChangeListenerRegistration<?>, Builder> e : builders.entrySet()) {
+            final Builder b = e.getValue();
+            if (!b.isEmpty()) {
+                map.put(e.getKey(), b.setBefore(before).setAfter(after).build());
+            }
+        }
+    }
+
+ private static Collection<Node> getListenerChildrenWildcarded(final Collection<Node> parentNodes,
+ final PathArgument child) {
+ if (parentNodes.isEmpty()) {
+ return Collections.emptyList();
+ }
+
+ final List<Node> result = new ArrayList<>();
+ if (child instanceof NodeWithValue || child instanceof NodeIdentifierWithPredicates) {
+ NodeIdentifier wildcardedIdentifier = new NodeIdentifier(child.getNodeType());
+ addChildNodes(result, parentNodes, wildcardedIdentifier);
+ }
+ addChildNodes(result, parentNodes, child);
+ return result;
+ }
+
+ private static void addChildNodes(final List<Node> result, final Collection<Node> parentNodes, final PathArgument childIdentifier) {
+ for (Node node : parentNodes) {
+ Optional<Node> child = node.getChild(childIdentifier);
+ if (child.isPresent()) {
+ result.add(child.get());
+ }
+ }
+ }
+}
when "/config:modules/config:module/config:type = 'inmemory-config-datastore-provider'";
container schema-service {
- uses config:service-ref {
+ uses config:service-ref {
refine type {
mandatory false;
config:required-identity sal:schema-service;
}
- }
+ }
+ }
+
+ leaf max-data-change-executor-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for the data change notification executor.";
+ }
+
+ leaf max-data-change-executor-pool-size {
+ default 20;
+ type uint16;
+ description "The maximum thread pool size for the data change notification executor.";
+ }
+
+ leaf max-data-change-listener-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for the data change listeners.";
}
}
}
+ // Augments the 'configuration' choice node under modules/module.
+ augment "/config:modules/config:module/config:configuration" {
+ case inmemory-operational-datastore-provider {
+ when "/config:modules/config:module/config:type = 'inmemory-operational-datastore-provider'";
+ // Yang does not allow two cases from same namespaces with same children
+ // Schema-service dependency renamed to operational-schema-service
+ // to prevent conflict with schema-service container from inmemory-config-datastore-provider
+ container operational-schema-service {
+ uses config:service-ref {
+ refine type {
+ mandatory false;
+ config:required-identity sal:schema-service;
+ }
+ }
+ }
+
+ leaf max-data-change-executor-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for the data change notification executor.";
+ }
- // Augments the 'configuration' choice node under modules/module.
- augment "/config:modules/config:module/config:configuration" {
- case inmemory-operational-datastore-provider {
- when "/config:modules/config:module/config:type = 'inmemory-operational-datastore-provider'";
+ leaf max-data-change-executor-pool-size {
+ default 20;
+ type uint16;
+ description "The maximum thread pool size for the data change notification executor.";
+ }
- // Yang does not allow two cases from same namespaces with same children
- // Schema-service dependency renamed to operational-schema-service
- // to prevent conflict with schema-service container from inmemory-config-datastore-provider
- container operational-schema-service {
- uses config:service-ref {
- refine type {
- mandatory false;
- config:required-identity sal:schema-service;
- }
- }
- }
+ leaf max-data-change-listener-queue-size {
+ default 1000;
+ type uint16;
+ description "The maximum queue size for the data change listeners.";
}
}
+ }
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.util.concurrent.MoreExecutors;
+
import java.util.Collection;
import java.util.Map;
+
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeBuilder;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import com.google.common.util.concurrent.MoreExecutors;
-
public abstract class AbstractDataChangeListenerTest {
protected static final YangInstanceIdentifier TOP_LEVEL = YangInstanceIdentifier
}
}
+ /**
+ * Create a new test task. The task will operate on the backed database,
+ * and will use the proper background executor service.
+ *
+ * @return Test task initialized to clean up {@value #TOP_LEVEL} and its
+ * children.
+ */
public final DatastoreTestTask newTestTask() {
return new DatastoreTestTask(datastore, dclExecutorService).cleanup(DatastoreTestTask
.simpleDelete(TOP_LEVEL));
import org.opendaylight.controller.md.sal.dom.store.impl.DatastoreTestTask.WriteTransactionCustomizer;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+/**
+ * Base template for a test suite for testing DataChangeListener functionality.
+ */
public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataChangeListenerTest {
protected static final String FOO_SIBLING = "foo-sibling";
+ /**
+ * Callback invoked when the test suite can modify task parameters.
+ *
+ * @param task Update task configuration as needed
+ */
abstract protected void customizeTask(DatastoreTestTask task);
@Test
assertNotNull(change);
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR));
+ /*
+ * Created data must not contain nested-list item, since that is two-level deep.
+ */
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR));
+ assertContains(change.getCreatedData(), path(FOO) );
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change = task.getChangeEvent();
assertNotNull(change);
-
- assertContains(change.getCreatedData(), path(FOO, BAZ));
+ /*
+ * Created data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getCreatedData(), path(FOO, BAZ));
assertContains(change.getUpdatedData(), path(FOO));
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO, BAR));
+ /*
+ * Removed data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAR));
}
assertNotNull(change);
assertFalse(change.getCreatedData().isEmpty());
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR), path(FOO, BAZ));
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
+ // Base event should contain only changed item, no details about child.
+ assertContains(change.getCreatedData(), path(FOO));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR), path(FOO, BAZ));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
assertEmpty(change.getUpdatedData());
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO),path(FOO, BAZ),path(FOO,BAR));
+ /*
+ * Scope base listener event should contain top-level-list item and nested list path
+ * and should not contain baz, bar which are two-level deep
+ */
+ assertContains(change.getRemovedPaths(), path(FOO));
+ assertNotContains(change.getRemovedPaths(),path(FOO, BAZ),path(FOO,BAR));
}
@Override
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.top.level.list.NestedList;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
assertNotNull(change);
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR));
+ assertContains(change.getCreatedData(), path(FOO), path(FOO).node(NestedList.QNAME));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change = task.getChangeEvent();
assertNotNull(change);
-
- assertContains(change.getCreatedData(), path(FOO, BAZ));
- assertContains(change.getUpdatedData(), path(FOO));
+ /*
+ * Created data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getCreatedData(), path(FOO, BAZ));
+ assertContains(change.getUpdatedData(), path(FOO),path(FOO).node(NestedList.QNAME));
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO, BAR));
+ /*
+ * Removed data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAR));
}
assertNotNull(change);
assertFalse(change.getCreatedData().isEmpty());
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR), path(FOO, BAZ));
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
+ // Base event should contain only changed item, and details about immediate child.
+ assertContains(change.getCreatedData(), path(FOO),path(FOO).node(NestedList.QNAME));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR), path(FOO, BAZ));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
assertEmpty(change.getUpdatedData());
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO),path(FOO, BAZ),path(FOO,BAR));
+ assertContains(change.getRemovedPaths(), path(FOO),path(FOO).node(NestedList.QNAME));
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAZ),path(FOO,BAR));
}
@Override
final CompositeNode schemasNode =
(CompositeNode) NetconfMessageTransformUtil.findNode(schemasNodeResult.getResult(), DATA_STATE_SCHEMAS_IDENTIFIER);
- return create(schemasNode);
+ return create(id, schemasNode);
}
/**
* Parse response of get(netconf-state/schemas) to find all schemas under netconf-state/schemas
*/
@VisibleForTesting
- protected static NetconfStateSchemas create(final CompositeNode schemasNode) {
+ protected static NetconfStateSchemas create(final RemoteDeviceId id, final CompositeNode schemasNode) {
final Set<RemoteYangSchema> availableYangSchemas = Sets.newHashSet();
for (final CompositeNode schemaNode : schemasNode.getCompositesByName(Schema.QNAME.withoutRevision())) {
- availableYangSchemas.add(RemoteYangSchema.createFromCompositeNode(schemaNode));
+ final Optional<RemoteYangSchema> fromCompositeNode = RemoteYangSchema.createFromCompositeNode(id, schemaNode);
+ if(fromCompositeNode.isPresent()) {
+ availableYangSchemas.add(fromCompositeNode.get());
+ }
}
return new NetconfStateSchemas(availableYangSchemas);
return qname;
}
- static RemoteYangSchema createFromCompositeNode(final CompositeNode schemaNode) {
+ static Optional<RemoteYangSchema> createFromCompositeNode(final RemoteDeviceId id, final CompositeNode schemaNode) {
Preconditions.checkArgument(schemaNode.getKey().equals(Schema.QNAME.withoutRevision()), "Wrong QName %s", schemaNode.getKey());
QName childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_FORMAT.withoutRevision();
final String formatAsString = getSingleChildNodeValue(schemaNode, childNode).get();
- Preconditions.checkArgument(formatAsString.equals(Yang.QNAME.getLocalName()),
- "Expecting format to be only %s, not %s", Yang.QNAME.getLocalName(), formatAsString);
+ if(formatAsString.equals(Yang.QNAME.getLocalName()) == false) {
+ logger.debug("{}: Ignoring schema due to unsupported format: {}", id, formatAsString);
+ return Optional.absent();
+ }
childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_LOCATION.withoutRevision();
final Set<String> locationsAsString = getAllChildNodeValues(schemaNode, childNode);
- Preconditions.checkArgument(locationsAsString.contains(Schema.Location.Enumeration.NETCONF.toString()),
- "Expecting location to be %s, not %s", Schema.Location.Enumeration.NETCONF.toString(), locationsAsString);
+ if(locationsAsString.contains(Schema.Location.Enumeration.NETCONF.toString()) == false) {
+ logger.debug("{}: Ignoring schema due to unsupported location: {}", id, locationsAsString);
+ return Optional.absent();
+ }
childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_NAMESPACE.withoutRevision();
final String namespaceAsString = getSingleChildNodeValue(schemaNode, childNode).get();
? QName.create(namespaceAsString, revisionAsString.get(), moduleNameAsString)
: QName.create(URI.create(namespaceAsString), null, moduleNameAsString).withoutRevision();
- return new RemoteYangSchema(moduleQName);
+ return Optional.of(new RemoteYangSchema(moduleQName));
}
private static Set<String> getAllChildNodeValues(final CompositeNode schemaNode, final QName childNodeQName) {
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
-
import com.google.common.collect.Sets;
+
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
}
// FIXME: do we really want to continue here?
- moduleBasedCaps.add(QName.create(namespace, revision, moduleName));
+ moduleBasedCaps.add(QName.cachedReference(QName.create(namespace, revision, moduleName)));
nonModuleCaps.remove(capability);
}
import java.util.Set;
import org.junit.Test;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.opendaylight.controller.sal.connect.util.RemoteDeviceId;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.impl.codec.xml.XmlDocumentUtils;
public void testCreate() throws Exception {
final Document schemasXml = XmlUtil.readXmlToDocument(getClass().getResourceAsStream("/netconf-state.schemas.payload.xml"));
final CompositeNode compositeNodeSchemas = (CompositeNode) XmlDocumentUtils.toDomNode(schemasXml);
- final NetconfStateSchemas schemas = NetconfStateSchemas.create(compositeNodeSchemas);
+ final NetconfStateSchemas schemas = NetconfStateSchemas.create(new RemoteDeviceId("device"), compositeNodeSchemas);
final Set<QName> availableYangSchemasQNames = schemas.getAvailableYangSchemasQNames();
assertEquals(73, availableYangSchemasQNames.size());
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
</dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>netconf-util</artifactId>
- </dependency>
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-spi</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-spi</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-impl</artifactId>
</dependency>
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-impl</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>netconf-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-clustering-commons</artifactId>
+ </dependency>
+
<!-- Yang tools-->
<dependency>
<artifactId>scala-library</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>
<Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
<Export-package></Export-package>
<Private-Package></Private-Package>
- <Import-Package>!org.jboss.*;!com.jcraft.*;*</Import-Package>
+ <Import-Package>!org.iq80.*;!*snappy;!org.jboss.*;!com.jcraft.*;!org.fusesource.*;*</Import-Package>
<Embed-Dependency>
+ sal-clustering-commons;
+ sal-akka-raft;
+ *metrics*;
!sal*;
!*config-api*;
!*testkit*;
import org.opendaylight.controller.remote.rpc.messages.InvokeRpc;
import org.opendaylight.controller.remote.rpc.messages.RpcResponse;
import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.controller.remote.rpc.registry.RpcRegistry;
import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
import org.opendaylight.controller.remote.rpc.utils.RoutingLogic;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.yangtools.yang.common.RpcResult;
public RpcRegistry() {
bucketStore = getContext().actorOf(Props.create(BucketStore.class), "store");
+
+ log.info("Bucket store path = {}", bucketStore.path().toString());
}
public RpcRegistry(ActorRef bucketStore) {
import akka.cluster.Cluster;
import akka.event.Logging;
import akka.event.LoggingAdapter;
+import org.opendaylight.controller.utils.ConditionalProbe;
import java.util.HashMap;
import java.util.Map;
*/
private ActorRef gossiper;
+ private ConditionalProbe probe;
+
public BucketStore(){
gossiper = getContext().actorOf(Props.create(Gossiper.class), "gossiper");
}
@Override
public void onReceive(Object message) throws Exception {
- log.debug("Received message: node[{}], message[{}]", selfAddress, message);
+ log.debug("Received message: node[{}], message[{}]", selfAddress,
+ message);
- if (message instanceof UpdateBucket)
- receiveUpdateBucket(((UpdateBucket) message).getBucket());
+ if (probe != null) {
- else if (message instanceof GetAllBuckets)
- receiveGetAllBucket();
+ probe.tell(message, getSelf());
+ }
- else if (message instanceof GetLocalBucket)
+ if (message instanceof ConditionalProbe) {
+ log.info("Received probe {} {}", getSelf(), message);
+ probe = (ConditionalProbe) message;
+ } else if (message instanceof UpdateBucket) {
+ receiveUpdateBucket(((UpdateBucket) message).getBucket());
+ } else if (message instanceof GetAllBuckets) {
+ receiveGetAllBucket();
+ } else if (message instanceof GetLocalBucket) {
receiveGetLocalBucket();
-
- else if (message instanceof GetBucketsByMembers)
- receiveGetBucketsByMembers(((GetBucketsByMembers) message).getMembers());
-
- else if (message instanceof GetBucketVersions)
+ } else if (message instanceof GetBucketsByMembers) {
+ receiveGetBucketsByMembers(
+ ((GetBucketsByMembers) message).getMembers());
+ } else if (message instanceof GetBucketVersions) {
receiveGetBucketVersions();
-
- else if (message instanceof UpdateRemoteBuckets)
- receiveUpdateRemoteBuckets(((UpdateRemoteBuckets) message).getBuckets());
-
- else {
+ } else if (message instanceof UpdateRemoteBuckets) {
+ receiveUpdateRemoteBuckets(
+ ((UpdateRemoteBuckets) message).getBuckets());
+ } else {
log.debug("Unhandled message [{}]", message);
unhandled(message);
}
Address getSelfAddress() {
return selfAddress;
}
+
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.utils;
+
+import akka.actor.ActorRef;
+import com.google.common.base.Predicate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test helper which forwards a message to a probe actor only when the message
+ * matches the configured predicate. Injected into actors (e.g. BucketStore)
+ * so tests can observe selected messages without intercepting all traffic.
+ */
+public class ConditionalProbe {
+    // Loggers are conventionally private static final; an instance field here
+    // would create one logger per probe for no benefit.
+    private static final Logger LOG = LoggerFactory.getLogger(ConditionalProbe.class);
+
+    private final ActorRef actorRef;
+    // NOTE(review): raw Predicate kept deliberately — callers construct
+    // Predicate<SpecificMessageType> instances and generifying this field to
+    // Predicate<Object> would break their constructor calls.
+    private final Predicate predicate;
+
+    /**
+     * @param actorRef probe actor to forward matching messages to
+     * @param predicate filter deciding which messages are forwarded
+     */
+    public ConditionalProbe(final ActorRef actorRef, final Predicate predicate) {
+        this.actorRef = actorRef;
+        this.predicate = predicate;
+    }
+
+    /**
+     * Forward the message to the probe actor if it satisfies the predicate,
+     * otherwise drop it silently.
+     *
+     * @param message message under consideration
+     * @param sender original sender, propagated to the probe
+     */
+    @SuppressWarnings("unchecked")
+    public void tell(final Object message, final ActorRef sender) {
+        if (predicate.apply(message)) {
+            LOG.info("sending message to probe {}", message);
+            actorRef.tell(message, sender);
+        }
+    }
+}
package org.opendaylight.controller.remote.rpc.registry;
+
import akka.actor.ActorPath;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.ChildActorPath;
import akka.actor.Props;
-import akka.japi.Pair;
import akka.testkit.JavaTestKit;
+import com.google.common.base.Predicate;
import com.typesafe.config.ConfigFactory;
+
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.remote.rpc.RouteIdentifierImpl;
+import org.opendaylight.controller.remote.rpc.registry.gossip.Messages;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
+import org.opendaylight.controller.utils.ConditionalProbe;
import org.opendaylight.yangtools.yang.common.QName;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
+import javax.annotation.Nullable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
+import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter;
import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.AddOrUpdateRoutes;
import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.RemoveRoutes;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRouters;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRoutersReply;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter;
public class RpcRegistryTest {
- private static ActorSystem node1;
- private static ActorSystem node2;
- private static ActorSystem node3;
-
- private ActorRef registry1;
- private ActorRef registry2;
- private ActorRef registry3;
-
- @BeforeClass
- public static void setup() throws InterruptedException {
- Thread.sleep(1000); //give some time for previous test to close netty ports
- node1 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberA"));
- node2 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberB"));
- node3 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberC"));
- }
-
- @AfterClass
- public static void teardown(){
- JavaTestKit.shutdownActorSystem(node1);
- JavaTestKit.shutdownActorSystem(node2);
- JavaTestKit.shutdownActorSystem(node3);
- if (node1 != null)
- node1.shutdown();
- if (node2 != null)
- node2.shutdown();
- if (node3 != null)
- node3.shutdown();
-
- }
-
- @Before
- public void createRpcRegistry() throws InterruptedException {
- registry1 = node1.actorOf(Props.create(RpcRegistry.class));
- registry2 = node2.actorOf(Props.create(RpcRegistry.class));
- registry3 = node3.actorOf(Props.create(RpcRegistry.class));
- }
-
- @After
- public void stopRpcRegistry() throws InterruptedException {
- if (registry1 != null)
- node1.stop(registry1);
- if (registry2 != null)
- node2.stop(registry2);
- if (registry3 != null)
- node3.stop(registry3);
- }
+ private static ActorSystem node1;
+ private static ActorSystem node2;
+ private static ActorSystem node3;
+
+ private ActorRef registry1;
+ private ActorRef registry2;
+ private ActorRef registry3;
+
+ @BeforeClass
+ public static void setup() throws InterruptedException {
+ node1 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberA"));
+ node2 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberB"));
+ node3 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberC"));
+ }
+
+ @AfterClass
+ public static void teardown() {
+ JavaTestKit.shutdownActorSystem(node1);
+ JavaTestKit.shutdownActorSystem(node2);
+ JavaTestKit.shutdownActorSystem(node3);
+ if (node1 != null)
+ node1.shutdown();
+ if (node2 != null)
+ node2.shutdown();
+ if (node3 != null)
+ node3.shutdown();
+
+ }
+
+ @Before
+ public void createRpcRegistry() throws InterruptedException {
+ registry1 = node1.actorOf(Props.create(RpcRegistry.class));
+ registry2 = node2.actorOf(Props.create(RpcRegistry.class));
+ registry3 = node3.actorOf(Props.create(RpcRegistry.class));
+ }
+
+ @After
+ public void stopRpcRegistry() throws InterruptedException {
+ if (registry1 != null)
+ node1.stop(registry1);
+ if (registry2 != null)
+ node2.stop(registry2);
+ if (registry3 != null)
+ node3.stop(registry3);
+ }
+
+ /**
+ * One node cluster.
+ * 1. Register rpc, ensure router can be found
+ * 2. Then remove rpc, ensure it's deleted
+ *
+ * @throws URISyntaxException
+ * @throws InterruptedException
+ */
+ @Test
+ public void testAddRemoveRpcOnSameNode() throws URISyntaxException, InterruptedException {
+ validateSystemStartup();
+
+ final JavaTestKit mockBroker = new JavaTestKit(node1);
+
+ final ActorPath bucketStorePath = new ChildActorPath(registry1.path(), "store");
+
+ //install probe
+ final JavaTestKit probe1 = createProbeForMessage(
+ node1, bucketStorePath, Messages.BucketStoreMessages.UpdateBucket.class);
+
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker.getRef()), mockBroker.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker.getRef());
+
+ //Bucket store should get an update bucket message. Updated bucket contains added rpc.
+ probe1.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateBucket.class);
+
+ //Now remove rpc
+ registry1.tell(getRemoveRouteMessage(), mockBroker.getRef());
+
+ //Bucket store should get an update bucket message. Rpc is removed in the updated bucket
+ probe1.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateBucket.class);
+
+
+ }
+
+
+ /**
+ * Three node cluster.
+ * 1. Register rpc on 1 node, ensure 2nd node gets updated
+ * 2. Remove rpc on 1 node, ensure 2nd node gets updated
+ *
+ * @throws URISyntaxException
+ * @throws InterruptedException
+ */
+ @Test
+ public void testRpcAddRemoveInCluster() throws URISyntaxException, InterruptedException {
- /**
- * One node cluster.
- * 1. Register rpc, ensure router can be found
- * 2. Then remove rpc, ensure its deleted
- *
- * @throws URISyntaxException
- * @throws InterruptedException
- */
- @Test
- public void testAddRemoveRpcOnSameNode() throws URISyntaxException, InterruptedException {
-
- final JavaTestKit mockBroker = new JavaTestKit(node1);
-
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker.getRef()), mockBroker.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker.getRef());
-
- Thread.sleep(1000);//
-
- //find the route on node 1's registry
- registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef());
- FindRoutersReply message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- List<Pair<ActorRef, Long>> pairs = message.getRouterWithUpdateTime();
-
- validateRouterReceived(pairs, mockBroker.getRef());
-
- //Now remove rpc
- registry1.tell(getRemoveRouteMessage(), mockBroker.getRef());
- Thread.sleep(1000);
- //find the route on node 1's registry
- registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef());
- message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- pairs = message.getRouterWithUpdateTime();
-
- Assert.assertTrue(pairs.isEmpty());
- }
+ validateSystemStartup();
+
+ final JavaTestKit mockBroker1 = new JavaTestKit(node1);
+
+ //install probe on node2's bucket store
+ final ActorPath bucketStorePath = new ChildActorPath(registry2.path(), "store");
+ final JavaTestKit probe2 = createProbeForMessage(
+ node2, bucketStorePath, Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- /**
- * Three node cluster.
- * 1. Register rpc on 1 node, ensure its router can be found on other 2.
- * 2. Remove rpc on 1 node, ensure its removed on other 2.
- *
- * @throws URISyntaxException
- * @throws InterruptedException
- */
- @Test
- public void testRpcAddRemoveInCluster() throws URISyntaxException, InterruptedException {
- validateSystemStartup();
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- final JavaTestKit mockBroker1 = new JavaTestKit(node1);
- final JavaTestKit mockBroker2 = new JavaTestKit(node2);
- final JavaTestKit mockBroker3 = new JavaTestKit(node3);
+ //Bucket store on node2 should get a message to update its local copy of remote buckets
+ probe2.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
+ //Now remove
+ registry1.tell(getRemoveRouteMessage(), mockBroker1.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ //Bucket store on node2 should get a message to update its local copy of remote buckets
+ probe2.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //find the route in node 2's registry
- List<Pair<ActorRef, Long>> pairs = findRouters(registry2, mockBroker2);
- validateRouterReceived(pairs, mockBroker1.getRef());
+ }
- //find the route in node 3's registry
- pairs = findRouters(registry3, mockBroker3);
- validateRouterReceived(pairs, mockBroker1.getRef());
+ /**
+ * Three node cluster.
+ * Register rpc on 2 nodes. Ensure 3rd gets updated.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testRpcAddedOnMultiNodes() throws Exception {
- //Now remove
- registry1.tell(getRemoveRouteMessage(), mockBroker1.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ validateSystemStartup();
- pairs = findRouters(registry2, mockBroker2);
- Assert.assertTrue(pairs.isEmpty());
+ final JavaTestKit mockBroker1 = new JavaTestKit(node1);
+ final JavaTestKit mockBroker2 = new JavaTestKit(node2);
+ final JavaTestKit mockBroker3 = new JavaTestKit(node3);
- pairs = findRouters(registry3, mockBroker3);
- Assert.assertTrue(pairs.isEmpty());
- }
+ registry3.tell(new SetLocalRouter(mockBroker3.getRef()), mockBroker3.getRef());
- /**
- * Three node cluster.
- * Register rpc on 2 nodes. Ensure 2 routers are found on 3rd.
- *
- * @throws Exception
- */
- @Test
- public void testAnRpcAddedOnMultiNodesShouldReturnMultiRouter() throws Exception {
+ //install probe on node 3
+ final ActorPath bucketStorePath = new ChildActorPath(registry3.path(), "store");
+ final JavaTestKit probe3 = createProbeForMessage(
+ node3, bucketStorePath, Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- validateSystemStartup();
- final JavaTestKit mockBroker1 = new JavaTestKit(node1);
- final JavaTestKit mockBroker2 = new JavaTestKit(node2);
- final JavaTestKit mockBroker3 = new JavaTestKit(node3);
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- //Thread.sleep(5000);//let system come up
+ probe3.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- //Add same rpc on node 2
- registry2.tell(new SetLocalRouter(mockBroker2.getRef()), mockBroker2.getRef());
- registry2.tell(getAddRouteMessage(), mockBroker2.getRef());
+ //Add same rpc on node 2
+ registry2.tell(new SetLocalRouter(mockBroker2.getRef()), mockBroker2.getRef());
+ registry2.tell(getAddRouteMessage(), mockBroker2.getRef());
- registry3.tell(new SetLocalRouter(mockBroker3.getRef()), mockBroker3.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ probe3.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
+ }
- //find the route in node 3's registry
- registry3.tell(new FindRouters(createRouteId()), mockBroker3.getRef());
- FindRoutersReply message = mockBroker3.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- List<Pair<ActorRef, Long>> pairs = message.getRouterWithUpdateTime();
+ private JavaTestKit createProbeForMessage(ActorSystem node, ActorPath subjectPath, final Class clazz) {
+ final JavaTestKit probe = new JavaTestKit(node);
- validateMultiRouterReceived(pairs, mockBroker1.getRef(), mockBroker2.getRef());
+ ConditionalProbe conditionalProbe =
+ new ConditionalProbe(probe.getRef(), new Predicate() {
+ @Override
+ public boolean apply(@Nullable Object input) {
+ return clazz.equals(input.getClass());
+ }
+ });
- }
+ ActorSelection subject = node.actorSelection(subjectPath);
+ subject.tell(conditionalProbe, ActorRef.noSender());
- private List<Pair<ActorRef, Long>> findRouters(ActorRef registry, JavaTestKit receivingActor) throws URISyntaxException {
- registry.tell(new FindRouters(createRouteId()), receivingActor.getRef());
- FindRoutersReply message = receivingActor.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- return message.getRouterWithUpdateTime();
- }
+ return probe;
- private void validateMultiRouterReceived(List<Pair<ActorRef, Long>> actual, ActorRef... expected) {
- Assert.assertTrue(actual != null);
- Assert.assertTrue(actual.size() == expected.length);
- }
+ }
- private void validateRouterReceived(List<Pair<ActorRef, Long>> actual, ActorRef expected){
- Assert.assertTrue(actual != null);
- Assert.assertTrue(actual.size() == 1);
+ private void validateSystemStartup() throws InterruptedException {
- for (Pair<ActorRef, Long> pair : actual){
- Assert.assertTrue(expected.path().uid() == pair.first().path().uid());
- }
- }
+ ActorPath gossiper1Path = new ChildActorPath(new ChildActorPath(registry1.path(), "store"), "gossiper");
+ ActorPath gossiper2Path = new ChildActorPath(new ChildActorPath(registry2.path(), "store"), "gossiper");
+ ActorPath gossiper3Path = new ChildActorPath(new ChildActorPath(registry3.path(), "store"), "gossiper");
- private void validateSystemStartup() throws InterruptedException {
+ ActorSelection gossiper1 = node1.actorSelection(gossiper1Path);
+ ActorSelection gossiper2 = node2.actorSelection(gossiper2Path);
+ ActorSelection gossiper3 = node3.actorSelection(gossiper3Path);
- Thread.sleep(5000);
- ActorPath gossiper1Path = new ChildActorPath(new ChildActorPath(registry1.path(), "store"), "gossiper");
- ActorPath gossiper2Path = new ChildActorPath(new ChildActorPath(registry2.path(), "store"), "gossiper");
- ActorPath gossiper3Path = new ChildActorPath(new ChildActorPath(registry3.path(), "store"), "gossiper");
- ActorSelection gossiper1 = node1.actorSelection(gossiper1Path);
- ActorSelection gossiper2 = node2.actorSelection(gossiper2Path);
- ActorSelection gossiper3 = node3.actorSelection(gossiper3Path);
+ if (!resolveReference(gossiper1, gossiper2, gossiper3))
+ Assert.fail("Could not find gossipers");
+ }
+ private Boolean resolveReference(ActorSelection... gossipers) {
- if (!resolveReference(gossiper1, gossiper2, gossiper3))
- Assert.fail("Could not find gossipers");
- }
+ Boolean resolved = true;
+ for (int i = 0; i < 5; i++) {
- private Boolean resolveReference(ActorSelection... gossipers) throws InterruptedException {
+ resolved = true;
+ System.out.println(System.currentTimeMillis() + " Resolving gossipers; trial #" + i);
- Boolean resolved = true;
+ for (ActorSelection gossiper : gossipers) {
+ ActorRef ref = null;
- for (int i=0; i< 5; i++) {
- Thread.sleep(1000);
- for (ActorSelection gossiper : gossipers) {
- Future<ActorRef> future = gossiper.resolveOne(new FiniteDuration(5000, TimeUnit.MILLISECONDS));
+ try {
+ Future<ActorRef> future = gossiper.resolveOne(new FiniteDuration(15000, TimeUnit.MILLISECONDS));
+ ref = Await.result(future, new FiniteDuration(10000, TimeUnit.MILLISECONDS));
+ } catch (Exception e) {
+ System.out.println("Could not find gossiper in attempt#" + i + ". Got exception " + e.getMessage());
+ }
- ActorRef ref = null;
- try {
- ref = Await.result(future, new FiniteDuration(10000, TimeUnit.MILLISECONDS));
- } catch (Exception e) {
- e.printStackTrace();
- }
+ if (ref == null)
+ resolved = false;
+ }
- if (ref == null)
- resolved = false;
- }
+ if (resolved) break;
- if (resolved) break;
- }
- return resolved;
}
+ return resolved;
+ }
- private AddOrUpdateRoutes getAddRouteMessage() throws URISyntaxException {
- return new AddOrUpdateRoutes(createRouteIds());
- }
+ private AddOrUpdateRoutes getAddRouteMessage() throws URISyntaxException {
+ return new AddOrUpdateRoutes(createRouteIds());
+ }
- private RemoveRoutes getRemoveRouteMessage() throws URISyntaxException {
- return new RemoveRoutes(createRouteIds());
- }
+ private RemoveRoutes getRemoveRouteMessage() throws URISyntaxException {
+ return new RemoveRoutes(createRouteIds());
+ }
- private List<RpcRouter.RouteIdentifier<?,?,?>> createRouteIds() throws URISyntaxException {
- QName type = new QName(new URI("/mockrpc"), "mockrpc");
- List<RpcRouter.RouteIdentifier<?,?,?>> routeIds = new ArrayList<>();
- routeIds.add(new RouteIdentifierImpl(null, type, null));
- return routeIds;
- }
+ private List<RpcRouter.RouteIdentifier<?, ?, ?>> createRouteIds() throws URISyntaxException {
+ QName type = new QName(new URI("/mockrpc"), "mockrpc");
+ List<RpcRouter.RouteIdentifier<?, ?, ?>> routeIds = new ArrayList<>();
+ routeIds.add(new RouteIdentifierImpl(null, type, null));
+ return routeIds;
+ }
- private RpcRouter.RouteIdentifier<?,?,?> createRouteId() throws URISyntaxException {
- QName type = new QName(new URI("/mockrpc"), "mockrpc");
- return new RouteIdentifierImpl(null, type, null);
- }
-}
\ No newline at end of file
+}
odl-cluster{
akka {
- loglevel = "INFO"
+ loglevel = "DEBUG"
#log-config-on-start = on
actor {
loggers = ["akka.event.slf4j.Slf4jLogger"]
actor {
provider = "akka.cluster.ClusterActorRefProvider"
+ debug {
+ #lifecycle = on
+ }
}
remote {
log-received-messages = off
--- /dev/null
+<configuration scan="true">
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
+ </pattern>
+ </encoder>
+ </appender>
+
+ <root level="debug">
+ <appender-ref ref="STDOUT" />
+ </root>
+</configuration>
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
-import com.google.common.util.concurrent.ListenableFuture;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType.CONFIGURATION;
private NormalizedNode<?, ?> readDataViaTransaction(final DOMDataReadTransaction transaction,
LogicalDatastoreType datastore, YangInstanceIdentifier path) {
LOG.trace("Read " + datastore.name() + " via Restconf: {}", path);
- final ListenableFuture<Optional<NormalizedNode<?, ?>>> listenableFuture = transaction.read(datastore, path);
- if (listenableFuture != null) {
- Optional<NormalizedNode<?, ?>> optional;
- try {
- LOG.debug("Reading result data from transaction.");
- optional = listenableFuture.get();
- } catch (InterruptedException | ExecutionException e) {
- throw new RestconfDocumentedException("Problem to get data from transaction.", e.getCause());
+ final CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> listenableFuture =
+ transaction.read(datastore, path);
- }
- if (optional != null) {
- if (optional.isPresent()) {
- return optional.get();
- }
- }
+ try {
+ Optional<NormalizedNode<?, ?>> optional = listenableFuture.checkedGet();
+ return optional.isPresent() ? optional.get() : null;
+ } catch(ReadFailedException e) {
+ throw new RestconfDocumentedException(e.getMessage(), e, e.getErrorList());
}
- return null;
}
private CheckedFuture<Void, TransactionCommitFailedException> postDataViaTransaction(
final DOMDataReadWriteTransaction rWTransaction, final LogicalDatastoreType datastore,
final YangInstanceIdentifier path, final NormalizedNode<?, ?> payload, DataNormalizationOperation<?> root) {
- ListenableFuture<Optional<NormalizedNode<?, ?>>> futureDatastoreData = rWTransaction.read(datastore, path);
+ CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> futureDatastoreData =
+ rWTransaction.read(datastore, path);
try {
- final Optional<NormalizedNode<?, ?>> optionalDatastoreData = futureDatastoreData.get();
+ final Optional<NormalizedNode<?, ?>> optionalDatastoreData = futureDatastoreData.checkedGet();
if (optionalDatastoreData.isPresent() && payload.equals(optionalDatastoreData.get())) {
- String errMsg = "Post Configuration via Restconf was not executed because data already exists";
- LOG.trace(errMsg + ":{}", path);
+ LOG.trace("Post Configuration via Restconf was not executed because data already exists :{}", path);
throw new RestconfDocumentedException("Data already exists for path: " + path, ErrorType.PROTOCOL,
ErrorTag.DATA_EXISTS);
}
- } catch (InterruptedException | ExecutionException e) {
- LOG.trace("It wasn't possible to get data loaded from datastore at path " + path);
+ } catch(ReadFailedException e) {
+ LOG.warn("Error reading from datastore with path: " + path, e);
}
ensureParentsByMerge(datastore, path, rWTransaction, root);
try {
currentOp = currentOp.getChild(currentArg);
} catch (DataNormalizationException e) {
- throw new IllegalArgumentException(
- String.format("Invalid child encountered in path %s", normalizedPath), e);
+ throw new RestconfDocumentedException(
+ String.format("Error normalizing data for path %s", normalizedPath), e);
}
currentArguments.add(currentArg);
YangInstanceIdentifier currentPath = YangInstanceIdentifier.create(currentArguments);
- final Boolean exists;
-
try {
- CheckedFuture<Boolean, ReadFailedException> future =
- rwTx.exists(store, currentPath);
- exists = future.checkedGet();
+ boolean exists = rwTx.exists(store, currentPath).checkedGet();
+ if (!exists && iterator.hasNext()) {
+ rwTx.merge(store, currentPath, currentOp.createDefault(currentArg));
+ }
} catch (ReadFailedException e) {
LOG.error("Failed to read pre-existing data from store {} path {}", store, currentPath, e);
- throw new IllegalStateException("Failed to read pre-existing data", e);
- }
-
-
- if (!exists && iterator.hasNext()) {
- rwTx.merge(store, currentPath, currentOp.createDefault(currentArg));
+ throw new RestconfDocumentedException("Failed to read pre-existing data", e);
}
}
}
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+import java.util.Collection;
import java.util.List;
+
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;
+
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorTag;
import org.opendaylight.controller.sal.restconf.impl.RestconfError.ErrorType;
+import org.opendaylight.yangtools.yang.common.RpcError;
/**
* Unchecked exception to communicate error information, as defined in the ietf restcong draft, to be sent to the
}
/**
- * Constructs an instance with an error message and exception cause. The stack trace of the exception is included in
- * the error info.
+ * Constructs an instance with an error message and exception cause.
+ * The stack trace of the exception is included in the error info.
*
* @param message
* A string which provides a plain text string describing the error.
/**
* Constructs an instance with the given errors.
*/
- public RestconfDocumentedException(List<RestconfError> errors) {
- this.errors = ImmutableList.copyOf(errors);
- Preconditions.checkArgument(!this.errors.isEmpty(), "RestconfError list can't be empty");
+ public RestconfDocumentedException(String message, Throwable cause, List<RestconfError> errors) {
+ super(message, cause);
+ if(!errors.isEmpty()) {
+ this.errors = ImmutableList.copyOf(errors);
+ } else {
+ this.errors = ImmutableList.of(new RestconfError(RestconfError.ErrorType.APPLICATION,
+ RestconfError.ErrorTag.OPERATION_FAILED, message));
+ }
+
status = null;
}
+ /**
+ * Constructs an instance with the given RpcErrors.
+ */
+ public RestconfDocumentedException(String message, Throwable cause, Collection<RpcError> rpcErrors) {
+ this(message, cause, convertToRestconfErrors(rpcErrors));
+ }
+
/**
* Constructs an instance with an HTTP status and no error information.
*
status = null;
}
+ private static List<RestconfError> convertToRestconfErrors(Collection<RpcError> rpcErrors) {
+ List<RestconfError> errorList = Lists.newArrayList();
+ if(rpcErrors != null) {
+ for (RpcError rpcError : rpcErrors) {
+ errorList.add(new RestconfError(rpcError));
+ }
+ }
+
+ return errorList;
+ }
+
+
public List<RestconfError> getErrors() {
return errors;
}
RESOURCE_DENIED("resource-denied", 409 /* Conflict */),
ROLLBACK_FAILED("rollback-failed", 500 /* INTERNAL_SERVER_ERROR */),
DATA_EXISTS("data-exists", 409 /* Conflict */),
- DATA_MISSING("data-missing", 409 /* Conflict */),
+ DATA_MISSING("data-missing", 404 /* Resource Not Found */),
OPERATION_NOT_SUPPORTED("operation-not-supported", 501 /* Not Implemented */),
OPERATION_FAILED("operation-failed", 500 /* INTERNAL_SERVER_ERROR */),
PARTIAL_OPERATION("partial-operation", 500 /* INTERNAL_SERVER_ERROR */),
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import org.apache.commons.lang3.StringUtils;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
+import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizer;
import org.opendaylight.controller.md.sal.dom.api.DOMMountPoint;
import org.opendaylight.controller.sal.rest.api.Draft02;
import org.opendaylight.yangtools.concepts.Codec;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.QNameModule;
-import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.MutableCompositeNode;
private void checkRpcSuccessAndThrowException(final RpcResult<CompositeNode> rpcResult) {
if (rpcResult.isSuccessful() == false) {
- Collection<RpcError> rpcErrors = rpcResult.getErrors();
- if (rpcErrors == null || rpcErrors.isEmpty()) {
- throw new RestconfDocumentedException(
- "The operation was not successful and there were no RPC errors returned", ErrorType.RPC,
- ErrorTag.OPERATION_FAILED);
- }
-
- List<RestconfError> errorList = Lists.newArrayList();
- for (RpcError rpcError : rpcErrors) {
- errorList.add(new RestconfError(rpcError));
- }
-
- throw new RestconfDocumentedException(errorList);
+ throw new RestconfDocumentedException("The operation was not successful", null,
+ rpcResult.getErrors());
}
}
iiWithData.getSchemaNode());
YangInstanceIdentifier normalizedII;
+ if (mountPoint != null) {
+ normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(
+ iiWithData.getInstanceIdentifier());
+ } else {
+ normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier());
+ }
- try {
- if (mountPoint != null) {
- normalizedII = new DataNormalizer(mountPoint.getSchemaContext()).toNormalized(iiWithData
- .getInstanceIdentifier());
- broker.commitConfigurationDataPut(mountPoint, normalizedII, datastoreNormalizedNode).get();
- } else {
- normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier());
- broker.commitConfigurationDataPut(normalizedII, datastoreNormalizedNode).get();
+ /*
+ * There is a small window where another write transaction could be updating the same data
+ * simultaneously and we get an OptimisticLockFailedException. This error is likely
+ * transient and the WriteTransaction#submit API docs state that a retry will likely
+ * succeed. So we'll try again if that scenario occurs. If it fails a second time then it
+ * probably will never succeed so we'll fail in that case.
+ *
+ * By retrying we're attempting to hide the internal implementation of the data store and
+ * how it handles concurrent updates from the restconf client. The client has instructed us
+ * to put the data and we should make every effort to do so without pushing optimistic lock
+ * failures back to the client and forcing them to handle it via retry (and having to
+ * document the behavior).
+ */
+ int tries = 2;
+ while(true) {
+ try {
+ if (mountPoint != null) {
+ broker.commitConfigurationDataPut(mountPoint, normalizedII,
+ datastoreNormalizedNode).checkedGet();
+ } else {
+ broker.commitConfigurationDataPut(normalizedII,
+ datastoreNormalizedNode).checkedGet();
+ }
+
+ break;
+ } catch (TransactionCommitFailedException e) {
+ if(e instanceof OptimisticLockFailedException) {
+ if(--tries <= 0) {
+ LOG.debug("Got OptimisticLockFailedException on last try - failing");
+ throw new RestconfDocumentedException(e.getMessage(), e, e.getErrorList());
+ }
+
+ LOG.debug("Got OptimisticLockFailedException - trying again");
+ } else {
+ throw new RestconfDocumentedException(e.getMessage(), e, e.getErrorList());
+ }
}
- } catch (Exception e) {
- throw new RestconfDocumentedException("Error updating data", e);
}
return Response.status(Status.OK).build();
normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier());
broker.commitConfigurationDataPost(normalizedII, datastoreNormalizedData);
}
+ } catch(RestconfDocumentedException e) {
+ throw e;
} catch (Exception e) {
throw new RestconfDocumentedException("Error creating data", e);
}
normalizedII = controllerContext.toNormalized(iiWithData.getInstanceIdentifier());
broker.commitConfigurationDataPost(normalizedII, datastoreNormalizedData);
}
+ } catch(RestconfDocumentedException e) {
+ throw e;
} catch (Exception e) {
throw new RestconfDocumentedException("Error creating data", e);
}
private CompositeNode normalizeNode(final Node<?> node, final DataSchemaNode schema, final DOMMountPoint mountPoint) {
if (schema == null) {
- QName nodeType = node == null ? null : node.getNodeType();
- String localName = nodeType == null ? null : nodeType.getLocalName();
+ String localName = node == null ? null :
+ node instanceof NodeWrapper ? ((NodeWrapper<?>)node).getLocalName() :
+ node.getNodeType().getLocalName();
throw new RestconfDocumentedException("Data schema node was not found for " + localName,
ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE);
restconfImpl.invokeRpc("toaster:cancel-toast", "", uriInfo);
fail("Expected an exception to be thrown.");
} catch (RestconfDocumentedException e) {
- verifyRestconfDocumentedException(e, 0, ErrorType.RPC, ErrorTag.OPERATION_FAILED,
+ verifyRestconfDocumentedException(e, 0, ErrorType.APPLICATION, ErrorTag.OPERATION_FAILED,
Optional.<String> absent(), Optional.<String> absent());
}
}
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
+
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
+
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
+
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.opendaylight.controller.md.sal.common.api.data.OptimisticLockFailedException;
import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.md.sal.dom.api.DOMMountPoint;
import org.opendaylight.controller.md.sal.dom.api.DOMMountPointService;
assertEquals(200, put(uri, MediaType.APPLICATION_XML, xmlData3));
}
+ @Test
+ public void putWithOptimisticLockFailedException() throws UnsupportedEncodingException {
+
+ String uri = "/config/ietf-interfaces:interfaces/interface/eth0";
+
+ doThrow(OptimisticLockFailedException.class).
+ when(brokerFacade).commitConfigurationDataPut(
+ any(YangInstanceIdentifier.class), any(NormalizedNode.class));
+
+ assertEquals(500, put(uri, MediaType.APPLICATION_XML, xmlData));
+
+ doThrow(OptimisticLockFailedException.class).doReturn(mock(CheckedFuture.class)).
+ when(brokerFacade).commitConfigurationDataPut(
+ any(YangInstanceIdentifier.class), any(NormalizedNode.class));
+
+ assertEquals(200, put(uri, MediaType.APPLICATION_XML, xmlData));
+ }
+
+ @Test
+ public void putWithTransactionCommitFailedException() throws UnsupportedEncodingException {
+
+ String uri = "/config/ietf-interfaces:interfaces/interface/eth0";
+
+ doThrow(TransactionCommitFailedException.class).
+ when(brokerFacade).commitConfigurationDataPut(
+ any(YangInstanceIdentifier.class), any(NormalizedNode.class));
+
+ assertEquals(500, put(uri, MediaType.APPLICATION_XML, xmlData));
+ }
+
private int put(String uri, String mediaType, String data) throws UnsupportedEncodingException {
return target(uri).request(mediaType).put(Entity.entity(data, mediaType)).getStatus();
}
public void testToJsonResponseWithDataMissingErrorTag() throws Exception {
testJsonResponse(new RestconfDocumentedException("mock error", ErrorType.PROTOCOL, ErrorTag.DATA_MISSING),
- Status.CONFLICT, ErrorType.PROTOCOL, ErrorTag.DATA_MISSING, "mock error", null, null);
+ Status.NOT_FOUND, ErrorType.PROTOCOL, ErrorTag.DATA_MISSING, "mock error", null, null);
}
@Test
List<RestconfError> errorList = Arrays.asList(new RestconfError(ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
"mock error1"), new RestconfError(ErrorType.RPC, ErrorTag.ROLLBACK_FAILED, "mock error2"));
- stageMockEx(new RestconfDocumentedException(errorList));
+ stageMockEx(new RestconfDocumentedException("mock", null, errorList));
Response resp = target("/operational/foo").request(MediaType.APPLICATION_JSON).get();
public void testToXMLResponseWithDataMissingErrorTag() throws Exception {
testXMLResponse(new RestconfDocumentedException("mock error", ErrorType.PROTOCOL, ErrorTag.DATA_MISSING),
- Status.CONFLICT, ErrorType.PROTOCOL, ErrorTag.DATA_MISSING, "mock error", null, null);
+ Status.NOT_FOUND, ErrorType.PROTOCOL, ErrorTag.DATA_MISSING, "mock error", null, null);
}
@Test
List<RestconfError> errorList = Arrays.asList(new RestconfError(ErrorType.APPLICATION, ErrorTag.LOCK_DENIED,
"mock error1"), new RestconfError(ErrorType.RPC, ErrorTag.ROLLBACK_FAILED, "mock error2"));
- stageMockEx(new RestconfDocumentedException(errorList));
+ stageMockEx(new RestconfDocumentedException("mock", null, errorList));
Response resp = target("/operational/foo").request(MediaType.APPLICATION_XML).get();
lookUpMap.put("resource-denied", 409);
lookUpMap.put("rollback-failed", 500);
lookUpMap.put("data-exists", 409);
- lookUpMap.put("data-missing", 409);
+ lookUpMap.put("data-missing", 404);
lookUpMap.put("operation-not-supported", 501);
lookUpMap.put("operation-failed", 500);
lookUpMap.put("partial-operation", 500);
protected static final String API_VERSION = "1.0.0";
protected static final String SWAGGER_VERSION = "1.2";
protected static final String RESTCONF_CONTEXT_ROOT = "restconf";
+
+ static final String MODULE_NAME_SUFFIX = "_module";
protected final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
private final ModelGenerator jsonConverter = new ModelGenerator();
List<Parameter> pathParams = new ArrayList<Parameter>();
String resourcePath = getDataStorePath("/config/", context);
+ addRootPostLink(m, (DataNodeContainer) node, pathParams, resourcePath, apis);
addApis(node, apis, resourcePath, pathParams, schemaContext, true);
pathParams = new ArrayList<Parameter>();
return null;
}
+ /**
+ * Adds a POST API entry for the module's root resource path, but only when
+ * the module has at least one top-level list or container (otherwise there
+ * is nothing that could be created under the root).
+ * NOTE(review): the {@code node} parameter is currently unused here — confirm
+ * whether it can be dropped or is kept for signature symmetry with callers.
+ */
+ private void addRootPostLink(final Module m, final DataNodeContainer node, final List<Parameter> pathParams,
+ final String resourcePath, final List<Api> apis) {
+ if (containsListOrContainer(m.getChildNodes())) {
+ final Api apiForRootPostUri = new Api();
+ apiForRootPostUri.setPath(resourcePath);
+ // The module-level POST uses the module name plus MODULE_NAME_SUFFIX
+ // ("_module") so its model name cannot clash with ordinary data nodes.
+ apiForRootPostUri.setOperations(operationPost(m.getName()+MODULE_NAME_SUFFIX, m.getDescription(), m, pathParams, true));
+ apis.add(apiForRootPostUri);
+ }
+ }
+
protected ApiDeclaration createApiDeclaration(String basePath) {
ApiDeclaration doc = new ApiDeclaration();
doc.setApiVersion(API_VERSION);
String resourcePath = parentPath + createPath(node, pathParams, schemaContext) + "/";
_logger.debug("Adding path: [{}]", resourcePath);
api.setPath(resourcePath);
- api.setOperations(operations(node, pathParams, addConfigApi));
- apis.add(api);
+
+ Iterable<DataSchemaNode> childSchemaNodes = Collections.<DataSchemaNode> emptySet();
if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- DataNodeContainer schemaNode = (DataNodeContainer) node;
-
- for (DataSchemaNode childNode : schemaNode.getChildNodes()) {
- // We don't support going to leaf nodes today. Only lists and
- // containers.
- if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
- // keep config and operation attributes separate.
- if (childNode.isConfiguration() == addConfigApi) {
- addApis(childNode, apis, resourcePath, pathParams, schemaContext, addConfigApi);
- }
+ DataNodeContainer dataNodeContainer = (DataNodeContainer) node;
+ childSchemaNodes = dataNodeContainer.getChildNodes();
+ }
+ api.setOperations(operation(node, pathParams, addConfigApi, childSchemaNodes));
+ apis.add(api);
+
+ for (DataSchemaNode childNode : childSchemaNodes) {
+ if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
+ // keep config and operation attributes separate.
+ if (childNode.isConfiguration() == addConfigApi) {
+ addApis(childNode, apis, resourcePath, pathParams, schemaContext, addConfigApi);
}
}
}
}
+ /**
+ * Returns {@code true} when {@code nodes} contains at least one
+ * {@link ListSchemaNode} or {@link ContainerSchemaNode}.
+ */
+ private boolean containsListOrContainer(final Iterable<DataSchemaNode> nodes) {
+ for (DataSchemaNode child : nodes) {
+ if (child instanceof ListSchemaNode || child instanceof ContainerSchemaNode) {
+ return true;
+ }
+ }
+ return false;
+ }
+
/**
* @param node
* @param pathParams
* @return
*/
- private List<Operation> operations(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig) {
+ private List<Operation> operation(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig, Iterable<DataSchemaNode> childSchemaNodes) {
List<Operation> operations = new ArrayList<>();
OperationBuilder.Get getBuilder = new OperationBuilder.Get(node, isConfig);
operations.add(getBuilder.pathParams(pathParams).build());
if (isConfig) {
- OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
- operations.add(postBuilder.pathParams(pathParams).build());
-
- OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
+ OperationBuilder.Put putBuilder = new OperationBuilder.Put(node.getQName().getLocalName(),
+ node.getDescription());
operations.add(putBuilder.pathParams(pathParams).build());
OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
operations.add(deleteBuilder.pathParams(pathParams).build());
+
+ if (containsListOrContainer(childSchemaNodes)) {
+ operations.addAll(operationPost(node.getQName().getLocalName(), node.getDescription(), (DataNodeContainer) node,
+ pathParams, isConfig));
+ }
+ }
+ return operations;
+ }
+
+ /**
+ * Builds the POST operation list for a config data node.
+ *
+ * @param name local name used in the operation nickname and model reference
+ * @param description text copied into the operation notes
+ * @param dataNodeContainer node whose child lists/containers become the POST body parameters
+ * @param pathParams URI path parameters propagated to the operation
+ * @param isConfig a POST is generated only for config data
+ * @return a single-element list with the POST operation, or an empty list when {@code isConfig} is false
+ */
+ private List<Operation> operationPost(final String name, final String description, final DataNodeContainer dataNodeContainer, List<Parameter> pathParams, boolean isConfig) {
+ List<Operation> operations = new ArrayList<>();
+ if (isConfig) {
+ OperationBuilder.Post postBuilder = new OperationBuilder.Post(name, description, dataNodeContainer);
+ operations.add(postBuilder.pathParams(pathParams).build());
}
return operations;
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
+import static org.opendaylight.controller.sal.rest.doc.impl.BaseYangSwaggerGenerator.MODULE_NAME_SUFFIX;
+import static org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder.Post.METHOD_NAME;
import static org.opendaylight.controller.sal.rest.doc.util.RestDocgenUtil.resolveNodesName;
+import com.google.common.base.Preconditions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import org.opendaylight.yangtools.yang.model.api.ChoiceNode;
import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.IdentitySchemaNode;
import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
public JSONObject convertToJsonSchema(Module module, SchemaContext schemaContext) throws IOException, JSONException {
JSONObject models = new JSONObject();
topLevelModule = module;
- processContainers(module, models, schemaContext);
+ processModules(module, models);
+ processContainersAndLists(module, models, schemaContext);
processRPCs(module, models, schemaContext);
processIdentities(module, models);
return models;
}
- private void processContainers(Module module, JSONObject models, SchemaContext schemaContext) throws IOException,
- JSONException {
+ /**
+ * Creates the POST body model for the module root; its properties are built
+ * from the module's child lists and containers (see createPropertiesForPost).
+ */
+ private void processModules(Module module, JSONObject models) throws JSONException {
+ createConcreteModelForPost(models, module.getName()+MODULE_NAME_SUFFIX, createPropertiesForPost(module));
+ }
+
+ private void processContainersAndLists(Module module, JSONObject models, SchemaContext schemaContext)
+ throws IOException, JSONException {
String moduleName = module.getName();
for (DataSchemaNode childNode : module.getChildNodes()) {
- JSONObject configModuleJSON = null;
- JSONObject operationalModuleJSON = null;
-
- String childNodeName = childNode.getQName().getLocalName();
- /*
- * For every container in the module
- */
- if (childNode instanceof ContainerSchemaNode) {
- configModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models, true,
- schemaContext);
- operationalModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models,
- false, schemaContext);
- }
-
- if (configModuleJSON != null) {
- _logger.debug("Adding model for [{}]", OperationBuilder.CONFIG + childNodeName);
- configModuleJSON.put("id", OperationBuilder.CONFIG + childNodeName);
- models.put(OperationBuilder.CONFIG + childNodeName, configModuleJSON);
- }
- if (operationalModuleJSON != null) {
- _logger.debug("Adding model for [{}]", OperationBuilder.OPERATIONAL + childNodeName);
- operationalModuleJSON.put("id", OperationBuilder.OPERATIONAL + childNodeName);
- models.put(OperationBuilder.OPERATIONAL + childNodeName, operationalModuleJSON);
- }
+ // For every container and list in the module
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
}
}
ContainerSchemaNode input = rpc.getInput();
if (input != null) {
- JSONObject inputJSON = processContainer(input, moduleName, true, models, schemaContext);
+ JSONObject inputJSON = processDataNodeContainer(input, moduleName, models, schemaContext);
String filename = "(" + rpc.getQName().getLocalName() + ")input";
inputJSON.put("id", filename);
// writeToFile(filename, inputJSON.toString(2), moduleName);
ContainerSchemaNode output = rpc.getOutput();
if (output != null) {
- JSONObject outputJSON = processContainer(output, moduleName, true, models, schemaContext);
+ JSONObject outputJSON = processDataNodeContainer(output, moduleName, models, schemaContext);
String filename = "(" + rpc.getQName().getLocalName() + ")output";
outputJSON.put("id", filename);
models.put(filename, outputJSON);
}
/**
- * Processes the container node and populates the moduleJSON
+ * Processes the container and list nodes and populates the moduleJSON
*
* @param container
* @param moduleName
* @throws JSONException
* @throws IOException
*/
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt,
- JSONObject models, SchemaContext schemaContext) throws JSONException, IOException {
- return processContainer(container, moduleName, addSchemaStmt, models, (Boolean) null, schemaContext);
+ private JSONObject processDataNodeContainer(DataNodeContainer dataNode, String moduleName, JSONObject models,
+ SchemaContext schemaContext) throws JSONException, IOException {
+ return processDataNodeContainer(dataNode, moduleName, models, (Boolean) null, schemaContext);
}
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt,
- JSONObject models, Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
- JSONObject moduleJSON = getSchemaTemplate();
- if (addSchemaStmt) {
- moduleJSON = getSchemaTemplate();
- } else {
- moduleJSON = new JSONObject();
+ private JSONObject processDataNodeContainer(DataNodeContainer dataNode, String moduleName, JSONObject models,
+ Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
+ if (dataNode instanceof ListSchemaNode || dataNode instanceof ContainerSchemaNode) {
+ Preconditions.checkArgument(dataNode instanceof SchemaNode, "Data node should be also schema node");
+ Iterable<DataSchemaNode> containerChildren = dataNode.getChildNodes();
+ JSONObject properties = processChildren(containerChildren, ((SchemaNode) dataNode).getQName(), moduleName,
+ models, isConfig, schemaContext);
+
+ String nodeName = (BooleanUtils.isNotFalse(isConfig) ? OperationBuilder.CONFIG
+ : OperationBuilder.OPERATIONAL) + ((SchemaNode) dataNode).getQName().getLocalName();
+
+ JSONObject childSchema = getSchemaTemplate();
+ childSchema.put(TYPE_KEY, OBJECT_TYPE);
+ childSchema.put(PROPERTIES_KEY, properties);
+ childSchema.put("id", nodeName);
+ models.put(nodeName, childSchema);
+
+ if (BooleanUtils.isNotFalse(isConfig)) {
+ createConcreteModelForPost(models, ((SchemaNode) dataNode).getQName().getLocalName(),
+ createPropertiesForPost(dataNode));
+ }
+
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY, nodeName);
+ JSONObject dataNodeProperties = new JSONObject();
+ dataNodeProperties.put(TYPE_KEY, dataNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
+ dataNodeProperties.put(ITEMS_KEY, items);
+
+ return dataNodeProperties;
}
- moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
+ return null;
+ }
- String containerDescription = container.getDescription();
- moduleJSON.put(DESCRIPTION_KEY, containerDescription);
+ /**
+ * Registers a "(config)&lt;localName&gt;POST" schema in {@code models}: an
+ * object-typed schema template whose properties are the supplied
+ * {@code properties} JSON object.
+ */
+ private void createConcreteModelForPost(final JSONObject models, final String localName, final JSONObject properties)
+ throws JSONException {
+ String nodePostName = OperationBuilder.CONFIG + localName + METHOD_NAME;
+ JSONObject postSchema = getSchemaTemplate();
+ postSchema.put(TYPE_KEY, OBJECT_TYPE);
+ postSchema.put("id", nodePostName);
+ postSchema.put(PROPERTIES_KEY, properties);
+ models.put(nodePostName, postSchema);
+ }
- JSONObject properties = processChildren(container.getChildNodes(), container.getQName(), moduleName, models,
- isConfig, schemaContext);
- moduleJSON.put(PROPERTIES_KEY, properties);
- return moduleJSON;
+ /**
+ * Builds the properties object for a POST model: one entry per child list
+ * (array-typed) or container (object-typed), each referencing the child's
+ * "(config)&lt;name&gt;" model via $ref. Leaves and other node kinds are skipped.
+ */
+ private JSONObject createPropertiesForPost(final DataNodeContainer dataNodeContainer) throws JSONException {
+ JSONObject properties = new JSONObject();
+ for (DataSchemaNode childNode : dataNodeContainer.getChildNodes()) {
+ if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY, "(config)" + childNode.getQName().getLocalName());
+ JSONObject property = new JSONObject();
+ // Lists map to JSON arrays of the referenced model, containers to objects.
+ property.put(TYPE_KEY, childNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
+ property.put(ITEMS_KEY, items);
+ properties.put(childNode.getQName().getLocalName(), property);
+ }
+ }
+ return properties;
}
private JSONObject processChildren(Iterable<DataSchemaNode> nodes, QName parentQName, String moduleName,
if (node instanceof LeafSchemaNode) {
property = processLeafNode((LeafSchemaNode) node);
} else if (node instanceof ListSchemaNode) {
- property = processListSchemaNode((ListSchemaNode) node, moduleName, models, isConfig, schemaContext);
+ property = processDataNodeContainer((ListSchemaNode) node, moduleName, models, isConfig,
+ schemaContext);
} else if (node instanceof LeafListSchemaNode) {
property = processLeafListNode((LeafListSchemaNode) node);
property = processAnyXMLNode((AnyXmlSchemaNode) node);
} else if (node instanceof ContainerSchemaNode) {
- property = processContainer((ContainerSchemaNode) node, moduleName, false, models, isConfig,
+ property = processDataNodeContainer((ContainerSchemaNode) node, moduleName, models, isConfig,
schemaContext);
} else {
}
}
- /**
- * Parses a ListSchema node.
- *
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence, we
- * have to write some properties to a new file, while continuing to process the rest.
- *
- * @param listNode
- * @param moduleName
- * @param isConfig
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models,
- Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
-
- String fileName = (BooleanUtils.isNotFalse(isConfig) ? OperationBuilder.CONFIG : OperationBuilder.OPERATIONAL)
- + listNode.getQName().getLocalName();
-
- JSONObject childSchemaProperties = processChildren(listNode.getChildNodes(), listNode.getQName(), moduleName,
- models, schemaContext);
- JSONObject childSchema = getSchemaTemplate();
- childSchema.put(TYPE_KEY, OBJECT_TYPE);
- childSchema.put(PROPERTIES_KEY, childSchemaProperties);
-
- /*
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence,
- * we have to write some properties to a new file, while continuing to process the rest.
- */
- // writeToFile(fileName, childSchema.toString(2), moduleName);
- childSchema.put("id", fileName);
- models.put(fileName, childSchema);
-
- JSONObject listNodeProperties = new JSONObject();
- listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject items = new JSONObject();
- items.put(REF_KEY, fileName);
- listNodeProperties.put(ITEMS_KEY, items);
-
- return listNodeProperties;
-
- }
-
/**
*
* @param leafNode
import java.util.ArrayList;
import java.util.List;
-
import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
/**
*
*/
public static class Put {
protected Operation spec;
- protected DataSchemaNode schemaNode;
+ protected String nodeName;
private final String METHOD_NAME = "PUT";
- public Put(DataSchemaNode node) {
- this.schemaNode = node;
+ public Put(String nodeName, final String description) {
+ this.nodeName = nodeName;
spec = new Operation();
- spec.setType(CONFIG + node.getQName().getLocalName());
- spec.setNotes(node.getDescription());
+ spec.setType(CONFIG + nodeName);
+ spec.setNotes(description);
}
public Put pathParams(List<Parameter> params) {
List<Parameter> parameters = new ArrayList<>(params);
Parameter payload = new Parameter();
payload.setParamType("body");
- payload.setType(CONFIG + schemaNode.getQName().getLocalName());
+ payload.setType(CONFIG + nodeName);
parameters.add(payload);
spec.setParameters(parameters);
return this;
public Operation build() {
spec.setMethod(METHOD_NAME);
- spec.setNickname(METHOD_NAME + "-" + schemaNode.getQName().getLocalName());
+ spec.setNickname(METHOD_NAME + "-" + nodeName);
return spec;
}
}
*/
public static final class Post extends Put {
- private final String METHOD_NAME = "POST";
+ public static final String METHOD_NAME = "POST";
+ private final DataNodeContainer dataNodeContainer;
- public Post(DataSchemaNode node) {
- super(node);
+ public Post(final String nodeName, final String description, final DataNodeContainer dataNodeContainer) {
+ super(nodeName, description);
+ this.dataNodeContainer = dataNodeContainer;
+ spec.setType(CONFIG + nodeName + METHOD_NAME);
}
@Override
public Operation build() {
spec.setMethod(METHOD_NAME);
- spec.setNickname(METHOD_NAME + "-" + schemaNode.getQName().getLocalName());
+ spec.setNickname(METHOD_NAME + "-" + nodeName);
return spec;
}
+
+ // Unlike PUT, a POST gets one body parameter per child list/container of
+ // the target node, each typed by the child's "(config)<name>" model.
+ @Override
+ public Put pathParams(List<Parameter> params) {
+ List<Parameter> parameters = new ArrayList<>(params);
+ for (DataSchemaNode node : dataNodeContainer.getChildNodes()) {
+ if (node instanceof ListSchemaNode || node instanceof ContainerSchemaNode) {
+ Parameter payload = new Parameter();
+ payload.setParamType("body");
+ payload.setType(CONFIG + node.getQName().getLocalName());
+ // NOTE(review): the "**" prefix on the parameter name appears deliberate
+ // (presumably a marker for generated body params) — confirm with the UI consumer.
+ payload.setName("**"+CONFIG + node.getQName().getLocalName());
+ parameters.add(payload);
+ }
+ }
+ spec.setParameters(parameters);
+ return this;
+
+ }
+
+ /** Sets the operation summary text; returns this builder for chaining. */
+ public Post summary(final String summary) {
+ spec.setSummary(summary);
+ return this;
+ }
}
/**
import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.opendaylight.controller.sal.rest.doc.swagger.Api;
import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
+import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
import org.opendaylight.yangtools.yang.model.api.Module;
}
/**
- * Method: getApiDeclaration(String module, String revision, UriInfo
- * uriInfo)
+ * Method: getApiDeclaration(String module, String revision, UriInfo uriInfo)
*/
@Test
public void testGetModuleDoc() throws Exception {
for (Entry<File, Module> m : helper.getModules().entrySet()) {
if (m.getKey().getAbsolutePath().endsWith("toaster_short.yang")) {
- ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(),
- "http://localhost:8080/restconf", "",schemaContext);
+ ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), "http://localhost:8080/restconf", "",
+ schemaContext);
validateToaster(doc);
validateTosterDocContainsModulePrefixes(doc);
- Assert.assertNotNull(doc);
+ validateSwaggerModules(doc);
+ validateSwaggerApisForPost(doc);
+ }
+ }
+ }
+
+ /**
+ * Validates that the ApiDeclaration contains Apis with the expected concrete
+ * paths and that these Apis contain the specified POST operations.
+ */
+ private void validateSwaggerApisForPost(final ApiDeclaration doc) {
+ // two POST URI with concrete schema name in summary
+ Api lstApi = findApi("/config/toaster2:lst/", doc);
+ assertNotNull("Api /config/toaster2:lst/ wasn't found", lstApi);
+ assertTrue("POST for cont1 in lst is missing",
+ findOperation(lstApi.getOperations(), "POST", "(config)lstPOST", "(config)lst1", "(config)cont1"));
+
+ Api cont1Api = findApi("/config/toaster2:lst/cont1/", doc);
+ assertNotNull("Api /config/toaster2:lst/cont1/ wasn't found", cont1Api);
+ assertTrue("POST for cont11 in cont1 is missing",
+ findOperation(cont1Api.getOperations(), "POST", "(config)cont1POST", "(config)cont11", "(config)lst11"));
+
+ // cont11 has no child list/container, so no POST operation may be generated.
+ Api cont11Api = findApi("/config/toaster2:lst/cont1/cont11/", doc);
+ assertNotNull("Api /config/toaster2:lst/cont1/cont11/ wasn't found", cont11Api);
+ assertTrue("POST operation shouldn't be present.", findOperations(cont11Api.getOperations(), "POST").isEmpty());
+
+ }
+
+ /**
+ * Tries to find an operation with HTTP method {@code operationName} and type
+ * {@code type} whose parameters include every entry of {@code searchedParameters}.
+ */
+ private boolean findOperation(List<Operation> operations, String operationName, String type,
+ String... searchedParameters) {
+ Set<Operation> filteredOperations = findOperations(operations, operationName);
+ for (Operation operation : filteredOperations) {
+ if (operation.getType().equals(type)) {
+ // Only the first operation matching the type is examined.
+ List<Parameter> parameters = operation.getParameters();
+ return containAllParameters(parameters, searchedParameters);
}
}
+ return false;
+ }
+
+ /** Returns all operations whose HTTP method equals {@code operationName}. */
+ private Set<Operation> findOperations(final List<Operation> operations, final String operationName) {
+ final Set<Operation> filteredOperations = new HashSet<>();
+ for (Operation operation : operations) {
+ if (operation.getMethod().equals(operationName)) {
+ filteredOperations.add(operation);
+ }
+ }
+ return filteredOperations;
+ }
+
+ /**
+ * Returns {@code true} only when every type name in {@code searchedWhats}
+ * matches the type of at least one parameter in {@code searchedIns}.
+ */
+ private boolean containAllParameters(final List<Parameter> searchedIns, String[] searchedWhats) {
+ for (String searchedWhat : searchedWhats) {
+ boolean parameterFound = false;
+ for (Parameter searchedIn : searchedIns) {
+ if (searchedIn.getType().equals(searchedWhat)) {
+ parameterFound = true;
+ }
+ }
+ if (!parameterFound) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Tries to find the {@code Api} with path {@code path}; returns {@code null}
+ * when no Api in the declaration matches.
+ */
+ private Api findApi(final String path, final ApiDeclaration doc) {
+ for (Api api : doc.getApis()) {
+ if (api.getPath().equals(path)) {
+ return api;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Validates that {@code doc} contains the expected "(config)..." models for
+ * the lst test fixture and that parent models reference their children.
+ */
+ private void validateSwaggerModules(ApiDeclaration doc) {
+ JSONObject models = doc.getModels();
+ assertNotNull(models);
+ try {
+ JSONObject configLst = models.getJSONObject("(config)lst");
+ assertNotNull(configLst);
+
+ // lst must reference both of its children: list lst1 and container cont1.
+ containsReferences(configLst, "lst1");
+ containsReferences(configLst, "cont1");
+
+ JSONObject configLst1 = models.getJSONObject("(config)lst1");
+ assertNotNull(configLst1);
+
+ JSONObject configCont1 = models.getJSONObject("(config)cont1");
+ assertNotNull(configCont1);
+
+ // cont1 must reference its children cont11 and lst11.
+ containsReferences(configCont1, "cont11");
+ containsReferences(configCont1, "lst11");
+
+ JSONObject configCont11 = models.getJSONObject("(config)cont11");
+ assertNotNull(configCont11);
+
+ JSONObject configLst11 = models.getJSONObject("(config)lst11");
+ assertNotNull(configLst11);
+ } catch (JSONException e) {
+ fail("JSONException wasn't expected");
+ }
+
+ }
+
+ /**
+ * Checks that {@code mainObject} has properties/{@code childObject}/items
+ * with a "$ref" equal to "(config)" + {@code childObject}.
+ */
+ private void containsReferences(final JSONObject mainObject, final String childObject) throws JSONException {
+ JSONObject properties = mainObject.getJSONObject("properties");
+ assertNotNull(properties);
+
+ JSONObject nodeInProperties = properties.getJSONObject(childObject);
+ assertNotNull(nodeInProperties);
+
+ JSONObject itemsInNodeInProperties = nodeInProperties.getJSONObject("items");
+ assertNotNull(itemsInNodeInProperties);
+
+ String itemRef = itemsInNodeInProperties.getString("$ref");
+ assertEquals("(config)" + childObject, itemRef);
}
@Test
for (Entry<File, Module> m : helper.getModules().entrySet()) {
if (m.getKey().getAbsolutePath().endsWith("toaster.yang")) {
- ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(),
- "http://localhost:8080/restconf", "",schemaContext);
+ ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), "http://localhost:8080/restconf", "",
+ schemaContext);
Assert.assertNotNull(doc);
- //testing bugs.opendaylight.org bug 1290. UnionType model type.
+ // testing bugs.opendaylight.org bug 1290. UnionType model type.
String jsonString = doc.getModels().toString();
- assertTrue(
- jsonString.contains( "testUnion\":{\"type\":\"integer or string\",\"required\":false}" ) );
+ assertTrue(jsonString.contains("testUnion\":{\"type\":\"integer or string\",\"required\":false}"));
}
}
}
* @throws Exception
*/
private void validateToaster(ApiDeclaration doc) throws Exception {
- Set<String> expectedUrls = new TreeSet<>(Arrays.asList(new String[] {
- "/config/toaster2:toaster/", "/operational/toaster2:toaster/",
- "/operations/toaster2:cancel-toast", "/operations/toaster2:make-toast",
- "/operations/toaster2:restock-toaster",
+ Set<String> expectedUrls = new TreeSet<>(Arrays.asList(new String[] { "/config/toaster2:toaster/",
+ "/operational/toaster2:toaster/", "/operations/toaster2:cancel-toast",
+ "/operations/toaster2:make-toast", "/operations/toaster2:restock-toaster",
"/config/toaster2:toaster/toasterSlot/{slotId}/toaster-augmented:slotInfo/" }));
Set<String> actualUrls = new TreeSet<>();
fail("Missing expected urls: " + expectedUrls);
}
- Set<String> expectedConfigMethods = new TreeSet<>(Arrays.asList(new String[] { "GET",
- "PUT", "DELETE" }));
+ Set<String> expectedConfigMethods = new TreeSet<>(Arrays.asList(new String[] { "GET", "PUT", "DELETE" }));
Set<String> actualConfigMethods = new TreeSet<>();
for (Operation oper : configApi.getOperations()) {
actualConfigMethods.add(oper.getMethod());
// TODO: we should really do some more validation of the
// documentation...
/**
- * Missing validation: Explicit validation of URLs, and their methods
- * Input / output models.
+ * Missing validation: Explicit validation of URLs, and their methods Input / output models.
*/
}
try {
JSONObject configToaster = topLevelJson.getJSONObject("(config)toaster");
assertNotNull("(config)toaster JSON object missing", configToaster);
- //without module prefix
+ // without module prefix
containsProperties(configToaster, "toasterSlot");
JSONObject toasterSlot = topLevelJson.getJSONObject("(config)toasterSlot");
assertNotNull("(config)toasterSlot JSON object missing", toasterSlot);
- //with module prefix
+ // with module prefix
containsProperties(toasterSlot, "toaster-augmented:slotInfo");
} catch (JSONException e) {
- fail("Json exception while reading JSON object. Original message "+e.getMessage());
+ fail("Json exception while reading JSON object. Original message " + e.getMessage());
}
}
- private void containsProperties(final JSONObject jsonObject,final String...properties) throws JSONException {
+ private void containsProperties(final JSONObject jsonObject, final String... properties) throws JSONException {
for (String property : properties) {
JSONObject propertiesObject = jsonObject.getJSONObject("properties");
assertNotNull("Properties object missing in ", propertiesObject);
JSONObject concretePropertyObject = propertiesObject.getJSONObject(property);
- assertNotNull(property + " is missing",concretePropertyObject);
+ assertNotNull(property + " is missing", concretePropertyObject);
}
}
}
"This variable indicates the current state of
the toaster.";
}
- }
+ }
rpc make-toast {
description
"The darkness factor. Basically, the number of ms to multiple the doneness value by.";
}
} // container toaster
+
+ // Test fixture for the swagger doc generator tests: nested lists and
+ // containers (lst -> cont1 -> {cont11, lst11}, lst1, lf1) used to verify
+ // the generated "(config)..." models and POST operations.
+ list lst {
+ container cont1 {
+ container cont11 {
+ leaf lf111 {
+ type uint32;
+ }
+ leaf lf112 {
+ type string;
+ }
+ }
+ list lst11 {
+ leaf lf111 {
+ type string;
+ }
+ }
+ }
+ list lst1 {
+ key "key1 key2";
+ leaf key1 {
+ type int32;
+ }
+ leaf key2 {
+ type int8;
+ }
+ leaf lf11 {
+ type int16;
+ }
+ }
+ leaf lf1 {
+ type string;
+ }
+ }
rpc make-toast {
description
dataChangeListenerRegistration.close();
rpcRegistration.close();
runtimeReg.close();
- opendaylightToaster.close();
+ closeQuietly(opendaylightToaster);
log.info("Toaster provider (instance {}) torn down.", this);
}
+
+ /**
+ * Closes the resource, logging any exception at debug level and swallowing
+ * it so teardown of the remaining resources is not interrupted.
+ */
+ private void closeQuietly(final AutoCloseable resource) {
+ try {
+ resource.close();
+ } catch (final Exception e) {
+ log.debug("Ignoring exception while closing {}", resource, e);
+ }
+ }
}
AutoCloseable ret = new AutoCloseableToaster();
}
public static boolean flowEquals(Flow statsFlow, Flow storedFlow) {
+ if (statsFlow == null || storedFlow == null) {
+ return false;
+ }
if (statsFlow.getClass() != storedFlow.getClass()) {
return false;
}
} else if(!statsFlow.getContainerName().equals(storedFlow.getContainerName())) {
return false;
}
- if (statsFlow.getMatch()== null) {
- if (storedFlow.getMatch() != null) {
+ if (storedFlow.getPriority() == null) {
+ if (statsFlow.getPriority() != null && statsFlow.getPriority()!= 0x8000) {
return false;
}
- } //else if(!statsFlow.getMatch().equals(storedFlow.getMatch())) {
- else if(!matchEquals(statsFlow.getMatch(), storedFlow.getMatch())) {
+ } else if(!statsFlow.getPriority().equals(storedFlow.getPriority())) {
return false;
}
- if (storedFlow.getPriority() == null) {
- if (statsFlow.getPriority() != null && statsFlow.getPriority()!= 0x8000) {
+ if (statsFlow.getMatch()== null) {
+ if (storedFlow.getMatch() != null) {
return false;
}
- } else if(!statsFlow.getPriority().equals(storedFlow.getPriority())) {
+ } else if(!matchEquals(statsFlow.getMatch(), storedFlow.getMatch())) {
return false;
}
if (statsFlow.getTableId() == null) {
*/
package org.opendaylight.controller.md.statistics.manager;
+import java.math.BigInteger;
import java.util.Collection;
+import java.util.Collections;
import java.util.Map.Entry;
import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCookieMapping;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowId;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.nodes.node.table.FlowCookieMap;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.nodes.node.table.FlowCookieMapBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.nodes.node.table.FlowCookieMapKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.statistics.rev130819.flow.and.statistics.map.list.FlowAndStatisticsMapList;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.statistics.rev130819.flow.and.statistics.map.list.FlowAndStatisticsMapListBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.statistics.rev130819.flow.statistics.FlowStatisticsBuilder;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
import org.opendaylight.yang.gen.v1.urn.opendaylight.model.statistics.types.rev130925.GenericStatistics;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Optional;
+
final class FlowStatsTracker extends AbstractListeningStatsTracker<FlowAndStatisticsMapList, FlowStatsEntry> {
- private static final Logger logger = LoggerFactory.getLogger(FlowStatsTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlowStatsTracker.class);
+ private static final String ALIEN_SYSTEM_FLOW_ID = "#UF$TABLE*";
private final OpendaylightFlowStatisticsService flowStatsService;
private FlowTableStatsTracker flowTableStats;
private int unaccountedFlowsCounter = 1;
+
FlowStatsTracker(final OpendaylightFlowStatisticsService flowStatsService, final FlowCapableContext context) {
super(context);
this.flowStatsService = flowStatsService;
FlowStatisticsDataBuilder flowStatisticsData = new FlowStatisticsDataBuilder();
- FlowBuilder flow = new FlowBuilder(map);
- if(map.getFlowId() != null) {
- flow.setId(new FlowId(map.getFlowId().getValue()));
+ FlowBuilder flowBuilder = new FlowBuilder(map);
+ if (map.getFlowId() != null) {
+ flowBuilder.setId(new FlowId(map.getFlowId().getValue()));
}
- if(map.getFlowId()!= null) {
- flow.setKey(new FlowKey(new FlowId(map.getKey().getFlowId().getValue())));
+ if (map.getFlowId() != null) {
+ flowBuilder.setKey(new FlowKey(new FlowId(map.getKey().getFlowId().getValue())));
}
- Flow flowRule = flow.build();
+ Flow flowRule = flowBuilder.build();
FlowAndStatisticsMapListBuilder stats = new FlowAndStatisticsMapListBuilder();
stats.setByteCount(map.getByteCount());
flowStatisticsData.setFlowStatistics(flowStatistics.build());
- logger.debug("Flow : {}",flowRule.toString());
- logger.debug("Statistics to augment : {}",flowStatistics.build().toString());
+ LOG.debug("Flow : {}",flowRule.toString());
+ LOG.debug("Statistics to augment : {}",flowStatistics.build().toString());
InstanceIdentifier<Table> tableRef = getNodeIdentifierBuilder()
- .augmentation(FlowCapableNode.class).child(Table.class, new TableKey(tableId)).toInstance();
-
- //TODO: Not a good way to do it, need to figure out better way.
- //TODO: major issue in any alternate approach is that flow key is incrementally assigned
- //to the flows stored in data store.
- // Augment same statistics to all the matching masked flow
- Table table= (Table)trans.readConfigurationData(tableRef);
- if(table != null){
- for(Flow existingFlow : table.getFlow()){
- logger.debug("Existing flow in data store : {}",existingFlow.toString());
- if(FlowComparator.flowEquals(flowRule,existingFlow)){
- InstanceIdentifier<Flow> flowRef = getNodeIdentifierBuilder()
- .augmentation(FlowCapableNode.class)
- .child(Table.class, new TableKey(tableId))
- .child(Flow.class,existingFlow.getKey()).toInstance();
- flow.setKey(existingFlow.getKey());
- flow.addAugmentation(FlowStatisticsData.class, flowStatisticsData.build());
- logger.debug("Found matching flow in the datastore, augmenting statistics");
- // Update entry with timestamp of latest response
- FlowStatsEntry flowStatsEntry = new FlowStatsEntry(tableId,flow.build());
- trans.putOperationalData(flowRef, flow.build());
- return flowStatsEntry;
- }
- }
- }
-
- table = (Table)trans.readOperationalData(tableRef);
- if(table != null){
- for(Flow existingFlow : table.getFlow()){
- FlowStatisticsData augmentedflowStatisticsData = existingFlow.getAugmentation(FlowStatisticsData.class);
- if(augmentedflowStatisticsData != null){
- FlowBuilder existingOperationalFlow = new FlowBuilder();
- existingOperationalFlow.fieldsFrom(augmentedflowStatisticsData.getFlowStatistics());
- logger.debug("Existing unaccounted flow in operational data store : {}",existingFlow.toString());
- if(FlowComparator.flowEquals(flowRule,existingOperationalFlow.build())){
- InstanceIdentifier<Flow> flowRef = getNodeIdentifierBuilder()
- .augmentation(FlowCapableNode.class)
- .child(Table.class, new TableKey(tableId))
- .child(Flow.class,existingFlow.getKey()).toInstance();
- flow.setKey(existingFlow.getKey());
- flow.addAugmentation(FlowStatisticsData.class, flowStatisticsData.build());
- logger.debug("Found matching unaccounted flow in the operational datastore, augmenting statistics");
- // Update entry with timestamp of latest response
- FlowStatsEntry flowStatsEntry = new FlowStatsEntry(tableId,flow.build());
- trans.putOperationalData(flowRef, flow.build());
- return flowStatsEntry;
- }
- }
+ .augmentation(FlowCapableNode.class)
+ .child(Table.class, new TableKey(tableId)).toInstance();
+
+ final FlowCookie flowCookie = flowRule.getCookie() != null
+ ? flowRule.getCookie() : new FlowCookie(BigInteger.ZERO);
+ final InstanceIdentifier<FlowCookieMap> flowCookieRef = tableRef
+ .augmentation(FlowCookieMapping.class)
+ .child(FlowCookieMap.class, new FlowCookieMapKey(flowCookie));
+
+ FlowCookieMap cookieMap = (FlowCookieMap) trans.readOperationalData(flowCookieRef);
+
+ /* find flowKey in FlowCookieMap from DataStore/OPERATIONAL */
+ Optional<FlowKey> flowKey = this.getExistFlowKey(flowRule, tableRef, trans, cookieMap);
+ if ( ! flowKey.isPresent()) {
+ /* DataStore/CONFIG For every first statistic needs to be created */
+ flowKey = this.getFlowKeyFromExistFlow(flowRule, tableRef, trans);
+ if ( ! flowKey.isPresent()) {
+ /* Alien flow */
+ flowKey = this.makeAlienFlowKey(flowRule);
}
+ cookieMap = applyNewFlowKey(cookieMap, flowKey, flowCookie);
+ trans.putOperationalData(flowCookieRef, cookieMap);
}
- String flowKey = "#UF$TABLE*"+Short.toString(tableId)+"*"+Integer.toString(this.unaccountedFlowsCounter);
- this.unaccountedFlowsCounter++;
- FlowKey newFlowKey = new FlowKey(new FlowId(flowKey));
- InstanceIdentifier<Flow> flowRef = getNodeIdentifierBuilder().augmentation(FlowCapableNode.class)
- .child(Table.class, new TableKey(tableId))
- .child(Flow.class,newFlowKey).toInstance();
- flow.setKey(newFlowKey);
- flow.addAugmentation(FlowStatisticsData.class, flowStatisticsData.build());
- logger.debug("Flow {} is not present in config data store, augmenting statistics as an unaccounted flow",
- flow.build());
+ InstanceIdentifier<Flow> flowRef = getNodeIdentifierBuilder()
+ .augmentation(FlowCapableNode.class)
+ .child(Table.class, new TableKey(tableId))
+ .child(Flow.class, flowKey.get()).toInstance();
+ flowBuilder.setKey(flowKey.get());
+ flowBuilder.addAugmentation(FlowStatisticsData.class, flowStatisticsData.build());
// Update entry with timestamp of latest response
- flow.setKey(newFlowKey);
- FlowStatsEntry flowStatsEntry = new FlowStatsEntry(tableId,flow.build());
- trans.putOperationalData(flowRef, flow.build());
+ flowBuilder.setKey(flowKey.get());
+ FlowStatsEntry flowStatsEntry = new FlowStatsEntry(tableId, flowBuilder.build());
+ trans.putOperationalData(flowRef, flowBuilder.build());
return flowStatsEntry;
}
// FIXME: it does not make sense to trigger this before sendAllFlowTablesStatisticsRequest()
// comes back -- we do not have any tables anyway.
final Collection<TableKey> tables = flowTableStats.getTables();
- logger.debug("Node {} supports {} table(s)", this.getNodeRef(), tables.size());
+ LOG.debug("Node {} supports {} table(s)", this.getNodeRef(), tables.size());
for (final TableKey key : tables) {
- logger.debug("Send aggregate stats request for flow table {} to node {}", key.getId(), this.getNodeRef());
+ LOG.debug("Send aggregate stats request for flow table {} to node {}", key.getId(), this.getNodeRef());
this.requestAggregateFlows(key);
}
for (Entry<InstanceIdentifier<?>, DataObject> e : change.getCreatedConfigurationData().entrySet()) {
if (Flow.class.equals(e.getKey().getTargetType())) {
final Flow flow = (Flow) e.getValue();
- logger.debug("Key {} triggered request for flow {}", e.getKey(), flow);
+ LOG.debug("Key {} triggered request for flow {}", e.getKey(), flow);
requestFlow(flow);
} else {
- logger.debug("Ignoring key {}", e.getKey());
+ LOG.debug("Ignoring key {}", e.getKey());
}
}
if (Flow.class.equals(key.getTargetType())) {
@SuppressWarnings("unchecked")
final InstanceIdentifier<Flow> flow = (InstanceIdentifier<Flow>)key;
- logger.debug("Key {} triggered remove of Flow from operational space.", key);
+ LOG.debug("Key {} triggered remove of Flow from operational space.", key);
trans.removeOperationalData(flow);
}
}
@Override
public void start(final DataBrokerService dbs) {
if (flowStatsService == null) {
- logger.debug("No Flow Statistics service, not subscribing to flows on node {}", getNodeIdentifier());
+ LOG.debug("No Flow Statistics service, not subscribing to flows on node {}", getNodeIdentifier());
return;
}
super.start(dbs);
}
+
+ /* Returns the existing FlowKey from the given FlowCookieMap, identified by the
+ * cookie and by the switch's flow identification (priority and match).
+ * Alien flow IDs (those starting with ALIEN_SYSTEM_FLOW_ID) exist only in the
+ * operational datastore, so they are matched there; all other IDs are matched
+ * against the configuration datastore. Returns absent if no entry matches. */
+ private Optional<FlowKey> getExistFlowKey(final Flow flowRule, final InstanceIdentifier<Table> tableRef,
+ final DataModificationTransaction trans, final FlowCookieMap cookieMap) {
+
+ if (cookieMap != null) {
+ for (FlowId flowId : cookieMap.getFlowIds()) {
+ InstanceIdentifier<Flow> flowIdent = tableRef.child(Flow.class, new FlowKey(flowId));
+ if (flowId.getValue().startsWith(ALIEN_SYSTEM_FLOW_ID)) {
+ LOG.debug("Search for flow in the operational datastore by flowID: {} ", flowIdent);
+ Flow readedFlow = (Flow) trans.readOperationalData(flowIdent);
+ if (FlowComparator.flowEquals(flowRule, readedFlow)) {
+ return Optional.<FlowKey> of(new FlowKey(flowId));
+ }
+ } else {
+ LOG.debug("Search for flow in the configuration datastore by flowID: {} ", flowIdent);
+ Flow readedFlow = (Flow) trans.readConfigurationData(flowIdent);
+ if (FlowComparator.flowEquals(flowRule, readedFlow)) {
+ return Optional.<FlowKey> of(new FlowKey(flowId));
+ }
+ }
+ }
+ LOG.debug("Flow was not found in the datastore. Flow {} ", flowRule);
+ }
+ return Optional.absent();
+ }
+
+ /* Returns the FlowKey of an existing Flow in DataStore/CONFIGURATION which
+ * matches the switch's flow identification (priority and match);
+ * absent when no configured flow in the table matches. */
+ private Optional<FlowKey> getFlowKeyFromExistFlow(final Flow flowRule, final InstanceIdentifier<Table> tableRef,
+ final DataModificationTransaction trans) {
+
+ /* Try to find it in DataStore/CONFIG */
+ // NOTE(review): table.getFlow() is assumed non-null here — confirm the
+ // binding guarantees an empty list rather than null for an empty table.
+ Table table= (Table)trans.readConfigurationData(tableRef);
+ if(table != null) {
+ for(Flow existingFlow : table.getFlow()) {
+ LOG.debug("Existing flow in data store : {}",existingFlow.toString());
+ if(FlowComparator.flowEquals(flowRule,existingFlow)){
+ return Optional.<FlowKey> of(new FlowKey(existingFlow.getId()));
+ }
+ }
+ }
+ return Optional.absent();
+ }
+
+ /* Returns a freshly generated FlowKey that does not yet exist in any datastore:
+ * ALIEN_SYSTEM_FLOW_ID + tableId + "-" + a per-tracker counter, which is
+ * incremented on every call to keep the generated IDs unique. */
+ private Optional<FlowKey> makeAlienFlowKey(final Flow flowRule) {
+
+ StringBuilder sBuilder = new StringBuilder(ALIEN_SYSTEM_FLOW_ID)
+ .append(flowRule.getTableId()).append("-").append(this.unaccountedFlowsCounter);
+ this.unaccountedFlowsCounter++;
+ final FlowId flowId = new FlowId(sBuilder.toString());
+ return Optional.<FlowKey> of(new FlowKey(flowId));
+ }
+
+ /* Adds the new flow ID to an existing FlowCookieMap, or builds a whole new
+ * FlowCookieMap for the cookie when none exists yet. */
+ private FlowCookieMap applyNewFlowKey(FlowCookieMap flowCookieMap, final Optional<FlowKey> flowKey,
+ final FlowCookie flowCookie) {
+ if (flowCookieMap != null) {
+ // NOTE(review): this mutates the list returned by getFlowIds(); the new-map
+ // branch below seeds it with Collections.singletonList, which is immutable —
+ // confirm the binding returns a mutable copy when the map is re-read,
+ // otherwise this add() can throw UnsupportedOperationException.
+ flowCookieMap.getFlowIds().add(flowKey.get().getId());
+ } else {
+ final FlowCookieMapBuilder flowCookieMapBuilder = new FlowCookieMapBuilder();
+ flowCookieMapBuilder.setCookie(flowCookie);
+ flowCookieMapBuilder.setFlowIds(Collections.singletonList(flowKey.get().getId()));
+ flowCookieMap = flowCookieMapBuilder.build();
+ }
+ return flowCookieMap;
+ }
}
}
@Override
- protected void cleanupSingleStat(DataModificationTransaction trans, NodeConnectorStatisticsAndPortNumberMap item) {
+ protected void cleanupSingleStat(final DataModificationTransaction trans, final NodeConnectorStatisticsAndPortNumberMap item) {
// TODO Auto-generated method stub
}
@Override
- protected NodeConnectorStatisticsAndPortNumberMap updateSingleStat(DataModificationTransaction trans, NodeConnectorStatisticsAndPortNumberMap item) {
+ protected NodeConnectorStatisticsAndPortNumberMap updateSingleStat(final DataModificationTransaction trans, final NodeConnectorStatisticsAndPortNumberMap item) {
FlowCapableNodeConnectorStatisticsBuilder statisticsBuilder
= new FlowCapableNodeConnectorStatisticsBuilder();
statisticsBuilder.setBytes(item.getBytes());
statisticsDataBuilder.setFlowCapableNodeConnectorStatistics(statisticsBuilder.build());
- InstanceIdentifier<NodeConnector> nodeConnectorRef = getNodeIdentifierBuilder()
- .child(NodeConnector.class, new NodeConnectorKey(item.getNodeConnectorId())).build();
+ final NodeConnectorKey key = new NodeConnectorKey(item.getNodeConnectorId());
+ final InstanceIdentifier<NodeConnector> nodeConnectorRef = getNodeIdentifier().child(NodeConnector.class, key);
// FIXME: can we bypass this read?
NodeConnector nodeConnector = (NodeConnector)trans.readOperationalData(nodeConnectorRef);
if(nodeConnector != null){
final FlowCapableNodeConnectorStatisticsData stats = statisticsDataBuilder.build();
logger.debug("Augmenting port statistics {} to port {}",stats,nodeConnectorRef.toString());
- NodeConnectorBuilder nodeConnectorBuilder = new NodeConnectorBuilder();
- nodeConnectorBuilder.addAugmentation(FlowCapableNodeConnectorStatisticsData.class, stats);
+ NodeConnectorBuilder nodeConnectorBuilder = new NodeConnectorBuilder()
+ .setKey(key).setId(item.getNodeConnectorId())
+ .addAugmentation(FlowCapableNodeConnectorStatisticsData.class, stats);
trans.putOperationalData(nodeConnectorRef, nodeConnectorBuilder.build());
}
package org.opendaylight.controller.netconf.client;
import io.netty.channel.Channel;
+
import java.util.Collection;
+
import org.opendaylight.controller.netconf.nettyutil.AbstractNetconfSession;
import org.opendaylight.controller.netconf.nettyutil.handler.NetconfEXICodec;
import org.opendaylight.controller.netconf.nettyutil.handler.NetconfEXIToMessageDecoder;
private static final Logger logger = LoggerFactory.getLogger(NetconfClientSession.class);
private final Collection<String> capabilities;
- public NetconfClientSession(NetconfClientSessionListener sessionListener, Channel channel, long sessionId,
- Collection<String> capabilities) {
+ /**
+ * Construct a new session.
+ *
+ * @param sessionListener
+ * @param channel
+ * @param sessionId
+ * @param capabilities set of advertised capabilities. Expected to be immutable.
+ */
+ public NetconfClientSession(final NetconfClientSessionListener sessionListener, final Channel channel, final long sessionId,
+ final Collection<String> capabilities) {
super(sessionListener, channel, sessionId);
this.capabilities = capabilities;
logger.debug("Client Session {} created", toString());
}
@Override
- protected void addExiHandlers(NetconfEXICodec exiCodec) {
+ protected void addExiHandlers(final NetconfEXICodec exiCodec) {
// TODO used only in negotiator, client supports only auto start-exi
replaceMessageDecoder(new NetconfEXIToMessageDecoder(exiCodec));
replaceMessageEncoder(new NetconfMessageToEXIEncoder(exiCodec));
package org.opendaylight.controller.netconf.client;
+import com.google.common.collect.ImmutableList;
+
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
private static final String EXI_1_0_CAPABILITY_MARKER = "exi:1.0";
- protected NetconfClientSessionNegotiator(NetconfClientSessionPreferences sessionPreferences,
- Promise<NetconfClientSession> promise,
- Channel channel,
- Timer timer,
- NetconfClientSessionListener sessionListener,
- long connectionTimeoutMillis) {
+ protected NetconfClientSessionNegotiator(final NetconfClientSessionPreferences sessionPreferences,
+ final Promise<NetconfClientSession> promise,
+ final Channel channel,
+ final Timer timer,
+ final NetconfClientSessionListener sessionListener,
+ final long connectionTimeoutMillis) {
super(sessionPreferences, promise, channel, timer, sessionListener, connectionTimeoutMillis);
}
@Override
- protected void handleMessage(NetconfHelloMessage netconfMessage) throws NetconfDocumentedException {
+ protected void handleMessage(final NetconfHelloMessage netconfMessage) throws NetconfDocumentedException {
final NetconfClientSession session = getSessionForHelloMessage(netconfMessage);
replaceHelloMessageInboundHandler(session);
});
}
- private boolean shouldUseExi(NetconfHelloMessage helloMsg) {
+ private boolean shouldUseExi(final NetconfHelloMessage helloMsg) {
return containsExi10Capability(helloMsg.getDocument())
&& containsExi10Capability(sessionPreferences.getHelloMessage().getDocument());
}
return false;
}
- private long extractSessionId(Document doc) {
+ private long extractSessionId(final Document doc) {
final Node sessionIdNode = (Node) XmlUtil.evaluateXPath(sessionIdXPath, doc, XPathConstants.NODE);
String textContent = sessionIdNode.getTextContent();
if (textContent == null || textContent.equals("")) {
}
@Override
- protected NetconfClientSession getSession(NetconfClientSessionListener sessionListener, Channel channel,
- NetconfHelloMessage message) throws NetconfDocumentedException {
+ protected NetconfClientSession getSession(final NetconfClientSessionListener sessionListener, final Channel channel,
+ final NetconfHelloMessage message) throws NetconfDocumentedException {
long sessionId = extractSessionId(message.getDocument());
- Collection<String> capabilities = NetconfMessageUtil.extractCapabilitiesFromHello(message.getDocument());
+
+ // Copy here is important: it disconnects the strings from the document
+ Collection<String> capabilities = ImmutableList.copyOf(NetconfMessageUtil.extractCapabilitiesFromHello(message.getDocument()));
+
+ // FIXME: scalability: we could instantiate a cache to share the same collections
return new NetconfClientSession(sessionListener, channel, sessionId, capabilities);
}
private static final String EXI_CONFIRMED_HANDLER = "exiConfirmedHandler";
private final NetconfClientSession session;
- private NetconfStartExiMessage startExiMessage;
+ private final NetconfStartExiMessage startExiMessage;
- ExiConfirmationInboundHandler(NetconfClientSession session, final NetconfStartExiMessage startExiMessage) {
+ ExiConfirmationInboundHandler(final NetconfClientSession session, final NetconfStartExiMessage startExiMessage) {
this.session = session;
this.startExiMessage = startExiMessage;
}
@Override
- public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
+ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
ctx.pipeline().remove(ExiConfirmationInboundHandler.EXI_CONFIRMED_HANDLER);
NetconfMessage netconfMessage = (NetconfMessage) msg;
import org.opendaylight.controller.netconf.nettyutil.handler.ssh.authentication.AuthenticationHandler;
import org.opendaylight.controller.netconf.ssh.NetconfSSHServer;
import org.opendaylight.controller.netconf.ssh.authentication.AuthProvider;
+import org.opendaylight.controller.netconf.ssh.authentication.AuthProviderImpl;
import org.opendaylight.controller.netconf.ssh.authentication.PEMGenerator;
import org.opendaylight.controller.netconf.util.messages.NetconfMessageUtil;
import org.opendaylight.controller.netconf.util.osgi.NetconfConfigUtil;
}
public AuthProvider getAuthProvider() throws Exception {
- AuthProvider mock = mock(AuthProvider.class);
+ AuthProvider mock = mock(AuthProviderImpl.class);
doReturn(true).when(mock).authenticated(anyString(), anyString());
doReturn(PEMGenerator.generate().toCharArray()).when(mock).getPEMAsCharArray();
return mock;
private OutputStream stdIn;
- private Queue<ByteBuf> postponed = new LinkedList<>();
+ private final Queue<ByteBuf> postponed = new LinkedList<>();
private ChannelHandlerContext ctx;
private ChannelPromise disconnectPromise;
private final Object lock = new Object();
- public SshClientAdapter(SshClient sshClient, Invoker invoker) {
+ public SshClientAdapter(final SshClient sshClient, final Invoker invoker) {
this.sshClient = sshClient;
this.invoker = invoker;
}
- // TODO: refactor
+ // TODO ganymed spawns a Thread that receives the data from remote inside TransportManager
+ // Get rid of this thread and reuse Ganymed internal thread (not sure if its possible without modifications in ganymed)
public void run() {
try {
- SshSession session = sshClient.openSession();
+ final SshSession session = sshClient.openSession();
invoker.invoke(session);
- InputStream stdOut = session.getStdout();
- session.getStderr();
+ final InputStream stdOut = session.getStdout();
synchronized (lock) {
-
stdIn = session.getStdin();
- ByteBuf message;
- while ((message = postponed.poll()) != null) {
- writeImpl(message);
+ while (postponed.peek() != null) {
+ writeImpl(postponed.poll());
}
}
while (!stopRequested.get()) {
- byte[] readBuff = new byte[BUFFER_SIZE];
- int c = stdOut.read(readBuff);
+ final byte[] readBuff = new byte[BUFFER_SIZE];
+ final int c = stdOut.read(readBuff);
if (c == -1) {
continue;
}
- byte[] tranBuff = new byte[c];
- System.arraycopy(readBuff, 0, tranBuff, 0, c);
- ByteBuf byteBuf = Unpooled.buffer(c);
- byteBuf.writeBytes(tranBuff);
- ctx.fireChannelRead(byteBuf);
+ ctx.fireChannelRead(Unpooled.copiedBuffer(readBuff, 0, c));
}
- } catch (Exception e) {
+ } catch (final Exception e) {
logger.error("Unexpected exception", e);
} finally {
sshClient.close();
}
// TODO: needs rework to match netconf framer API.
- public void write(ByteBuf message) throws IOException {
+ public void write(final ByteBuf message) throws IOException {
synchronized (lock) {
if (stdIn == null) {
postponed.add(message);
}
}
- private void writeImpl(ByteBuf message) throws IOException {
+ private void writeImpl(final ByteBuf message) throws IOException {
message.getBytes(0, stdIn, message.readableBytes());
message.release();
stdIn.flush();
}
- public void stop(ChannelPromise promise) {
+ public void stop(final ChannelPromise promise) {
synchronized (lock) {
stopRequested.set(true);
disconnectPromise = promise;
}
}
- public Thread start(ChannelHandlerContext ctx, ChannelFuture channelFuture) {
+ public Thread start(final ChannelHandlerContext ctx, final ChannelFuture channelFuture) {
checkArgument(channelFuture.isSuccess());
checkNotNull(ctx.channel().remoteAddress());
synchronized (this) {
checkState(this.ctx == null);
this.ctx = ctx;
}
- String threadName = toString();
- Thread thread = new Thread(this, threadName);
+ final String threadName = toString();
+ final Thread thread = new Thread(this, threadName);
thread.start();
return thread;
}
package org.opendaylight.controller.netconf.nettyutil.handler.ssh.client;
import ch.ethz.ssh2.Session;
-import ch.ethz.ssh2.StreamGobbler;
-
import ch.ethz.ssh2.channel.Channel;
import java.io.Closeable;
import java.io.IOException;
class SshSession implements Closeable {
private final Session session;
- public SshSession(Session session) {
+ public SshSession(final Session session) {
this.session = session;
}
-
- public void startSubSystem(String name) throws IOException {
+ public void startSubSystem(final String name) throws IOException {
session.startSubSystem(name);
}
public InputStream getStdout() {
- return new StreamGobbler(session.getStdout());
+ return session.getStdout();
}
+ // FIXME according to http://www.ganymed.ethz.ch/ssh2/FAQ.html#blocking you should read data from both stdout and stderr to prevent window filling up (since stdout and stderr share a window)
+ // FIXME stdErr is not used anywhere
public InputStream getStderr() {
return session.getStderr();
}
/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.netconf.ssh.authentication;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.opendaylight.controller.sal.authorization.AuthResultEnum;
-import org.opendaylight.controller.usermanager.IUserManager;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.ServiceReference;
-import org.osgi.util.tracker.ServiceTracker;
-import org.osgi.util.tracker.ServiceTrackerCustomizer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class AuthProvider {
- private static final Logger logger = LoggerFactory.getLogger(AuthProvider.class);
-
- private final String pem;
- private IUserManager nullableUserManager;
- public AuthProvider(String pemCertificate, final BundleContext bundleContext) {
- checkNotNull(pemCertificate, "Parameter 'pemCertificate' is null");
- pem = pemCertificate;
-
- ServiceTrackerCustomizer<IUserManager, IUserManager> customizer = new ServiceTrackerCustomizer<IUserManager, IUserManager>() {
- @Override
- public IUserManager addingService(final ServiceReference<IUserManager> reference) {
- logger.trace("Service {} added", reference);
- nullableUserManager = bundleContext.getService(reference);
- return nullableUserManager;
- }
-
- @Override
- public void modifiedService(final ServiceReference<IUserManager> reference, final IUserManager service) {
- logger.trace("Replacing modified service {} in netconf SSH.", reference);
- nullableUserManager = service;
- }
-
- @Override
- public void removedService(final ServiceReference<IUserManager> reference, final IUserManager service) {
- logger.trace("Removing service {} from netconf SSH. " +
- "SSH won't authenticate users until IUserManager service will be started.", reference);
- synchronized (AuthProvider.this) {
- nullableUserManager = null;
- }
- }
- };
- ServiceTracker<IUserManager, IUserManager> listenerTracker = new ServiceTracker<>(bundleContext, IUserManager.class, customizer);
- listenerTracker.open();
- }
+package org.opendaylight.controller.netconf.ssh.authentication;
- /**
- * Authenticate user. This implementation tracks IUserManager and delegates the decision to it. If the service is not
- * available, IllegalStateException is thrown.
- */
- public synchronized boolean authenticated(String username, String password) {
- if (nullableUserManager == null) {
- logger.warn("Cannot authenticate user '{}', user manager service is missing", username);
- throw new IllegalStateException("User manager service is not available");
- }
- AuthResultEnum authResult = nullableUserManager.authenticate(username, password);
- logger.debug("Authentication result for user '{}' : {}", username, authResult);
- return authResult.equals(AuthResultEnum.AUTH_ACCEPT) || authResult.equals(AuthResultEnum.AUTH_ACCEPT_LOC);
- }
+public interface AuthProvider {
- public char[] getPEMAsCharArray() {
- return pem.toCharArray();
- }
+ boolean authenticated(String username, String password);
- @VisibleForTesting
- void setNullableUserManager(IUserManager nullableUserManager) {
- this.nullableUserManager = nullableUserManager;
- }
+ char[] getPEMAsCharArray();
}
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.netconf.ssh.authentication;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.opendaylight.controller.sal.authorization.AuthResultEnum;
+import org.opendaylight.controller.usermanager.IUserManager;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
+import org.osgi.util.tracker.ServiceTracker;
+import org.osgi.util.tracker.ServiceTrackerCustomizer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@link AuthProvider} implementation that delegates authentication decisions
+ * to the {@code IUserManager} OSGi service, tracked via a {@link ServiceTracker}
+ * opened in the constructor. The PEM certificate string is held verbatim and
+ * exposed through {@link #getPEMAsCharArray()}.
+ *
+ * NOTE(review): {@code nullableUserManager} is written by addingService()/
+ * modifiedService() without holding the lock that authenticated() and
+ * removedService() synchronize on — confirm whether this visibility gap
+ * between the tracker thread and callers is acceptable.
+ */
+public class AuthProviderImpl implements AuthProvider {
+    private static final Logger logger = LoggerFactory.getLogger(AuthProviderImpl.class);
+
+    private final String pem;
+    private IUserManager nullableUserManager;
+
+    public AuthProviderImpl(String pemCertificate, final BundleContext bundleContext) {
+        checkNotNull(pemCertificate, "Parameter 'pemCertificate' is null");
+        pem = pemCertificate;
+
+        ServiceTrackerCustomizer<IUserManager, IUserManager> customizer = new ServiceTrackerCustomizer<IUserManager, IUserManager>() {
+            @Override
+            public IUserManager addingService(final ServiceReference<IUserManager> reference) {
+                logger.trace("Service {} added", reference);
+                nullableUserManager = bundleContext.getService(reference);
+                return nullableUserManager;
+            }
+
+            @Override
+            public void modifiedService(final ServiceReference<IUserManager> reference, final IUserManager service) {
+                logger.trace("Replacing modified service {} in netconf SSH.", reference);
+                nullableUserManager = service;
+            }
+
+            @Override
+            public void removedService(final ServiceReference<IUserManager> reference, final IUserManager service) {
+                logger.trace("Removing service {} from netconf SSH. " +
+                        "SSH won't authenticate users until IUserManager service will be started.", reference);
+                synchronized (AuthProviderImpl.this) {
+                    nullableUserManager = null;
+                }
+            }
+        };
+        ServiceTracker<IUserManager, IUserManager> listenerTracker = new ServiceTracker<>(bundleContext, IUserManager.class, customizer);
+        listenerTracker.open();
+    }
+
+    /**
+     * Authenticate user. This implementation tracks IUserManager and delegates the decision to it. If the service is not
+     * available, IllegalStateException is thrown.
+     */
+    @Override
+    public synchronized boolean authenticated(String username, String password) {
+        if (nullableUserManager == null) {
+            logger.warn("Cannot authenticate user '{}', user manager service is missing", username);
+            throw new IllegalStateException("User manager service is not available");
+        }
+        AuthResultEnum authResult = nullableUserManager.authenticate(username, password);
+        logger.debug("Authentication result for user '{}' : {}", username, authResult);
+        return authResult.equals(AuthResultEnum.AUTH_ACCEPT) || authResult.equals(AuthResultEnum.AUTH_ACCEPT_LOC);
+    }
+
+    @Override
+    public char[] getPEMAsCharArray() {
+        return pem.toCharArray();
+    }
+
+    // Test hook only: replaces the tracked IUserManager without going through OSGi.
+    @VisibleForTesting
+    void setNullableUserManager(IUserManager nullableUserManager) {
+        this.nullableUserManager = nullableUserManager;
+    }
+}
import org.apache.commons.lang3.StringUtils;
import org.opendaylight.controller.netconf.ssh.NetconfSSHServer;
import org.opendaylight.controller.netconf.ssh.authentication.AuthProvider;
+import org.opendaylight.controller.netconf.ssh.authentication.AuthProviderImpl;
import org.opendaylight.controller.netconf.ssh.authentication.PEMGenerator;
import org.opendaylight.controller.netconf.util.osgi.NetconfConfigUtil;
import org.opendaylight.controller.netconf.util.osgi.NetconfConfigUtil.InfixProp;
checkState(StringUtils.isNotBlank(path), "Path to ssh private key is blank. Reconfigure %s", NetconfConfigUtil.getPrivateKeyKey());
String privateKeyPEMString = PEMGenerator.readOrGeneratePK(new File(path));
- final AuthProvider authProvider = new AuthProvider(privateKeyPEMString, bundleContext);
+ final AuthProvider authProvider = new AuthProviderImpl(privateKeyPEMString, bundleContext);
EventLoopGroup bossGroup = new NioEventLoopGroup();
NetconfSSHServer server = NetconfSSHServer.start(sshSocketAddress.getPort(), localAddress, authProvider, bossGroup);
ChannelFuture clientChannelFuture = initializeNettyConnection(localAddress, bossGroup, sshClientHandler);
// get channel
final Channel channel = clientChannelFuture.awaitUninterruptibly().channel();
+
+ // write additional header before polling thread is started
+ // polling thread could process and forward data before additional header is written
+ // This will result in an unexpected state: a hello message without the additional header and the next message with the additional header
+ channel.writeAndFlush(Unpooled.copiedBuffer(additionalHeader.getBytes()));
+
new ClientInputStreamPoolingThread(session, ss.getStdout(), channel, new AutoCloseable() {
@Override
public void close() throws Exception {
}
}
}, sshClientHandler.getChannelHandlerContext()).start();
-
- // write additional header
- channel.writeAndFlush(Unpooled.copiedBuffer(additionalHeader.getBytes()));
} else {
logger.debug("{} Wrong subsystem requested:'{}', closing ssh session", serverSession, subsystem);
String reason = "Only netconf subsystem is supported, requested:" + subsystem;
import org.opendaylight.controller.netconf.nettyutil.handler.ssh.client.SshHandler;
import org.opendaylight.controller.netconf.ssh.NetconfSSHServer;
import org.opendaylight.controller.netconf.ssh.authentication.AuthProvider;
+import org.opendaylight.controller.netconf.ssh.authentication.AuthProviderImpl;
import org.opendaylight.controller.netconf.ssh.authentication.PEMGenerator;
import org.opendaylight.controller.netconf.util.osgi.NetconfConfigUtil;
import org.slf4j.Logger;
@Test
public void test() throws Exception {
new Thread(new EchoServer(), "EchoServer").start();
- AuthProvider authProvider = mock(AuthProvider.class);
+ AuthProvider authProvider = mock(AuthProviderImpl.class);
doReturn(PEMGenerator.generate().toCharArray()).when(authProvider).getPEMAsCharArray();
doReturn(true).when(authProvider).authenticated(anyString(), anyString());
NetconfSSHServer netconfSSHServer = NetconfSSHServer.start(10831, NetconfConfigUtil.getNetconfLocalAddress(),
try (InputStream is = getClass().getResourceAsStream("/RSA.pk")) {
pem = IOUtils.toString(is);
}
- AuthProvider ap = new AuthProvider(pem, mockedContext);
+ AuthProviderImpl ap = new AuthProviderImpl(pem, mockedContext);
ap.setNullableUserManager(um);
EventLoopGroup bossGroup = new NioEventLoopGroup();
NetconfSSHServer server = NetconfSSHServer.start(PORT, NetconfConfigUtil.getNetconfLocalAddress(),
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ ~
+ ~ This program and the accompanying materials are made available under the
+ ~ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ ~ and is available at http://www.eclipse.org/legal/epl-v10.html
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.opendaylight.controller</groupId>
+        <artifactId>netconf-subsystem</artifactId>
+        <version>0.2.5-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>netconf-testtool</artifactId>
+    <name>${project.artifactId}</name>
+
+    <dependencies>
+        <!-- Command line argument parsing used by the test tool's Main class -->
+        <dependency>
+            <groupId>net.sourceforge.argparse4j</groupId>
+            <artifactId>argparse4j</artifactId>
+            <version>0.4.3</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-netty-util</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>commons.logback_settings</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>config-netconf-connector</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>netconf-connector-config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>logback-config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.opendaylight.yangtools</groupId>
+            <artifactId>mockito-configuration</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>xmlunit</groupId>
+            <artifactId>xmlunit</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>config-util</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-client</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-impl</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-mapping-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-monitoring</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netconf-ssh</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>netty-config-api</artifactId>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <!-- Package the tool and all dependencies as a single executable jar
+                 (attached with classifier "executable"). -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <configuration></configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <phase>package</phase>
+                        <configuration>
+                            <!-- TODO investigate why jar fails without this filter-->
+                            <filters>
+                                <filter>
+                                    <artifact>*:*</artifact>
+                                    <excludes>
+                                        <exclude>META-INF/*.SF</exclude>
+                                        <exclude>META-INF/*.DSA</exclude>
+                                        <exclude>META-INF/*.RSA</exclude>
+                                    </excludes>
+                                </filter>
+                            </filters>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass>org.opendaylight.controller.netconf.test.tool.Main</mainClass>
+                                </transformer>
+                            </transformers>
+                            <shadedArtifactAttached>true</shadedArtifactAttached>
+                            <shadedClassifierName>executable</shadedClassifierName>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import java.io.File;
+import java.io.IOException;
+import org.opendaylight.controller.netconf.ssh.authentication.AuthProvider;
+import org.opendaylight.controller.netconf.ssh.authentication.PEMGenerator;
+
+/**
+ * AuthProvider for the test tool: accepts every username/password pair and
+ * serves a PEM private key read from (or generated into) the file "PK" in
+ * the working directory.
+ */
+class AcceptingAuthProvider implements AuthProvider {
+    private final String pem;
+
+    public AcceptingAuthProvider() {
+        final File keyFile = new File("PK");
+        try {
+            pem = PEMGenerator.readOrGeneratePK(keyFile);
+        } catch (final IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /** Always succeeds — every client is authenticated. */
+    @Override
+    public synchronized boolean authenticated(final String username, final String password) {
+        return true;
+    }
+
+    @Override
+    public char[] getPEMAsCharArray() {
+        return pem.toCharArray();
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.io.Files;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.List;
+
+import java.util.concurrent.TimeUnit;
+import net.sourceforge.argparse4j.ArgumentParsers;
+import net.sourceforge.argparse4j.annotation.Arg;
+import net.sourceforge.argparse4j.inf.ArgumentParser;
+import net.sourceforge.argparse4j.inf.ArgumentParserException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.CharStreams;
+
+public final class Main {
+
+ // TODO add logback config
+
+ // TODO make exi configurable
+
+ private static final Logger LOG = LoggerFactory.getLogger(Main.class);
+
+ static class Params {
+
+ @Arg(dest = "schemas-dir")
+ public File schemasDir;
+
+ @Arg(dest = "devices-count")
+ public int deviceCount;
+
+ @Arg(dest = "starting-port")
+ public int startingPort;
+
+ @Arg(dest = "generate-config-connection-timeout")
+ public int generateConfigsTimeout;
+
+ @Arg(dest = "generate-configs-dir")
+ public File generateConfigsDir;
+
+ @Arg(dest = "generate-configs-batch-size")
+ public int generateConfigBatchSize;
+
+ @Arg(dest = "ssh")
+ public boolean ssh;
+
+ @Arg(dest = "exi")
+ public boolean exi;
+
+ static ArgumentParser getParser() {
+ final ArgumentParser parser = ArgumentParsers.newArgumentParser("netconf testool");
+ parser.addArgument("--devices-count")
+ .type(Integer.class)
+ .setDefault(1)
+ .type(Integer.class)
+ .help("Number of simulated netconf devices to spin")
+ .dest("devices-count");
+
+ parser.addArgument("--schemas-dir")
+ .type(File.class)
+ .required(true)
+ .help("Directory containing yang schemas to describe simulated devices")
+ .dest("schemas-dir");
+
+ parser.addArgument("--starting-port")
+ .type(Integer.class)
+ .setDefault(17830)
+ .help("First port for simulated device. Each other device will have previous+1 port number")
+ .dest("starting-port");
+
+ parser.addArgument("--generate-config-connection-timeout")
+ .type(Integer.class)
+ .setDefault((int)TimeUnit.MINUTES.toMillis(5))
+ .help("Timeout to be generated in initial config files")
+ .dest("generate-config-connection-timeout");
+
+ parser.addArgument("--generate-configs-batch-size")
+ .type(Integer.class)
+ .setDefault(100)
+ .help("Number of connector configs per generated file")
+ .dest("generate-configs-batch-size");
+
+ parser.addArgument("--generate-configs-dir")
+ .type(File.class)
+ .help("Directory where initial config files for ODL distribution should be generated")
+ .dest("generate-configs-dir");
+
+ parser.addArgument("--ssh")
+ .type(Boolean.class)
+ .setDefault(true)
+ .help("Whether to use ssh for transport or just pure tcp")
+ .dest("ssh");
+
+ parser.addArgument("--exi")
+ .type(Boolean.class)
+ .setDefault(false)
+ .help("Whether to use exi to transport xml content")
+ .dest("exi");
+
+ return parser;
+ }
+
+ void validate() {
+ checkArgument(deviceCount > 0, "Device count has to be > 0");
+ checkArgument(startingPort > 1024, "Starting port has to be > 1024");
+
+ checkArgument(schemasDir.exists(), "Schemas dir has to exist");
+ checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
+ checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ }
+ }
+
+ public static void main(final String[] args) {
+ ch.ethz.ssh2.log.Logger.enabled = true;
+
+ final Params params = parseArgs(args, Params.getParser());
+ params.validate();
+
+ final NetconfDeviceSimulator netconfDeviceSimulator = new NetconfDeviceSimulator();
+ try {
+ final List<Integer> openDevices = netconfDeviceSimulator.start(params);
+ if(params.generateConfigsDir != null) {
+ new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout);
+ }
+ } catch (final Exception e) {
+ LOG.error("Unhandled exception", e);
+ netconfDeviceSimulator.close();
+ System.exit(1);
+ }
+
+ // Block main thread
+ synchronized (netconfDeviceSimulator) {
+ try {
+ netconfDeviceSimulator.wait();
+ } catch (final InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+
+ private static Params parseArgs(final String[] args, final ArgumentParser parser) {
+ final Params opt = new Params();
+ try {
+ parser.parseArgs(args, opt);
+ return opt;
+ } catch (final ArgumentParserException e) {
+ parser.handleError(e);
+ }
+
+ System.exit(1);
+ return null;
+ }
+
+ private static class ConfigGenerator {
+ public static final String NETCONF_CONNECTOR_XML = "/initial/99-netconf-connector.xml";
+ public static final String NETCONF_CONNECTOR_NAME = "controller-config";
+ public static final String NETCONF_CONNECTOR_PORT = "1830";
+ public static final String NETCONF_USE_SSH = "false";
+ public static final String SIM_DEVICE_SUFFIX = "-sim-device";
+
+ private final File directory;
+ private final List<Integer> openDevices;
+
+ public ConfigGenerator(final File directory, final List<Integer> openDevices) {
+ this.directory = directory;
+ this.openDevices = openDevices;
+ }
+
+ public void generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout) {
+ if(directory.exists() == false) {
+ checkState(directory.mkdirs(), "Unable to create folder %s" + directory);
+ }
+
+ try(InputStream stream = Main.class.getResourceAsStream(NETCONF_CONNECTOR_XML)) {
+ checkNotNull(stream, "Cannot load %s", NETCONF_CONNECTOR_XML);
+ String configBlueprint = CharStreams.toString(new InputStreamReader(stream, Charsets.UTF_8));
+
+ // TODO make address configurable
+ checkState(configBlueprint.contains(NETCONF_CONNECTOR_NAME));
+ checkState(configBlueprint.contains(NETCONF_CONNECTOR_PORT));
+ checkState(configBlueprint.contains(NETCONF_USE_SSH));
+ configBlueprint = configBlueprint.replace(NETCONF_CONNECTOR_NAME, "%s");
+ configBlueprint = configBlueprint.replace(NETCONF_CONNECTOR_PORT, "%s");
+ configBlueprint = configBlueprint.replace(NETCONF_USE_SSH, "%s");
+
+ final String before = configBlueprint.substring(0, configBlueprint.indexOf("<module>"));
+ final String middleBlueprint = configBlueprint.substring(configBlueprint.indexOf("<module>"), configBlueprint.indexOf("</module>"));
+ final String after = configBlueprint.substring(configBlueprint.indexOf("</module>") + "</module>".length());
+
+ int connectorCount = 0;
+ Integer batchStart = null;
+ StringBuilder b = new StringBuilder();
+ b.append(before);
+
+ for (final Integer openDevice : openDevices) {
+ if(batchStart == null) {
+ batchStart = openDevice;
+ }
+
+ final String name = String.valueOf(openDevice) + SIM_DEVICE_SUFFIX;
+ String configContent = String.format(middleBlueprint, name, String.valueOf(openDevice), String.valueOf(!useSsh));
+ configContent = String.format("%s%s%d%s\n%s\n", configContent, "<connection-timeout-millis>", generateConfigsTimeout, "</connection-timeout-millis>", "</module>");
+
+ b.append(configContent);
+ connectorCount++;
+ if(connectorCount == batchSize) {
+ b.append(after);
+ Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevice)), Charsets.UTF_8);
+ connectorCount = 0;
+ b = new StringBuilder();
+ b.append(before);
+ batchStart = null;
+ }
+ }
+
+ // Write remaining
+ if(connectorCount != 0) {
+ b.append(after);
+ Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1))), Charsets.UTF_8);
+ }
+
+ LOG.info("Config files generated in {}", directory);
+ } catch (final IOException e) {
+ throw new RuntimeException("Unable to generate config files", e);
+ }
+ }
+ }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Optional;
+import java.util.Date;
+import java.util.List;
+import org.opendaylight.controller.netconf.confignetconfconnector.util.Util;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
+
+/**
+ * Netconf capability backed by a parsed yang {@link ModuleBuilder} together
+ * with the raw source text of the module it was built from.
+ */
+final class ModuleBuilderCapability implements Capability {
+    // Revision reported by ModuleBuilder when the module declares none
+    private static final Date NO_REVISION = new Date(0);
+    private final ModuleBuilder input;
+    private final Optional<String> content;
+
+    public ModuleBuilderCapability(final ModuleBuilder input, final String inputStream) {
+        this.input = input;
+        this.content = Optional.of(inputStream);
+    }
+
+    @Override
+    public String getCapabilityUri() {
+        // FIXME capabilities in Netconf-impl need to check for NO REVISION
+        final StringBuilder uri = new StringBuilder();
+        uri.append(getModuleNamespace().get()).append("?module=").append(getModuleName().get());
+        if (hasRevision()) {
+            uri.append("&revision=").append(Util.writeDate(input.getRevision()));
+        }
+        return uri.toString();
+    }
+
+    @Override
+    public Optional<String> getModuleNamespace() {
+        return Optional.of(input.getNamespace().toString());
+    }
+
+    @Override
+    public Optional<String> getModuleName() {
+        return Optional.of(input.getName());
+    }
+
+    @Override
+    public Optional<String> getRevision() {
+        if (hasRevision()) {
+            return Optional.of(QName.formattedRevision(input.getRevision()));
+        }
+        return Optional.of("");
+    }
+
+    private boolean hasRevision() {
+        return !input.getRevision().equals(NO_REVISION);
+    }
+
+    @Override
+    public Optional<String> getCapabilitySchema() {
+        return content;
+    }
+
+    @Override
+    public Optional<List<String>> getLocation() {
+        return Optional.absent();
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Function;
+import com.google.common.base.Optional;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.io.CharStreams;
+import com.google.common.util.concurrent.CheckedFuture;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.local.LocalAddress;
+import io.netty.channel.nio.NioEventLoopGroup;
+import io.netty.util.HashedWheelTimer;
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.lang.management.ManagementFactory;
+import java.net.Inet4Address;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.UnknownHostException;
+import java.util.AbstractMap;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.ExecutionException;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.tree.ParseTreeWalker;
+import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
+import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
+import org.opendaylight.controller.netconf.impl.NetconfServerDispatcher;
+import org.opendaylight.controller.netconf.impl.NetconfServerSessionNegotiatorFactory;
+import org.opendaylight.controller.netconf.impl.SessionIdProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfMonitoringServiceImpl;
+import org.opendaylight.controller.netconf.impl.osgi.SessionMonitoringService;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperation;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
+import org.opendaylight.controller.netconf.monitoring.osgi.NetconfMonitoringOperationService;
+import org.opendaylight.controller.netconf.ssh.NetconfSSHServer;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceException;
+import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceRepresentation;
+import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier;
+import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource;
+import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource;
+import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceListener;
+import org.opendaylight.yangtools.yang.model.repo.util.FilesystemSchemaSourceCache;
+import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
+import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
+import org.opendaylight.yangtools.yang.parser.impl.YangParserListenerImpl;
+import org.opendaylight.yangtools.yang.parser.repo.SharedSchemaRepository;
+import org.opendaylight.yangtools.yang.parser.util.ASTSchemaSource;
+import org.opendaylight.yangtools.yang.parser.util.TextToASTTransformer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Spawns a configurable number of simulated netconf devices (plain TCP or SSH)
+ * whose advertised capabilities are built from the yang schemas found in the
+ * directory given on the command line.
+ */
+public class NetconfDeviceSimulator implements Closeable {
+
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfDeviceSimulator.class);
+
+    public static final int CONNECTION_TIMEOUT_MILLIS = 20000;
+
+    private final NioEventLoopGroup nettyThreadgroup;
+    private final HashedWheelTimer hashedWheelTimer;
+    // Channels of successfully started simulated devices, closed in close()
+    private final List<Channel> devicesChannels = Lists.newArrayList();
+
+    public NetconfDeviceSimulator() {
+        this(new NioEventLoopGroup(), new HashedWheelTimer());
+    }
+
+    public NetconfDeviceSimulator(final NioEventLoopGroup eventExecutors, final HashedWheelTimer hashedWheelTimer) {
+        this.nettyThreadgroup = eventExecutors;
+        this.hashedWheelTimer = hashedWheelTimer;
+    }
+
+    private NetconfServerDispatcher createDispatcher(final Map<ModuleBuilder, String> moduleBuilders, final boolean exi) {
+
+        // Expose each parsed module as a netconf capability
+        final Set<Capability> capabilities = Sets.newHashSet(Collections2.transform(moduleBuilders.keySet(), new Function<ModuleBuilder, Capability>() {
+            @Override
+            public Capability apply(final ModuleBuilder input) {
+                return new ModuleBuilderCapability(input, moduleBuilders.get(input));
+            }
+        }));
+
+        final SessionIdProvider idProvider = new SessionIdProvider();
+
+        final SimulatedOperationProvider simulatedOperationProvider = new SimulatedOperationProvider(idProvider, capabilities);
+        final NetconfMonitoringOperationService monitoringService = new NetconfMonitoringOperationService(new NetconfMonitoringServiceImpl(simulatedOperationProvider));
+        simulatedOperationProvider.addService(monitoringService);
+
+        final DefaultCommitNotificationProducer commitNotifier = new DefaultCommitNotificationProducer(ManagementFactory.getPlatformMBeanServer());
+
+        // Without exi, restrict the server to the plain base 1.0/1.1 capabilities
+        final Set<String> serverCapabilities = exi
+                ? NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES
+                : Sets.newHashSet(XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_0, XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_1);
+
+        final NetconfServerSessionNegotiatorFactory serverNegotiatorFactory = new NetconfServerSessionNegotiatorFactory(
+                hashedWheelTimer, simulatedOperationProvider, idProvider, CONNECTION_TIMEOUT_MILLIS, commitNotifier, new LoggingMonitoringService(), serverCapabilities);
+
+        final NetconfServerDispatcher.ServerChannelInitializer serverChannelInitializer = new NetconfServerDispatcher.ServerChannelInitializer(
+                serverNegotiatorFactory);
+        return new NetconfServerDispatcher(serverChannelInitializer, nettyThreadgroup, nettyThreadgroup);
+    }
+
+    /**
+     * Pairs each parsed module builder with the raw source text of its module.
+     */
+    private Map<ModuleBuilder, String> toModuleBuilders(final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> sources) {
+        final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
+            @Override
+            public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
+                return input.getKey().getAST();
+            }
+        });
+        final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
+                asts.values(), Optional.<SchemaContext>absent());
+
+        final ParseTreeWalker walker = new ParseTreeWalker();
+        final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
+
+        for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
+            final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
+                    walker, entry.getValue()).getModuleBuilder();
+
+            try (InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
+                sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
+            } catch (final IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        return sourceToBuilder;
+    }
+
+
+    /**
+     * Starts the simulated devices and returns the list of ports they opened.
+     * Devices that fail to start are logged and skipped.
+     */
+    public List<Integer> start(final Main.Params params) {
+        final Map<ModuleBuilder, String> moduleBuilders = parseSchemasToModuleBuilders(params);
+
+        final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders, params.exi);
+
+        int currentPort = params.startingPort;
+
+        final List<Integer> openDevices = Lists.newArrayList();
+        for (int i = 0; i < params.deviceCount; i++) {
+            final InetSocketAddress address = getAddress(currentPort);
+
+            final ChannelFuture server;
+            if (params.ssh) {
+                // SSH front-end bridges to a netty local channel
+                final LocalAddress tcpLocalAddress = new LocalAddress(address.toString());
+
+                server = dispatcher.createLocalServer(tcpLocalAddress);
+                try {
+                    NetconfSSHServer.start(currentPort, tcpLocalAddress, new AcceptingAuthProvider(), nettyThreadgroup);
+                } catch (final Exception e) {
+                    LOG.warn("Cannot start simulated device on {}, skipping", address, e);
+                    // Close local server and continue
+                    server.cancel(true);
+                    if (server.isDone()) {
+                        server.channel().close();
+                    }
+                    continue;
+                } finally {
+                    currentPort++;
+                }
+
+                try {
+                    server.get();
+                } catch (final InterruptedException e) {
+                    throw new RuntimeException(e);
+                } catch (final ExecutionException e) {
+                    LOG.warn("Cannot start ssh simulated device on {}, skipping", address, e);
+                    continue;
+                }
+
+                LOG.debug("Simulated SSH device started on {}", address);
+
+            } else {
+                server = dispatcher.createServer(address);
+                currentPort++;
+
+                try {
+                    server.get();
+                } catch (final InterruptedException e) {
+                    throw new RuntimeException(e);
+                } catch (final ExecutionException e) {
+                    LOG.warn("Cannot start tcp simulated device on {}, skipping", address, e);
+                    continue;
+                }
+
+                LOG.debug("Simulated TCP device started on {}", address);
+            }
+
+            devicesChannels.add(server.channel());
+            openDevices.add(currentPort - 1);
+
+        }
+
+        if (openDevices.size() == params.deviceCount) {
+            LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort);
+        } else {
+            // fixed typo: "ar" -> "are"
+            LOG.warn("Not all simulated devices started successfully. Started devices are on ports {}", openDevices);
+        }
+
+        return openDevices;
+    }
+
+    /**
+     * Loads all yang sources from the schemas directory via the filesystem
+     * cache and converts them to module builders.
+     */
+    private Map<ModuleBuilder, String> parseSchemasToModuleBuilders(final Main.Params params) {
+        final SharedSchemaRepository consumer = new SharedSchemaRepository("netconf-simulator");
+        consumer.registerSchemaSourceListener(TextToASTTransformer.create(consumer, consumer));
+
+        final Set<SourceIdentifier> loadedSources = Sets.newHashSet();
+
+        // Collect identifiers of every source the cache registers below
+        consumer.registerSchemaSourceListener(new SchemaSourceListener() {
+            @Override
+            public void schemaSourceEncountered(final SchemaSourceRepresentation schemaSourceRepresentation) {}
+
+            @Override
+            public void schemaSourceRegistered(final Iterable<PotentialSchemaSource<?>> potentialSchemaSources) {
+                for (final PotentialSchemaSource<?> potentialSchemaSource : potentialSchemaSources) {
+                    loadedSources.add(potentialSchemaSource.getSourceIdentifier());
+                }
+            }
+
+            @Override
+            public void schemaSourceUnregistered(final PotentialSchemaSource<?> potentialSchemaSource) {}
+        });
+
+        final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
+        consumer.registerSchemaSourceListener(cache);
+
+        final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> asts = Maps.newHashMap();
+        for (final SourceIdentifier loadedSource : loadedSources) {
+            try {
+                final CheckedFuture<ASTSchemaSource, SchemaSourceException> ast = consumer.getSchemaSource(loadedSource, ASTSchemaSource.class);
+                final CheckedFuture<YangTextSchemaSource, SchemaSourceException> text = consumer.getSchemaSource(loadedSource, YangTextSchemaSource.class);
+                asts.put(loadedSource, new AbstractMap.SimpleEntry<>(ast.get(), text.get()));
+            } catch (final InterruptedException e) {
+                throw new RuntimeException(e);
+            } catch (final ExecutionException e) {
+                throw new RuntimeException("Cannot parse schema context", e);
+            }
+        }
+        return toModuleBuilders(asts);
+    }
+
+    private static InetSocketAddress getAddress(final int port) {
+        try {
+            // TODO make address configurable
+            return new InetSocketAddress(Inet4Address.getByName("0.0.0.0"), port);
+        } catch (final UnknownHostException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public void close() {
+        for (final Channel deviceCh : devicesChannels) {
+            deviceCh.close();
+        }
+        nettyThreadgroup.shutdownGracefully();
+        // Stop the shared timer so its worker thread does not linger after close
+        hashedWheelTimer.stop();
+        // NOTE(review): SSH servers started in start() are not tracked and thus
+        // are not stopped here — only the device channels are closed
+    }
+
+    private static class SimulatedOperationProvider implements NetconfOperationProvider {
+        private final SessionIdProvider idProvider;
+        private final Set<NetconfOperationService> netconfOperationServices;
+
+
+        public SimulatedOperationProvider(final SessionIdProvider idProvider, final Set<Capability> caps) {
+            this.idProvider = idProvider;
+            final SimulatedOperationService simulatedOperationService = new SimulatedOperationService(caps, idProvider.getCurrentSessionId());
+            this.netconfOperationServices = Sets.<NetconfOperationService>newHashSet(simulatedOperationService);
+        }
+
+        @Override
+        public NetconfOperationServiceSnapshot openSnapshot(final String sessionIdForReporting) {
+            return new SimulatedServiceSnapshot(idProvider, netconfOperationServices);
+        }
+
+        public void addService(final NetconfOperationService monitoringService) {
+            netconfOperationServices.add(monitoringService);
+        }
+
+        private static class SimulatedServiceSnapshot implements NetconfOperationServiceSnapshot {
+            private final SessionIdProvider idProvider;
+            private final Set<NetconfOperationService> netconfOperationServices;
+
+            public SimulatedServiceSnapshot(final SessionIdProvider idProvider, final Set<NetconfOperationService> netconfOperationServices) {
+                this.idProvider = idProvider;
+                this.netconfOperationServices = netconfOperationServices;
+            }
+
+            @Override
+            public String getNetconfSessionIdForReporting() {
+                return String.valueOf(idProvider.getCurrentSessionId());
+            }
+
+            @Override
+            public Set<NetconfOperationService> getServices() {
+                return netconfOperationServices;
+            }
+
+            @Override
+            public void close() throws Exception {}
+        }
+
+        static class SimulatedOperationService implements NetconfOperationService {
+            private final Set<Capability> capabilities;
+            // fixed: this was a static field assigned from the constructor, so the
+            // last created instance silently replaced the operation of all others
+            private final SimulatedGet sGet;
+
+            public SimulatedOperationService(final Set<Capability> capabilities, final long currentSessionId) {
+                this.capabilities = capabilities;
+                sGet = new SimulatedGet(String.valueOf(currentSessionId));
+            }
+
+            @Override
+            public Set<Capability> getCapabilities() {
+                return capabilities;
+            }
+
+            @Override
+            public Set<NetconfOperation> getNetconfOperations() {
+                return Sets.<NetconfOperation>newHashSet(sGet);
+            }
+
+            @Override
+            public void close() {
+            }
+
+        }
+    }
+
+    /** Logs session up/down events; holds no per-simulator state, hence static. */
+    private static class LoggingMonitoringService implements SessionMonitoringService {
+        @Override
+        public void onSessionUp(final NetconfManagementSession session) {
+            LOG.debug("Session {} established", session);
+        }
+
+        @Override
+        public void onSessionDown(final NetconfManagementSession session) {
+            LOG.debug("Session {} down", session);
+        }
+    }
+
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.test.tool;
+
+import com.google.common.base.Optional;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
+import org.opendaylight.controller.netconf.confignetconfconnector.operations.AbstractConfigNetconfOperation;
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+class SimulatedGet extends AbstractConfigNetconfOperation {
+
+ SimulatedGet(final String netconfSessionIdForReporting) {
+ super(null, netconfSessionIdForReporting);
+ }
+
+ @Override
+ protected Element handleWithNoSubsequentOperations(final Document document, final XmlElement operationElement) throws NetconfDocumentedException {
+ return XmlUtil.createElement(document, XmlNetconfConstants.DATA_KEY, Optional.<String>absent());
+ }
+
+ @Override
+ protected String getOperationName() {
+ return XmlNetconfConstants.GET;
+ }
+}
final NetconfDocumentedException sendErrorException) {
try {
final Element incommingRpc = incommingDocument.getDocumentElement();
- Preconditions.checkState(incommingRpc.getTagName().equals(XmlNetconfConstants.RPC_KEY), "Missing "
- + XmlNetconfConstants.RPC_KEY + " " + "element");
+ Preconditions.checkState(incommingRpc.getTagName().equals(XmlNetconfConstants.RPC_KEY), "Missing %s element",
+ XmlNetconfConstants.RPC_KEY);
final Element rpcReply = errorDocument.getDocumentElement();
- Preconditions.checkState(rpcReply.getTagName().equals(XmlNetconfConstants.RPC_REPLY_KEY), "Missing "
- + XmlNetconfConstants.RPC_REPLY_KEY + " element");
+ Preconditions.checkState(rpcReply.getTagName().equals(XmlNetconfConstants.RPC_REPLY_KEY), "Missing %s element",
+ XmlNetconfConstants.RPC_REPLY_KEY);
final NamedNodeMap incomingAttributes = incommingRpc.getAttributes();
for (int i = 0; i < incomingAttributes.getLength(); i++) {
@Override
public void operationComplete(final ChannelFuture channelFuture) throws Exception {
- Preconditions.checkState(channelFuture.isSuccess(), "Unable to send exception {}", sendErrorException,
+ Preconditions.checkState(channelFuture.isSuccess(), "Unable to send exception %s", sendErrorException,
channelFuture.cause());
}
}
<module>netconf-it</module>
</modules>
</profile>
+
+ <profile>
+ <id>testtool</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <modules>
+ <module>netconf-testtool</module>
+ </modules>
+ </profile>
</profiles>
</project>
<!-- Karaf Distribution -->
<module>opendaylight/dummy-console</module>
<module>opendaylight/karaf-branding</module>
+ <module>opendaylight/distribution/opendaylight-karaf-empty</module>
<module>opendaylight/distribution/opendaylight-karaf</module>
+ <module>opendaylight/distribution/opendaylight-karaf-resources</module>
<module>features</module>
</modules>
<scm>