.DS_STORE
.metadata
opendaylight/md-sal/sal-distributed-datastore/journal
+!opendaylight/distribution/opendaylight-karaf-resources/src/main/resources/bin
+
</parent>
<artifactId>features-config-netty</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-config-persister</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-netty-config</artifactId>
+        <!--
+          Note: the reason the type and classifier
+          are here instead of in opendaylight/commons/opendaylight/pom.xml
+          is that they are used as jars in the distribution.
+        -->
+ <version>${config.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
+ </dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
</dependency>
</dependencies>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-config-persister</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<version>${yangtools.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-netconf</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<groupId>org.eclipse.persistence</groupId>
<artifactId>org.eclipse.persistence.moxy</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-config</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
</properties>
<dependencies>
+ <!-- dependency for opendaylight-karaf-empty for use by testing -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>opendaylight-karaf-empty</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <type>zip</type>
+ </dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
<version>${yangtools.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<groupId>org.opendaylight.controller</groupId>
<artifactId>config-manager</artifactId>
</dependency>
+ <!-- test the features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
<repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
<feature name='odl-config-all' version='${project.version}'>
- <feature version='${project.version}'>odl-mdsal-common</feature>
+ <feature version='${mdsal.version}'>odl-mdsal-common</feature>
<feature version='${project.version}'>odl-config-api</feature>
<feature version='${project.version}'>odl-config-netty-config-api</feature>
<feature version='${project.version}'>odl-config-core</feature>
</parent>
<artifactId>features-flow</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<version>${mdsal.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.model</groupId>
<groupId>org.opendaylight.controller.md</groupId>
<artifactId>forwardingrules-manager</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-mdsal</artifactId>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-yangtools</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config-persister</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-config-netty</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>md-sal-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-connector-config</artifactId>
+ <version>${netconf.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-rest-connector-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller.samples</groupId>
<dependency>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>toaster-config</artifactId>
+ <version>${mdsal.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
+ </dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ <version>0.6.2-SNAPSHOT</version>
</dependency>
</dependencies>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
</parent>
<artifactId>features-protocol-framework</artifactId>
<version>${protocol-framework.version}</version>
- <packaging>pom</packaging>
+ <packaging>jar</packaging>
<properties>
<features.file>features.xml</features.file>
<artifactId>features-config</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>protocol-framework</artifactId>
</dependency>
+ <!-- test to validate features.xml -->
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ </dependency>
</dependencies>
<build>
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <karaf.distro.groupId>org.opendaylight.controller</karaf.distro.groupId>
+ <karaf.distro.artifactId>opendaylight-karaf-empty</karaf.distro.artifactId>
+ <karaf.distro.version>${commons.opendaylight.version}</karaf.distro.version>
+ </systemPropertyVariables>
+ <dependenciesToScan>
+ <dependency>org.opendaylight.yangtools:features-test</dependency>
+ </dependenciesToScan>
+ </configuration>
+ </plugin>
</plugins>
</build>
<scm>
<artifactId>toaster-config</artifactId>
<version>${mdsal.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>features-test</artifactId>
+ <version>${yangtools.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>features-yangtools</artifactId>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>commons.opendaylight</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <relativePath>../../commons/opendaylight</relativePath>
+ </parent>
+ <artifactId>opendaylight-karaf-empty</artifactId>
+ <packaging>pom</packaging>
+ <prerequisites>
+ <maven>3.0</maven>
+ </prerequisites>
+
+ <dependencies>
+ <dependency>
+ <!-- scope is compile so all features (there is only one) are installed
+ into startup.properties and the feature repo itself is not installed -->
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>framework</artifactId>
+ <version>${karaf.version}</version>
+ <type>kar</type>
+ </dependency>
+ <!-- scope is runtime so the feature repo is listed in the features
+ service config file, and features may be installed using the
+ karaf-maven-plugin configuration -->
+ <dependency>
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>standard</artifactId>
+ <version>${karaf.version}</version>
+ <classifier>features</classifier>
+ <type>xml</type>
+ <scope>runtime</scope>
+ </dependency>
+
+ <!-- ODL Branding -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Resources needed -->
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>opendaylight-karaf-resources</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>cleanVersions</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>copy</goal>
+ <goal>unpack</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.karaf.tooling</groupId>
+ <artifactId>karaf-maven-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>commands-generate-help</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.fusesource.scalate</groupId>
+ <artifactId>maven-scalate-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>sitegen</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.servicemix.tooling</groupId>
+ <artifactId>depends-maven-plugin</artifactId>
+ <versionRange>[0,)</versionRange>
+ <goals>
+ <goal>generate-depends-file</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore></ignore>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.karaf.tooling</groupId>
+ <artifactId>karaf-maven-plugin</artifactId>
+ <version>${karaf.version}</version>
+ <extensions>true</extensions>
+ <executions>
+ <execution>
+ <id>process-resources</id>
+ <goals>
+ <goal>install-kars</goal>
+ </goals>
+ <phase>process-resources</phase>
+ </execution>
+ <execution>
+ <id>package</id>
+ <goals>
+ <goal>instance-create-archive</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-checkstyle-plugin</artifactId>
+ <version>${checkstyle.version}</version>
+ <configuration>
+ <excludes>**\/target\/,**\/bin\/,**\/target-ide\/,**\/configuration\/initial\/</excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.6</version>
+ <executions>
+ <execution>
+ <id>copy</id>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+            <!-- bind this copy to the generate-resources phase -->
+ <phase>generate-resources</phase>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>karaf.branding</artifactId>
+ <version>${karaf.branding.version}</version>
+ <outputDirectory>target/assembly/lib</outputDirectory>
+ <destFileName>karaf.branding-${branding.version}.jar</destFileName>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ <execution>
+ <id>unpack-karaf-resources</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>prepare-package</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+ <groupId>org.opendaylight.controller</groupId>
+ <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+ <excludes>META-INF\/**</excludes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <chmod perm="755">
+ <fileset dir="${project.build.directory}/assembly/bin">
+ <include name="karaf"/>
+ <include name="instance"/>
+ </fileset>
+ </chmod>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <scm>
+ <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+ <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+ <tag>HEAD</tag>
+ <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main</url>
+ </scm>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+
+ This program and the accompanying materials are made available under the
+ terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ and is available at http://www.eclipse.org/legal/epl-v10.html
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>commons.opendaylight</artifactId>
+ <version>1.4.2-SNAPSHOT</version>
+ <relativePath>../../commons/opendaylight</relativePath>
+ </parent>
+ <artifactId>opendaylight-karaf-resources</artifactId>
+ <description>Resources for opendaylight-karaf</description>
+ <packaging>jar</packaging>
+</project>
<version>${karaf.version}</version>
<type>kar</type>
</dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>base-features</artifactId>
- <version>${project.version}</version>
- <type>kar</type>
- </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>karaf.branding</artifactId>
<scope>compile</scope>
</dependency>
- <!-- scope is runtime so the feature repo is listed in the features
- service config file, and features may be installed using the
- karaf-maven-plugin configuration -->
- <dependency>
- <groupId>org.apache.karaf.features</groupId>
- <artifactId>standard</artifactId>
- <version>${karaf.version}</version>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>base-features</artifactId>
- <version>${project.parent.version}</version>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
- </dependency>
- <!-- scope is compile so all features (there is only one) are installed
- into startup.properties and the feature repo itself is not installed -->
+
+ <!-- Resources needed -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>extras-features</artifactId>
+ <artifactId>opendaylight-karaf-resources</artifactId>
<version>${project.version}</version>
- <type>kar</type>
- <scope>runtime</scope>
- </dependency>
- <!-- AD-SAL Related Features -->
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>features-adsal</artifactId>
- <classifier>features</classifier>
- <type>xml</type>
- <scope>runtime</scope>
</dependency>
+
+    <!-- scope is not runtime, so the feature repo is pulled into the local
+         repo on build and is thus available when you actually run. Failure
+         to do so can lead to very confusing errors for devs. -->
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>features-nsf</artifactId>
- <version>${project.version}</version>
+ <groupId>org.apache.karaf.features</groupId>
+ <artifactId>standard</artifactId>
+ <version>${karaf.version}</version>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
+ <!--
+ controller provided features:
+ Note: Nothing should go here that is not locked
+ down with testing... ie, no broken feature repos
+ -->
+
<!-- MD-SAL Related Features -->
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-mdsal</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>features-flow</artifactId>
<classifier>features</classifier>
<type>xml</type>
- <scope>runtime</scope>
</dependency>
</dependencies>
</artifactItems>
</configuration>
</execution>
+ <execution>
+ <id>unpack-karaf-resources</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>prepare-package</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/assembly</outputDirectory>
+ <groupId>org.opendaylight.controller</groupId>
+ <includeArtifactIds>opendaylight-karaf-resources</includeArtifactIds>
+ <excludes>META-INF\/**</excludes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <chmod perm="755">
+ <fileset dir="${project.build.directory}/assembly/bin">
+ <include name="karaf"/>
+ <include name="instance"/>
+ </fileset>
+ </chmod>
+ </tasks>
+ </configuration>
+ </execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <phase>prepare-package</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <tasks>
- <copy todir="${project.build.directory}/assembly/bin" overwrite="true">
- <fileset dir="${basedir}/src/main/resources/karaf/" includes="karaf,karaf.bat,instance,instance.bat"/>
- </copy>
- </tasks>
- </configuration>
- </execution>
- </executions>
- </plugin>
</plugins>
</build>
<scm>
+++ /dev/null
-################################################################################
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
-
-#
-# Java platform package export properties.
-#
-
-# Standard package set. Note that:
-# - javax.transaction* is exported with a mandatory attribute
-jre-1.6= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-# Standard package set. Note that:
-# - javax.transaction* is exported with a mandatory attribute
-jre-1.7= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
-
-jre-1.8= \
- javax.accessibility, \
- javax.activation;version="1.1", \
- javax.activity, \
- javax.crypto, \
- javax.crypto.interfaces, \
- javax.crypto.spec, \
- javax.imageio, \
- javax.imageio.event, \
- javax.imageio.metadata, \
- javax.imageio.plugins.bmp, \
- javax.imageio.plugins.jpeg, \
- javax.imageio.spi, \
- javax.imageio.stream, \
- javax.jws, \
- javax.jws.soap, \
- javax.lang.model, \
- javax.lang.model.element, \
- javax.lang.model.type, \
- javax.lang.model.util, \
- javax.management, \
- javax.management.loading, \
- javax.management.modelmbean, \
- javax.management.monitor, \
- javax.management.openmbean, \
- javax.management.relation, \
- javax.management.remote, \
- javax.management.remote.rmi, \
- javax.management.timer, \
- javax.naming, \
- javax.naming.directory, \
- javax.naming.event, \
- javax.naming.ldap, \
- javax.naming.spi, \
- javax.net, \
- javax.net.ssl, \
- javax.print, \
- javax.print.attribute, \
- javax.print.attribute.standard, \
- javax.print.event, \
- javax.rmi, \
- javax.rmi.CORBA, \
- javax.rmi.ssl, \
- javax.script, \
- javax.security.auth, \
- javax.security.auth.callback, \
- javax.security.auth.kerberos, \
- javax.security.auth.login, \
- javax.security.auth.spi, \
- javax.security.auth.x500, \
- javax.security.cert, \
- javax.security.sasl, \
- javax.sound.midi, \
- javax.sound.midi.spi, \
- javax.sound.sampled, \
- javax.sound.sampled.spi, \
- javax.sql, \
- javax.sql.rowset, \
- javax.sql.rowset.serial, \
- javax.sql.rowset.spi, \
- javax.swing, \
- javax.swing.border, \
- javax.swing.colorchooser, \
- javax.swing.event, \
- javax.swing.filechooser, \
- javax.swing.plaf, \
- javax.swing.plaf.basic, \
- javax.swing.plaf.metal, \
- javax.swing.plaf.multi, \
- javax.swing.plaf.synth, \
- javax.swing.table, \
- javax.swing.text, \
- javax.swing.text.html, \
- javax.swing.text.html.parser, \
- javax.swing.text.rtf, \
- javax.swing.tree, \
- javax.swing.undo, \
- javax.tools, \
- javax.transaction; javax.transaction.xa; partial=true; mandatory:=partial, \
- javax.xml, \
- javax.xml.bind;version="2.2.1", \
- javax.xml.bind.annotation;version="2.2.1", \
- javax.xml.bind.annotation.adapters;version="2.2.1", \
- javax.xml.bind.attachment;version="2.2.1", \
- javax.xml.bind.helpers;version="2.2.1", \
- javax.xml.bind.util;version="2.2.1", \
- javax.xml.crypto, \
- javax.xml.crypto.dom, \
- javax.xml.crypto.dsig, \
- javax.xml.crypto.dsig.dom, \
- javax.xml.crypto.dsig.keyinfo, \
- javax.xml.crypto.dsig.spec, \
- javax.xml.datatype, \
- javax.xml.namespace, \
- javax.xml.parsers, \
- javax.xml.soap;version="1.3", \
- javax.xml.stream;version="1.2", \
- javax.xml.stream.events;version="1.2", \
- javax.xml.stream.util;version="1.2", \
- javax.xml.transform, \
- javax.xml.transform.dom, \
- javax.xml.transform.sax, \
- javax.xml.transform.stax, \
- javax.xml.transform.stream, \
- javax.xml.validation, \
- javax.xml.ws;version="2.2", \
- javax.xml.ws.handler;version="2.2", \
- javax.xml.ws.handler.soap;version="2.2", \
- javax.xml.ws.http;version="2.2", \
- javax.xml.ws.soap;version="2.2", \
- javax.xml.ws.spi;version="2.2", \
- javax.xml.ws.wsaddressing;version="2.2", \
- javax.xml.ws.spi.http;version="2.2", \
- javax.xml.xpath, \
- org.ietf.jgss, \
- org.omg.CORBA, \
- org.omg.CORBA_2_3, \
- org.omg.CORBA_2_3.portable, \
- org.omg.CORBA.DynAnyPackage, \
- org.omg.CORBA.ORBPackage, \
- org.omg.CORBA.portable, \
- org.omg.CORBA.TypeCodePackage, \
- org.omg.CosNaming, \
- org.omg.CosNaming.NamingContextExtPackage, \
- org.omg.CosNaming.NamingContextPackage, \
- org.omg.Dynamic, \
- org.omg.DynamicAny, \
- org.omg.DynamicAny.DynAnyFactoryPackage, \
- org.omg.DynamicAny.DynAnyPackage, \
- org.omg.IOP, \
- org.omg.IOP.CodecFactoryPackage, \
- org.omg.IOP.CodecPackage, \
- org.omg.Messaging, \
- org.omg.PortableInterceptor, \
- org.omg.PortableInterceptor.ORBInitInfoPackage, \
- org.omg.PortableServer, \
- org.omg.PortableServer.CurrentPackage, \
- org.omg.PortableServer.POAManagerPackage, \
- org.omg.PortableServer.POAPackage, \
- org.omg.PortableServer.portable, \
- org.omg.PortableServer.ServantLocatorPackage, \
- org.omg.SendingContext, \
- org.omg.stub.java.rmi, \
- org.omg.stub.javax.management.remote.rmi, \
- org.w3c.dom, \
- org.w3c.dom.bootstrap, \
- org.w3c.dom.css, \
- org.w3c.dom.events, \
- org.w3c.dom.html, \
- org.w3c.dom.ls, \
- org.w3c.dom.ranges, \
- org.w3c.dom.stylesheets, \
- org.w3c.dom.traversal, \
- org.w3c.dom.views, \
- org.w3c.dom.xpath, \
- org.xml.sax, \
- org.xml.sax.ext, \
- org.xml.sax.helpers, \
- javax.annotation.processing
+++ /dev/null
-#Bundles to be started on startup, with startlevel
-
-# feature: framework version: 3.0.1
-mvn\:org.ops4j.base/ops4j-base-lang/1.4.0 = 5
-mvn\:biz.aQute.bnd/bndlib/2.2.0 = 5
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-bnd/1.7.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-maven-commons/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-aether/1.6.0 = 5
-mvn\:org.ops4j.pax.url/pax-url-wrap/1.6.0 = 5
-mvn\:javax.annotation/javax.annotation-api/1.2 = 5
-mvn\:org.ops4j.pax.logging/pax-logging-api/1.7.2 = 8
-mvn\:org.ops4j.pax.logging/pax-logging-service/1.7.2 = 8
-mvn\:org.apache.karaf.service/org.apache.karaf.service.guard/3.0.1 = 10
-mvn\:org.apache.felix/org.apache.felix.configadmin/1.6.0 = 10
-mvn\:org.apache.felix/org.apache.felix.fileinstall/3.2.8 = 11
-mvn\:org.ow2.asm/asm-all/4.1 = 12
-mvn\:org.apache.aries/org.apache.aries.util/1.1.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.api/1.0.0 = 20
-mvn\:org.apache.aries.proxy/org.apache.aries.proxy.impl/1.0.2 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.api/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.cm/1.0.3 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core.compatibility/1.0.0 = 20
-mvn\:org.apache.aries.blueprint/org.apache.aries.blueprint.core/1.4.0 = 20
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.spring/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.blueprint/3.0.1 = 24
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.wrap/3.0.1 = 24
-mvn\:org.apache.karaf.region/org.apache.karaf.region.core/3.0.1 = 25
-mvn\:org.apache.karaf.features/org.apache.karaf.features.core/3.0.1 = 25
-mvn\:org.apache.karaf.deployer/org.apache.karaf.deployer.features/3.0.1 = 26
-mvn\:jline/jline/2.11 = 30
-mvn\:org.jledit/core/0.2.1 = 30
-mvn\:org.fusesource.jansi/jansi/1.11 = 30
-mvn\:org.ops4j.base/ops4j-base-util-property/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-xml/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-util-collections/1.4.0 = 30
-mvn\:org.ops4j.pax.url/pax-url-commons/1.6.0 = 30
-mvn\:org.ops4j.pax.swissbox/pax-swissbox-property/1.7.0 = 30
-mvn\:org.ops4j.base/ops4j-base-net/1.4.0 = 30
-mvn\:org.ops4j.base/ops4j-base-monitors/1.4.0 = 30
-mvn\:org.apache.karaf.features/org.apache.karaf.features.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.console/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.modules/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.config/3.0.1 = 30
-mvn\:org.apache.karaf.jaas/org.apache.karaf.jaas.boot/3.0.1 = 30
-mvn\:org.apache.sshd/sshd-core/0.9.0 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.table/3.0.1 = 30
-mvn\:org.apache.karaf.bundle/org.apache.karaf.bundle.core/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.help/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.core/3.0.1 = 30
-mvn\:org.apache.karaf.system/org.apache.karaf.system.command/3.0.1 = 30
-mvn\:org.apache.karaf.shell/org.apache.karaf.shell.commands/3.0.1 = 30
-mvn\:org.apache.aries.quiesce/org.apache.aries.quiesce.api/1.0.0 = 30
+++ /dev/null
-#
-# The properties defined in this file will be made available through system
-# properties at the very beginning of the Karaf's boot process.
-#
-
-# Use Equinox as default OSGi Framework Implementation
-karaf.framework=equinox
-
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
-
-
-# Log level when the pax-logging service is not available
-# This level will only be used while the pax-logging service bundle
-# is not fully available.
-# To change log levels, please refer to the org.ops4j.pax.logging.cfg file
-# instead.
-org.ops4j.pax.logging.DefaultServiceLog.level = ERROR
-
-#
-# Name of this Karaf instance.
-#
-karaf.name = root
-
-#
-# Default repository where bundles will be loaded from before using
-# other Maven repositories. For the full Maven configuration, see
-# the org.ops4j.pax.url.mvn.cfg file.
-#
-karaf.default.repository = system
-
-#
-# Location of a shell script that will be run when starting a shell
-# session. This script can be used to create aliases and define
-# additional commands.
-#
-karaf.shell.init.script = ${karaf.etc}/shell.init.script
-
-#
-# Sets the maximum size of the shell command history. If not set,
-# defaults to 500 entries. Setting to 0 will disable history.
-#
-# karaf.shell.history.maxSize = 0
-
-#
-# Deletes the entire karaf.data directory at every start
-#
-karaf.clean.all = false
-
-#
-# Deletes the karaf.data/cache directory at every start
-#
-karaf.clean.cache = false
-
-#
-# Roles to use when logging into a local Karaf console.
-#
-# The syntax is the following:
-# [classname:]principal
-# where classname is the class name of the principal object
-# (defaults to org.apache.karaf.jaas.modules.RolePrincipal)
-# and principal is the name of the principal of that class
-# (defaults to instance).
-#
-karaf.local.roles = admin,manager,viewer
-
-#
-# Set this empty property to avoid errors when validating xml documents.
-#
-xml.catalog.files =
-
-#
-# Suppress the bell in the console when hitting backspace too many times
-# for example
-#
-jline.nobell = true
-
-#
-# ServiceMix specs options
-#
-org.apache.servicemix.specs.debug = false
-org.apache.servicemix.specs.timeout = 0
-
-#
-# Settings for the OSGi 4.3 Weaving
-# By default, we will not weave any classes. Change this setting to include classes
-# that you application needs to have woven.
-#
-org.apache.aries.proxy.weaving.enabled = none
-# Classes not to weave - Aries default + Xerces which is known to have issues.
-org.apache.aries.proxy.weaving.disabled = org.objectweb.asm.*,org.slf4j.*,org.apache.log4j.*,javax.*,org.apache.xerces.*
-
-#
-# By default, only Karaf shell commands are secured, but additional services can be
-# secured by expanding this filter
-#
-karaf.secured.services = (&(osgi.command.scope=*)(osgi.command.function=*))
-
-#
-# Security properties
-#
-# To enable OSGi security, uncomment the properties below,
-# install the framework-security feature and restart.
-#
-#java.security.policy=${karaf.etc}/all.policy
-#org.osgi.framework.security=osgi
-#org.osgi.framework.trust.repositories=${karaf.etc}/trustStore.ks
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier.InstanceIdentifierBuilder;
InstanceIdentifier<NodeConnector> value = (InstanceIdentifier<NodeConnector>) ref.getValue();
LOG.debug("updating node connector : {}.", value);
NodeConnector build = data.build();
- tx.put(LogicalDatastoreType.OPERATIONAL, value, build);
+ tx.merge(LogicalDatastoreType.OPERATIONAL, value, build, true);
}
});
}
manager.enqueue(new InventoryOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- final NodeBuilder nodeBuilder = new NodeBuilder(node);
- nodeBuilder.setKey(new NodeKey(node.getId()));
-
final FlowCapableNode augment = InventoryMapping.toInventoryAugment(flowNode);
- nodeBuilder.addAugmentation(FlowCapableNode.class, augment);
LOG.debug("updating node :{} ", path);
- tx.put(LogicalDatastoreType.OPERATIONAL, path, augment);
+ tx.merge(LogicalDatastoreType.OPERATIONAL, path, augment, true);
}
});
}
description "";
}
- typedef buffer-id {
- type uint32;
- }
-
typedef connection-cookie {
type uint32;
}
}
notification packet-received {
-
- leaf buffer-id {
- type buffer-id;
- }
-
leaf connection-cookie {
type connection-cookie;
}
type inv:node-connector-ref;
}
leaf buffer-id {
- type buffer-id;
+ type uint32;
}
uses raw-packet;
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>binding-generator-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>binding-data-codec</artifactId>
+ <version>0.6.2-SNAPSHOT</version>
+ </dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>yang-data-impl</artifactId>
import java.util.Collection;
import java.util.Collections;
-
+import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBindingDataBroker;
import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
public class BindingAsyncDataBrokerImplModule extends
org.opendaylight.controller.config.yang.md.sal.binding.impl.AbstractBindingAsyncDataBrokerImplModule implements
@Override
public java.lang.AutoCloseable createInstance() {
Broker domBroker = getDomAsyncBrokerDependency();
- BindingIndependentMappingService mappingService = getBindingMappingServiceDependency();
+ BindingToNormalizedNodeCodec mappingService = getBindingMappingServiceDependency();
// FIXME: Switch this to DOM Broker registration which would not require
// BundleContext when API are updated.
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.config.yang.md.sal.binding.impl;
-
-import java.util.concurrent.ExecutorService;
-
-import org.opendaylight.controller.sal.binding.codegen.impl.SingletonHolder;
-import org.opendaylight.controller.sal.binding.impl.RootDataBrokerImpl;
-import org.opendaylight.controller.sal.binding.impl.connect.dom.BindingDomConnectorDeployer;
-import org.opendaylight.controller.sal.binding.impl.connect.dom.BindingIndependentConnector;
-import org.opendaylight.controller.sal.binding.impl.forward.DomForwardedDataBrokerImpl;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
-
-/**
-*
-*/
-public final class DataBrokerImplModule extends
- org.opendaylight.controller.config.yang.md.sal.binding.impl.AbstractDataBrokerImplModule {
-
- public DataBrokerImplModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
- super(identifier, dependencyResolver);
- }
-
- public DataBrokerImplModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- final DataBrokerImplModule oldModule, final java.lang.AutoCloseable oldInstance) {
- super(identifier, dependencyResolver, oldModule, oldInstance);
- }
-
- @Override
- public void validate() {
- super.validate();
- }
-
- @Override
- public java.lang.AutoCloseable createInstance() {
- RootDataBrokerImpl dataBindingBroker;
-
-
- ExecutorService listeningExecutor = SingletonHolder.getDefaultCommitExecutor();
- BindingIndependentMappingService potentialMapping = getMappingServiceDependency();
- if (getDomBrokerDependency() != null && potentialMapping != null) {
-
- dataBindingBroker = createDomConnectedBroker(listeningExecutor,potentialMapping);
- } else {
- dataBindingBroker = createStandAloneBroker(listeningExecutor);
- }
- dataBindingBroker.registerRuntimeBean(getRootRuntimeBeanRegistratorWrapper());
- dataBindingBroker.setNotificationExecutor(SingletonHolder.getDefaultChangeEventExecutor());
- return dataBindingBroker;
- }
-
-
- private RootDataBrokerImpl createStandAloneBroker(final ExecutorService listeningExecutor) {
- RootDataBrokerImpl broker = new RootDataBrokerImpl();
- broker.setExecutor(listeningExecutor);
- return broker;
- }
-
- private RootDataBrokerImpl createDomConnectedBroker(final ExecutorService listeningExecutor, final BindingIndependentMappingService mappingService) {
- DomForwardedDataBrokerImpl forwardedBroker = new DomForwardedDataBrokerImpl();
- forwardedBroker.setExecutor(listeningExecutor);
- BindingIndependentConnector connector = BindingDomConnectorDeployer.createConnector(mappingService);
- getDomBrokerDependency().registerProvider(forwardedBroker, null);
- ProviderSession domContext = forwardedBroker.getDomProviderContext();
- forwardedBroker.setConnector(connector);
- forwardedBroker.setDomProviderContext(domContext);
- forwardedBroker.startForwarding();
- return forwardedBroker;
- }
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.config.yang.md.sal.binding.impl;
-
-
-/**
-*
-*/
-public class DataBrokerImplModuleFactory extends
- org.opendaylight.controller.config.yang.md.sal.binding.impl.AbstractDataBrokerImplModuleFactory {
-
-}
*/
package org.opendaylight.controller.config.yang.md.sal.binding.impl;
+import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.Collection;
import java.util.Collections;
-
+import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBackwardsCompatibleDataBroker;
import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker;
import org.opendaylight.controller.sal.binding.codegen.impl.SingletonHolder;
import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
-
-import com.google.common.util.concurrent.ListeningExecutorService;
/**
*
@Override
public java.lang.AutoCloseable createInstance() {
ListeningExecutorService listeningExecutor = SingletonHolder.getDefaultCommitExecutor();
- BindingIndependentMappingService mappingService = getBindingMappingServiceDependency();
+ BindingToNormalizedNodeCodec mappingService = getBindingMappingServiceDependency();
Broker domBroker = getDomAsyncBrokerDependency();
ProviderSession session = domBroker.registerProvider(this, null);
ForwardedBackwardsCompatibleDataBroker dataBroker = new ForwardedBackwardsCompatibleDataBroker(domDataBroker,
mappingService, schemaService,listeningExecutor);
- dataBroker.setConnector(BindingDomConnectorDeployer.createConnector(getBindingMappingServiceDependency()));
+ dataBroker.setConnector(BindingDomConnectorDeployer.createConnector(mappingService.getLegacy()));
dataBroker.setDomProviderContext(session);
return dataBroker;
}
*/
package org.opendaylight.controller.config.yang.md.sal.binding.impl;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import java.util.Hashtable;
import java.util.Map.Entry;
import java.util.Set;
-
+import javassist.ClassPool;
+import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.sal.binding.codegen.impl.SingletonHolder;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.StreamWriterGenerator;
+import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
import org.opendaylight.yangtools.concepts.Delegator;
+import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
import org.opendaylight.yangtools.sal.binding.generator.impl.RuntimeGeneratedMappingServiceImpl;
import org.opendaylight.yangtools.yang.binding.DataContainer;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-
/**
*
*/
private BundleContext bundleContext;
- public RuntimeMappingModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
+ public RuntimeMappingModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver) {
super(identifier, dependencyResolver);
}
- public RuntimeMappingModule(org.opendaylight.controller.config.api.ModuleIdentifier identifier,
- org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
- RuntimeMappingModule oldModule, java.lang.AutoCloseable oldInstance) {
+ public RuntimeMappingModule(final org.opendaylight.controller.config.api.ModuleIdentifier identifier,
+ final org.opendaylight.controller.config.api.DependencyResolver dependencyResolver,
+ final RuntimeMappingModule oldModule, final java.lang.AutoCloseable oldInstance) {
super(identifier, dependencyResolver, oldModule, oldInstance);
}
}
@Override
- public boolean canReuseInstance(AbstractRuntimeMappingModule oldModule) {
+ public boolean canReuseInstance(final AbstractRuntimeMappingModule oldModule) {
return true;
}
@Override
public java.lang.AutoCloseable createInstance() {
+ final GeneratedClassLoadingStrategy classLoading = getGlobalClassLoadingStrategy();
+ final BindingIndependentMappingService legacyMapping = getGlobalLegacyMappingService(classLoading);
+ BindingNormalizedNodeCodecRegistry codecRegistry = new BindingNormalizedNodeCodecRegistry(new StreamWriterGenerator(SingletonHolder.JAVASSIST));
+ BindingToNormalizedNodeCodec instance = new BindingToNormalizedNodeCodec(classLoading, legacyMapping, codecRegistry);
+ bundleContext.registerService(SchemaContextListener.class, instance, new Hashtable<String,String>());
+ return instance;
+ }
- RuntimeGeneratedMappingServiceProxy potential = tryToReuseGlobalInstance();
- if(potential != null) {
- return potential;
+ private BindingIndependentMappingService getGlobalLegacyMappingService(final GeneratedClassLoadingStrategy classLoading) {
+ BindingIndependentMappingService potential = tryToReuseGlobalMappingServiceInstance();
+ if(potential == null) {
+ potential = new RuntimeGeneratedMappingServiceImpl(ClassPool.getDefault(),classLoading);
+ bundleContext.registerService(SchemaContextListener.class, (SchemaContextListener) potential, new Hashtable<String,String>());
}
+ return potential;
+ }
- final RuntimeGeneratedMappingServiceImpl service = new RuntimeGeneratedMappingServiceImpl(SingletonHolder.CLASS_POOL);
- bundleContext.registerService(SchemaContextListener.class, service, new Hashtable<String,String>());
- return service;
+ private GeneratedClassLoadingStrategy getGlobalClassLoadingStrategy() {
+ ServiceReference<GeneratedClassLoadingStrategy> ref = bundleContext.getServiceReference(GeneratedClassLoadingStrategy.class);
+ return bundleContext.getService(ref);
}
- private RuntimeGeneratedMappingServiceProxy tryToReuseGlobalInstance() {
+ private BindingIndependentMappingService tryToReuseGlobalMappingServiceInstance() {
ServiceReference<BindingIndependentMappingService> serviceRef = getBundleContext().getServiceReference(BindingIndependentMappingService.class);
if(serviceRef == null) {
return null;
}
+ return bundleContext.getService(serviceRef);
- BindingIndependentMappingService delegate = bundleContext.getService(serviceRef);
- if (delegate == null) {
- return null;
- }
- return new RuntimeGeneratedMappingServiceProxy(getBundleContext(),serviceRef,delegate);
}
private BundleContext getBundleContext() {
return bundleContext;
}
- public void setBundleContext(BundleContext bundleContext) {
+ public void setBundleContext(final BundleContext bundleContext) {
this.bundleContext = bundleContext;
}
private ServiceReference<BindingIndependentMappingService> reference;
private BundleContext bundleContext;
- public RuntimeGeneratedMappingServiceProxy(BundleContext bundleContext,
- ServiceReference<BindingIndependentMappingService> serviceRef,
- BindingIndependentMappingService delegate) {
+ public RuntimeGeneratedMappingServiceProxy(final BundleContext bundleContext,
+ final ServiceReference<BindingIndependentMappingService> serviceRef,
+ final BindingIndependentMappingService delegate) {
this.bundleContext = Preconditions.checkNotNull(bundleContext);
this.reference = Preconditions.checkNotNull(serviceRef);
this.delegate = Preconditions.checkNotNull(delegate);
}
@Override
- public CompositeNode toDataDom(DataObject data) {
+ public CompositeNode toDataDom(final DataObject data) {
return delegate.toDataDom(data);
}
@Override
public Entry<YangInstanceIdentifier, CompositeNode> toDataDom(
- Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject> entry) {
+ final Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject> entry) {
return delegate.toDataDom(entry);
}
@Override
public YangInstanceIdentifier toDataDom(
- org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> path) {
+ final org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> path) {
return delegate.toDataDom(path);
}
@Override
public DataObject dataObjectFromDataDom(
- org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> path,
- CompositeNode result) throws DeserializationException {
+ final org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> path,
+ final CompositeNode result) throws DeserializationException {
return delegate.dataObjectFromDataDom(path, result);
}
@Override
- public org.opendaylight.yangtools.yang.binding.InstanceIdentifier<?> fromDataDom(YangInstanceIdentifier entry)
+ public org.opendaylight.yangtools.yang.binding.InstanceIdentifier<?> fromDataDom(final YangInstanceIdentifier entry)
throws DeserializationException {
return delegate.fromDataDom(entry);
}
@Override
- public Set<QName> getRpcQNamesFor(Class<? extends RpcService> service) {
+ public Set<QName> getRpcQNamesFor(final Class<? extends RpcService> service) {
return delegate.getRpcQNamesFor(service);
}
@Override
- public Optional<Class<? extends RpcService>> getRpcServiceClassFor(String namespace, String revision) {
+ public Optional<Class<? extends RpcService>> getRpcServiceClassFor(final String namespace, final String revision) {
return delegate.getRpcServiceClassFor(namespace,revision);
}
@Override
- public DataContainer dataObjectFromDataDom(Class<? extends DataContainer> inputClass, CompositeNode domInput) {
+ public DataContainer dataObjectFromDataDom(final Class<? extends DataContainer> inputClass, final CompositeNode domInput) {
return delegate.dataObjectFromDataDom(inputClass, domInput);
}
import com.google.common.base.Objects;
import com.google.common.base.Optional;
-
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-
import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
import org.opendaylight.yangtools.yang.data.impl.codec.DeserializationException;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaContextListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public abstract class AbstractForwardedDataBroker implements Delegator<DOMDataBroker>, DomForwardedBroker, SchemaContextListener, AutoCloseable {
+public abstract class AbstractForwardedDataBroker implements Delegator<DOMDataBroker>, DomForwardedBroker,
+ SchemaContextListener, AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(AbstractForwardedDataBroker.class);
// The Broker to whom we do all forwarding
private final DOMDataBroker domDataBroker;
- // Mapper to convert from Binding Independent objects to Binding Aware
- // objects
- private final BindingIndependentMappingService mappingService;
-
private final BindingToNormalizedNodeCodec codec;
private BindingIndependentConnector connector;
private ProviderSession context;
private final ListenerRegistration<SchemaContextListener> schemaListenerRegistration;
- protected AbstractForwardedDataBroker(final DOMDataBroker domDataBroker,
- final BindingIndependentMappingService mappingService,final SchemaService schemaService) {
+ protected AbstractForwardedDataBroker(final DOMDataBroker domDataBroker, final BindingToNormalizedNodeCodec codec,
+ final SchemaService schemaService) {
this.domDataBroker = domDataBroker;
- this.mappingService = mappingService;
- this.codec = new BindingToNormalizedNodeCodec(mappingService);
+ this.codec = codec;
this.schemaListenerRegistration = schemaService.registerSchemaContextListener(this);
}
return codec;
}
- protected BindingIndependentMappingService getMappingService() {
- return mappingService;
- }
-
@Override
public DOMDataBroker getDelegate() {
return domDataBroker;
@Override
public void onGlobalContextUpdated(final SchemaContext ctx) {
- codec.onGlobalContextUpdated(ctx);
+ // NOOP
}
public ListenerRegistration<DataChangeListener> registerDataChangeListener(final LogicalDatastoreType store,
- final InstanceIdentifier<?> path, final DataChangeListener listener,
- final DataChangeScope triggeringScope) {
+ final InstanceIdentifier<?> path, final DataChangeListener listener, final DataChangeScope triggeringScope) {
DOMDataChangeListener domDataChangeListener = new TranslatingDataChangeInvoker(store, path, listener,
triggeringScope);
YangInstanceIdentifier domPath = codec.toNormalized(path);
return new ListenerRegistrationImpl(listener, domRegistration);
}
- protected Map<InstanceIdentifier<?>, DataObject> toBinding(
- InstanceIdentifier<?> path,
+ protected Map<InstanceIdentifier<?>, DataObject> toBinding(final InstanceIdentifier<?> path,
final Map<YangInstanceIdentifier, ? extends NormalizedNode<?, ?>> normalized) {
Map<InstanceIdentifier<?>, DataObject> newMap = new HashMap<>();
for (Map.Entry<YangInstanceIdentifier, ? extends NormalizedNode<?, ?>> entry : sortedEntries(normalized)) {
try {
- Optional<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> potential = getCodec().toBinding(
- entry);
+ Optional<Entry<InstanceIdentifier<? extends DataObject>, DataObject>> potential = getCodec().toBinding(entry);
if (potential.isPresent()) {
Entry<InstanceIdentifier<? extends DataObject>, DataObject> binding = potential.get();
newMap.put(binding.getKey(), binding.getValue());
- } else if (entry.getKey().getLastPathArgument() instanceof YangInstanceIdentifier.AugmentationIdentifier) {
- DataObject bindingDataObject = getCodec().toBinding(path, entry.getValue());
- if (bindingDataObject != null) {
- newMap.put(path, bindingDataObject);
- }
}
} catch (DeserializationException e) {
LOG.warn("Failed to transform {}, omitting it", entry, e);
private static final Comparator<Entry<YangInstanceIdentifier, ?>> MAP_ENTRY_COMPARATOR = new Comparator<Entry<YangInstanceIdentifier, ?>>() {
@Override
- public int compare(final Entry<YangInstanceIdentifier, ?> left,
- final Entry<YangInstanceIdentifier, ?> right) {
+ public int compare(final Entry<YangInstanceIdentifier, ?> left, final Entry<YangInstanceIdentifier, ?> right) {
final Iterator<?> li = left.getKey().getPathArguments().iterator();
final Iterator<?> ri = right.getKey().getPathArguments().iterator();
}
};
- private static <T> Iterable<Entry<YangInstanceIdentifier,T>> sortedEntries(final Map<YangInstanceIdentifier, T> map) {
+ private static <T> Iterable<Entry<YangInstanceIdentifier, T>> sortedEntries(final Map<YangInstanceIdentifier, T> map) {
if (!map.isEmpty()) {
ArrayList<Entry<YangInstanceIdentifier, T>> entries = new ArrayList<>(map.entrySet());
Collections.sort(entries, MAP_ENTRY_COMPARATOR);
}
}
- protected Set<InstanceIdentifier<?>> toBinding(InstanceIdentifier<?> path,
+ protected Set<InstanceIdentifier<?>> toBinding(final InstanceIdentifier<?> path,
final Set<YangInstanceIdentifier> normalized) {
Set<InstanceIdentifier<?>> hashSet = new HashSet<>();
for (YangInstanceIdentifier normalizedPath : normalized) {
if (path.isWildcarded()) {
return Optional.absent();
}
-
- try {
- return Optional.fromNullable(getCodec().toBinding(path, data));
- } catch (DeserializationException e) {
- return Optional.absent();
- }
+ return (Optional) getCodec().deserializeFunction(path).apply(Optional.<NormalizedNode<?, ?>> of(data));
}
private class TranslatingDataChangeInvoker implements DOMDataChangeListener {
}
@Override
- public void onDataChanged(
- final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
+ public void onDataChanged(final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
bindingDataChangeListener.onDataChanged(new TranslatedDataChangeEvent(change, path));
}
}
@Override
public DataObject getOriginalSubtree() {
if (originalDataCache == null) {
- if(domEvent.getOriginalSubtree() != null) {
+ if (domEvent.getOriginalSubtree() != null) {
originalDataCache = toBindingData(path, domEvent.getOriginalSubtree());
} else {
originalDataCache = Optional.absent();
@Override
public DataObject getUpdatedSubtree() {
if (updatedDataCache == null) {
- if(domEvent.getUpdatedSubtree() != null) {
+ if (domEvent.getUpdatedSubtree() != null) {
updatedDataCache = toBindingData(path, domEvent.getUpdatedSubtree());
} else {
updatedDataCache = Optional.absent();
*/
package org.opendaylight.controller.md.sal.binding.impl;
-import java.lang.reflect.Method;
+import com.google.common.base.Function;
+import com.google.common.base.Optional;
import java.util.AbstractMap.SimpleEntry;
-import java.util.Collection;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.Map.Entry;
-import java.util.Set;
-
-import javax.annotation.Nullable;
-
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationException;
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationOperation;
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizer;
-import org.opendaylight.yangtools.yang.binding.Augmentation;
-import org.opendaylight.yangtools.yang.binding.BindingMapping;
+import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
+import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
+import org.opendaylight.yangtools.sal.binding.generator.util.BindingRuntimeContext;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.binding.YangModuleInfo;
-import org.opendaylight.yangtools.yang.binding.util.BindingReflections;
-import org.opendaylight.yangtools.yang.common.QName;
-import org.opendaylight.yangtools.yang.common.QNameModule;
-import org.opendaylight.yangtools.yang.data.api.CompositeNode;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
-import org.opendaylight.yangtools.yang.data.api.schema.AugmentationNode;
-import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
import org.opendaylight.yangtools.yang.data.impl.codec.DeserializationException;
-import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
-import org.opendaylight.yangtools.yang.model.api.AugmentationSchema;
-import org.opendaylight.yangtools.yang.model.api.AugmentationTarget;
-import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaContextListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Function;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
-
-public class BindingToNormalizedNodeCodec implements SchemaContextListener {
+public class BindingToNormalizedNodeCodec implements SchemaContextListener,AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(BindingToNormalizedNodeCodec.class);
private final BindingIndependentMappingService bindingToLegacy;
+ private final BindingNormalizedNodeCodecRegistry codecRegistry;
private DataNormalizer legacyToNormalized;
+ private final GeneratedClassLoadingStrategy classLoadingStrategy;
- public BindingToNormalizedNodeCodec(final BindingIndependentMappingService mappingService) {
+ public BindingToNormalizedNodeCodec(final GeneratedClassLoadingStrategy classLoadingStrategy, final BindingIndependentMappingService mappingService, final BindingNormalizedNodeCodecRegistry codecRegistry) {
super();
this.bindingToLegacy = mappingService;
+ this.classLoadingStrategy = classLoadingStrategy;
+ this.codecRegistry = codecRegistry;
+
}
public org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier toNormalized(
final InstanceIdentifier<? extends DataObject> binding) {
-
- // Used instance-identifier codec do not support serialization of last
- // path
- // argument if it is Augmentation (behaviour expected by old datastore)
- // in this case, we explicitly check if last argument is augmentation
- // to process it separately
- if (isAugmentationIdentifier(binding)) {
- return toNormalizedAugmented(binding);
- }
- return toNormalizedImpl(binding);
+ return codecRegistry.toYangInstanceIdentifier(binding);
}
+ @SuppressWarnings({ "unchecked", "rawtypes" })
public Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, NormalizedNode<?, ?>> toNormalizedNode(
final InstanceIdentifier<? extends DataObject> bindingPath, final DataObject bindingObject) {
- return toNormalizedNode(toBindingEntry(bindingPath, bindingObject));
+ return codecRegistry.toNormalizedNode((InstanceIdentifier) bindingPath, bindingObject);
}
public Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, NormalizedNode<?, ?>> toNormalizedNode(
final Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject> binding) {
- Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, CompositeNode> legacyEntry = bindingToLegacy
- .toDataDom(binding);
- Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, NormalizedNode<?, ?>> normalizedEntry = legacyToNormalized
- .toNormalized(legacyEntry);
- LOG.trace("Serialization of {}, Legacy Representation: {}, Normalized Representation: {}", binding,
- legacyEntry, normalizedEntry);
- if (isAugmentation(binding.getKey().getTargetType())) {
-
- for (DataContainerChild<? extends PathArgument, ?> child : ((DataContainerNode<?>) normalizedEntry
- .getValue()).getValue()) {
- if (child instanceof AugmentationNode) {
- ImmutableList<PathArgument> childArgs = ImmutableList.<PathArgument> builder()
- .addAll(normalizedEntry.getKey().getPathArguments()).add(child.getIdentifier()).build();
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier childPath = org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier
- .create(childArgs);
- return toDOMEntry(childPath, child);
- }
- }
-
- }
- return normalizedEntry;
-
+ return toNormalizedNode(binding.getKey(),binding.getValue());
}
/**
public Optional<InstanceIdentifier<? extends DataObject>> toBinding(
final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized)
throws DeserializationException {
-
- PathArgument lastArgument = Iterables.getLast(normalized.getPathArguments());
- // Used instance-identifier codec do not support serialization of last
- // path
- // argument if it is AugmentationIdentifier (behaviour expected by old
- // datastore)
- // in this case, we explicitly check if last argument is augmentation
- // to process it separately
- if (lastArgument instanceof AugmentationIdentifier) {
- return toBindingAugmented(normalized);
- }
- return toBindingImpl(normalized);
- }
-
- private Optional<InstanceIdentifier<? extends DataObject>> toBindingAugmented(
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized)
- throws DeserializationException {
- Optional<InstanceIdentifier<? extends DataObject>> potential = toBindingImpl(normalized);
- // Shorthand check, if codec already supports deserialization
- // of AugmentationIdentifier we will return
- if (potential.isPresent() && isAugmentationIdentifier(potential.get())) {
- return potential;
- }
-
- int normalizedCount = getAugmentationCount(normalized);
- AugmentationIdentifier lastArgument = (AugmentationIdentifier) Iterables.getLast(normalized.getPathArguments());
-
- // Here we employ small trick - Binding-aware Codec injects an pointer
- // to augmentation class
- // if child is referenced - so we will reference child and then shorten
- // path.
- LOG.trace("Looking for candidates to match {}", normalized);
- for (QName child : lastArgument.getPossibleChildNames()) {
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier childPath = normalized.node(child);
- try {
- if (isNotRepresentable(childPath)) {
- LOG.trace("Path {} is not BI-representable, skipping it", childPath);
- continue;
- }
- } catch (DataNormalizationException e) {
- LOG.warn("Failed to denormalize path {}, skipping it", childPath, e);
- continue;
- }
-
- Optional<InstanceIdentifier<? extends DataObject>> baId = toBindingImpl(childPath);
- if (!baId.isPresent()) {
- LOG.debug("No binding-aware identifier found for path {}, skipping it", childPath);
- continue;
- }
-
- InstanceIdentifier<? extends DataObject> potentialPath = shortenToLastAugment(baId.get());
- int potentialAugmentCount = getAugmentationCount(potentialPath);
- if (potentialAugmentCount == normalizedCount) {
- LOG.trace("Found matching path {}", potentialPath);
- return Optional.<InstanceIdentifier<? extends DataObject>> of(potentialPath);
- }
-
- LOG.trace("Skipping mis-matched potential path {}", potentialPath);
- }
-
- LOG.trace("Failed to find augmentation matching {}", normalized);
- return Optional.absent();
- }
-
- private Optional<InstanceIdentifier<? extends DataObject>> toBindingImpl(
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized)
- throws DeserializationException {
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier legacyPath;
-
try {
- if (isNotRepresentable(normalized)) {
- return Optional.absent();
- }
- legacyPath = legacyToNormalized.toLegacy(normalized);
- } catch (DataNormalizationException e) {
- throw new IllegalStateException("Could not denormalize path.", e);
- }
- LOG.trace("InstanceIdentifier Path Deserialization: Legacy representation {}, Normalized representation: {}",
- legacyPath, normalized);
- return Optional.<InstanceIdentifier<? extends DataObject>> of(bindingToLegacy.fromDataDom(legacyPath));
- }
-
- private boolean isNotRepresentable(final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized)
- throws DataNormalizationException {
- DataNormalizationOperation<?> op = findNormalizationOperation(normalized);
- if (op.isMixin() && op.getIdentifier() instanceof NodeIdentifier) {
- return true;
- }
- if (op.isLeaf()) {
- return true;
+ return Optional.<InstanceIdentifier<? extends DataObject>>of(codecRegistry.fromYangInstanceIdentifier(normalized));
+ } catch (IllegalArgumentException e) {
+ return Optional.absent();
}
- return false;
}
- private DataNormalizationOperation<?> findNormalizationOperation(
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized)
- throws DataNormalizationException {
- DataNormalizationOperation<?> current = legacyToNormalized.getRootOperation();
- for (PathArgument arg : normalized.getPathArguments()) {
- current = current.getChild(arg);
- }
- return current;
- }
private static final Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject> toBindingEntry(
final org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> key,
key, value);
}
- private static final Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, NormalizedNode<?, ?>> toDOMEntry(
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier key, final NormalizedNode<?, ?> value) {
- return new SimpleEntry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, NormalizedNode<?, ?>>(key,
- value);
- }
-
- public DataObject toBinding(final InstanceIdentifier<?> path, final NormalizedNode<?, ?> normalizedNode)
- throws DeserializationException {
- CompositeNode legacy = null;
- if (isAugmentationIdentifier(path) && normalizedNode instanceof AugmentationNode) {
- QName augIdentifier = BindingReflections.findQName(path.getTargetType());
- ContainerNode virtualNode = Builders.containerBuilder() //
- .withNodeIdentifier(new NodeIdentifier(augIdentifier)) //
- .withChild((DataContainerChild<?, ?>) normalizedNode) //
- .build();
- legacy = (CompositeNode) DataNormalizer.toLegacy(virtualNode);
- } else {
- legacy = (CompositeNode) DataNormalizer.toLegacy(normalizedNode);
- }
-
- return bindingToLegacy.dataObjectFromDataDom(path, legacy);
- }
-
public DataNormalizer getDataNormalizer() {
return legacyToNormalized;
}
+ @SuppressWarnings("unchecked")
public Optional<Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject>> toBinding(
final Entry<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier, ? extends NormalizedNode<?, ?>> normalized)
throws DeserializationException {
- Optional<InstanceIdentifier<? extends DataObject>> potentialPath = toBinding(normalized.getKey());
- if (potentialPath.isPresent()) {
- InstanceIdentifier<? extends DataObject> bindingPath = potentialPath.get();
- DataObject bindingData = toBinding(bindingPath, normalized.getValue());
- if (bindingData == null) {
- LOG.warn("Failed to deserialize {} to Binding format. Binding path is: {}", normalized, bindingPath);
- }
- return Optional.of(toBindingEntry(bindingPath, bindingData));
- } else {
+ try {
+ @SuppressWarnings("rawtypes")
+ Entry binding = codecRegistry.fromNormalizedNode(normalized.getKey(), normalized.getValue());
+ return Optional.<Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject>>fromNullable(binding);
+ } catch (IllegalArgumentException e) {
return Optional.absent();
}
}
@Override
public void onGlobalContextUpdated(final SchemaContext arg0) {
legacyToNormalized = new DataNormalizer(arg0);
+ codecRegistry.onBindingRuntimeContextUpdated(BindingRuntimeContext.create(classLoadingStrategy, arg0));
}
- private org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier toNormalizedAugmented(
- final InstanceIdentifier<?> augPath) {
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier processed = toNormalizedImpl(augPath);
- // If used instance identifier codec added supports for deserialization
- // of last AugmentationIdentifier we will just reuse it
- if (isAugmentationIdentifier(processed)) {
- return processed;
- }
- Optional<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier> additionalSerialized;
- additionalSerialized = toNormalizedAugmentedUsingChildContainers(augPath, processed);
-
- if (additionalSerialized.isPresent()) {
- return additionalSerialized.get();
- }
- additionalSerialized = toNormalizedAugmentedUsingChildLeafs(augPath, processed);
- if (additionalSerialized.isPresent()) {
- return additionalSerialized.get();
- }
- throw new IllegalStateException("Unabled to construct augmentation identfier for " + augPath);
- }
-
- /**
- * Tries to find correct augmentation identifier using children leafs
- *
- * This method uses normalized Instance Identifier of parent node to fetch
- * schema and {@link BindingReflections#getModuleInfo(Class)} to learn about
- * augmentation namespace, specificly, in which module it was defined.
- *
- * Then it uses it to filter all available augmentations for parent by
- * module. After that it walks augmentations in particular module and
- * pick-up first which at least one leaf name matches supplied augmentation.
- * We could do this safely since YANG explicitly states that no any existing
- * augmentations must differ in leaf fully qualified names.
- *
- *
- * @param augPath
- * Binding Aware Path which ends with augment
- * @param parentPath
- * Processed path
- * @return
- */
- private Optional<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier> toNormalizedAugmentedUsingChildLeafs(
- final InstanceIdentifier<?> augPath,
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier parentPath) {
- try {
- DataNormalizationOperation<?> parentOp = legacyToNormalized.getOperation(parentPath);
- if(!parentOp.getDataSchemaNode().isPresent()) {
- return Optional.absent();
- }
- DataSchemaNode parentSchema = parentOp.getDataSchemaNode().get();
- if (parentSchema instanceof AugmentationTarget) {
- Set<AugmentationSchema> augmentations = ((AugmentationTarget) parentSchema).getAvailableAugmentations();
- LOG.info("Augmentations for {}, {}", augPath, augmentations);
- Optional<AugmentationSchema> schema = findAugmentation(augPath.getTargetType(), augmentations);
- if (schema.isPresent()) {
- AugmentationIdentifier augmentationIdentifier = DataNormalizationOperation
- .augmentationIdentifierFrom(schema.get());
- return Optional.of(parentPath.node(augmentationIdentifier));
- }
- }
- } catch (DataNormalizationException e) {
- throw new IllegalArgumentException(e);
- }
- return Optional.absent();
- }
-
- /**
- * Creates instance identifier for augmentation child, tries to serialize it
- * Instance Identifier is then shortened to last augmentation.
- *
- * This is for situations, where underlying codec is implementing hydrogen
- * style DOM APIs (which did not supported {@link AugmentationIdentifier}.)
- *
- * @param augPath
- * @param parentPath
- * Path to parent node
- * @return
- */
- @SuppressWarnings("rawtypes")
- private Optional<org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier> toNormalizedAugmentedUsingChildContainers(
- final InstanceIdentifier<?> augPath,
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier parentPath) {
- for (Class augChild : BindingReflections.getChildrenClasses(augPath.getTargetType())) {
- @SuppressWarnings("unchecked")
- InstanceIdentifier<?> childPath = augPath.child(augChild);
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized = toNormalizedImpl(childPath);
- org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier potentialDiscovered = shortenToLastAugmentation(
- normalized, parentPath);
- if (potentialDiscovered != null) {
- return Optional.of(potentialDiscovered);
- }
- }
- return Optional.absent();
- }
-
- private Optional<AugmentationSchema> findAugmentation(final Class<?> targetType,
- final Set<AugmentationSchema> augmentations) {
- YangModuleInfo moduleInfo;
- try {
- moduleInfo = BindingReflections.getModuleInfo(targetType);
- } catch (Exception e) {
- throw new IllegalStateException(e);
- }
- Iterable<AugmentationSchema> filtered = filteredByModuleInfo(augmentations,
- BindingReflections.getModuleQName(moduleInfo).getModule());
- filtered.toString();
- Set<String> targetTypeGetters = getYangModeledGetters(targetType);
- for (AugmentationSchema schema : filtered) {
- for (DataSchemaNode child : schema.getChildNodes()) {
- String getterName = "get" + BindingMapping.getClassName(child.getQName());
- if (targetTypeGetters.contains(getterName)) {
- return Optional.of(schema);
- }
- }
- }
- return Optional.absent();
- }
-
- private static Iterable<AugmentationSchema> filteredByModuleInfo(final Iterable<AugmentationSchema> augmentations,
- final QNameModule module) {
- return Iterables.filter(augmentations, new Predicate<AugmentationSchema>() {
- @Override
- public boolean apply(final AugmentationSchema schema) {
- final Collection<DataSchemaNode> childNodes = schema.getChildNodes();
- return !childNodes.isEmpty() && module.equals(Iterables.get(childNodes, 0).getQName().getModule());
- }
- });
- }
-
- public static final Set<String> getYangModeledGetters(final Class<?> targetType) {
- HashSet<String> ret = new HashSet<String>();
- for (Method method : targetType.getMethods()) {
- if (isYangModeledGetter(method)) {
- ret.add(method.getName());
- }
- }
- return ret;
- }
-
- /**
- *
- * Returns true if supplied method represent getter for YANG modeled value
- *
- * @param method
- * Method to be tested
- * @return true if method represent getter for YANG Modeled value.
- */
- private static final boolean isYangModeledGetter(final Method method) {
- return !method.getName().equals("getClass") && !method.getName().equals("getImplementedInterface")
- && method.getName().startsWith("get") && method.getParameterTypes().length == 0;
- }
-
- private org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier shortenToLastAugmentation(
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized,
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier parentPath) {
- int parentSize = Iterables.size(parentPath.getPathArguments());
- int position = 0;
- int foundPosition = -1;
- for (PathArgument arg : normalized.getPathArguments()) {
- position++;
- if (arg instanceof AugmentationIdentifier) {
- foundPosition = position;
- }
- }
- if (foundPosition > 0 && foundPosition > parentSize) {
- Iterable<PathArgument> shortened = Iterables.limit(normalized.getPathArguments(), foundPosition);
- return org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.create(shortened);
- }
- return null;
- }
-
- private InstanceIdentifier<? extends DataObject> shortenToLastAugment(
- final InstanceIdentifier<? extends DataObject> binding) {
- int position = 0;
- int foundPosition = -1;
- for (org.opendaylight.yangtools.yang.binding.InstanceIdentifier.PathArgument arg : binding.getPathArguments()) {
- position++;
- if (isAugmentation(arg.getType())) {
- foundPosition = position;
- }
- }
- return InstanceIdentifier.create(Iterables.limit(binding.getPathArguments(), foundPosition));
- }
-
- private org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier toNormalizedImpl(
- final InstanceIdentifier<? extends DataObject> binding) {
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier legacyPath = bindingToLegacy
- .toDataDom(binding);
- final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier normalized = legacyToNormalized
- .toNormalized(legacyPath);
- return normalized;
- }
-
- private static boolean isAugmentation(final Class<? extends DataObject> type) {
- return Augmentation.class.isAssignableFrom(type);
- }
-
- private static boolean isAugmentationIdentifier(final InstanceIdentifier<?> potential) {
- return Augmentation.class.isAssignableFrom(potential.getTargetType());
- }
-
- private boolean isAugmentationIdentifier(final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier processed) {
- return Iterables.getLast(processed.getPathArguments()) instanceof AugmentationIdentifier;
- }
-
- private static int getAugmentationCount(final InstanceIdentifier<?> potential) {
- int count = 0;
- for (org.opendaylight.yangtools.yang.binding.InstanceIdentifier.PathArgument arg : potential.getPathArguments()) {
- if (isAugmentation(arg.getType())) {
- count++;
- }
-
- }
- return count;
- }
-
- private static int getAugmentationCount(final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier potential) {
- int count = 0;
- for (PathArgument arg : potential.getPathArguments()) {
- if (arg instanceof AugmentationIdentifier) {
- count++;
- }
- }
- return count;
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
public <T extends DataObject> Function<Optional<NormalizedNode<?, ?>>, Optional<T>> deserializeFunction(final InstanceIdentifier<T> path) {
- return new DeserializeFunction(this, path);
- }
-
- private static class DeserializeFunction<T extends DataObject> implements Function<Optional<NormalizedNode<?, ?>>, Optional<T>> {
-
- private final BindingToNormalizedNodeCodec codec;
- private final InstanceIdentifier<?> path;
-
- public DeserializeFunction(final BindingToNormalizedNodeCodec codec, final InstanceIdentifier<?> path) {
- super();
- this.codec = Preconditions.checkNotNull(codec, "Codec must not be null");
- this.path = Preconditions.checkNotNull(path, "Path must not be null");
- }
-
- @SuppressWarnings("rawtypes")
- @Nullable
- @Override
- public Optional apply(@Nullable final Optional<NormalizedNode<?, ?>> normalizedNode) {
- if (normalizedNode.isPresent()) {
- final DataObject dataObject;
- try {
- dataObject = codec.toBinding(path, normalizedNode.get());
- } catch (DeserializationException e) {
- LOG.warn("Failed to create dataobject from node {}", normalizedNode.get(), e);
- throw new IllegalStateException("Failed to create dataobject", e);
- }
-
- if (dataObject != null) {
- return Optional.of(dataObject);
- }
- }
- return Optional.absent();
- }
+ return codecRegistry.deserializeFunction(path);
}
/**
}
return currentOp.createDefault(path.getLastPathArgument());
}
+
+ public BindingIndependentMappingService getLegacy() {
+ return bindingToLegacy;
+ }
+
+ @Override
+ public void close() throws Exception {
+ // NOOP Intentionally
+ }
}
*/
package org.opendaylight.controller.md.sal.binding.impl;
+import com.google.common.base.Function;
+import com.google.common.util.concurrent.AsyncFunction;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
-
import org.opendaylight.controller.md.sal.common.api.RegistrationListener;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.common.RpcResultBuilder;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Function;
-import com.google.common.util.concurrent.AsyncFunction;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-
@SuppressWarnings("deprecation")
public class ForwardedBackwardsCompatibleDataBroker extends AbstractForwardedDataBroker implements DataProviderService, AutoCloseable {
private final ListeningExecutorService executorService;
public ForwardedBackwardsCompatibleDataBroker(final DOMDataBroker domDataBroker,
- final BindingIndependentMappingService mappingService, final SchemaService schemaService,final ListeningExecutorService executor) {
+ final BindingToNormalizedNodeCodec mappingService, final SchemaService schemaService,final ListeningExecutorService executor) {
super(domDataBroker, mappingService,schemaService);
executorService = executor;
LOG.info("ForwardedBackwardsCompatibleBroker started.");
import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
import org.opendaylight.controller.md.sal.dom.api.DOMDataBroker;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
/**
* The DataBrokerImpl simply defers to the DOMDataBroker for all its operations.
*/
public class ForwardedBindingDataBroker extends AbstractForwardedDataBroker implements DataBroker {
- public ForwardedBindingDataBroker(final DOMDataBroker domDataBroker, final BindingIndependentMappingService mappingService, final SchemaService schemaService) {
- super(domDataBroker, mappingService,schemaService);
+ public ForwardedBindingDataBroker(final DOMDataBroker domDataBroker, final BindingToNormalizedNodeCodec codec, final SchemaService schemaService) {
+ super(domDataBroker, codec,schemaService);
}
@Override
*/
package org.opendaylight.controller.sal.binding.codegen.impl;
+import com.google.common.util.concurrent.ForwardingBlockingQueue;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
-
import javassist.ClassPool;
-
import org.apache.commons.lang3.StringUtils;
import org.opendaylight.controller.sal.binding.codegen.RuntimeCodeGenerator;
import org.opendaylight.controller.sal.binding.spi.NotificationInvokerFactory;
+import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.util.concurrent.ForwardingBlockingQueue;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
public class SingletonHolder {
private static final Logger logger = LoggerFactory.getLogger(SingletonHolder.class);
public static final ClassPool CLASS_POOL = ClassPool.getDefault();
+ public static final JavassistUtils JAVASSIST = JavassistUtils.forClassPool(CLASS_POOL);
public static final org.opendaylight.controller.sal.binding.codegen.impl.RuntimeCodeGenerator RPC_GENERATOR_IMPL = new org.opendaylight.controller.sal.binding.codegen.impl.RuntimeCodeGenerator(
CLASS_POOL);
public static final RuntimeCodeGenerator RPC_GENERATOR = RPC_GENERATOR_IMPL;
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sal.binding.impl;\r
-\r
-import org.opendaylight.controller.config.yang.md.sal.binding.impl.Data;\r
-import org.opendaylight.controller.config.yang.md.sal.binding.impl.DataBrokerImplRuntimeMXBean;\r
-import org.opendaylight.controller.config.yang.md.sal.binding.impl.DataBrokerImplRuntimeRegistration;\r
-import org.opendaylight.controller.config.yang.md.sal.binding.impl.DataBrokerImplRuntimeRegistrator;\r
-import org.opendaylight.controller.config.yang.md.sal.binding.impl.Transactions;\r
-import org.opendaylight.controller.sal.binding.impl.connect.dom.BindingIndependentConnector;\r
-\r
-public class RootDataBrokerImpl extends DataBrokerImpl implements DataBrokerImplRuntimeMXBean {\r
-\r
- private final Transactions transactions = new Transactions();\r
- private final Data data = new Data();\r
- private BindingIndependentConnector bindingIndependentConnector;\r
- private DataBrokerImplRuntimeRegistration runtimeBeanRegistration;\r
-\r
- public BindingIndependentConnector getBindingIndependentConnector() {\r
- return bindingIndependentConnector;\r
- }\r
-\r
- public Transactions getTransactions() {\r
- transactions.setCreated(getCreatedTransactionsCount().get());\r
- transactions.setSubmitted(getSubmittedTransactionsCount().get());\r
- transactions.setSuccessful(getFinishedTransactionsCount().get());\r
- transactions.setFailed(getFailedTransactionsCount().get());\r
- return transactions;\r
- }\r
-\r
- @Override\r
- public Data getData() {\r
- data.setTransactions(getTransactions());\r
- return data;\r
- }\r
-\r
- public void setBindingIndependentConnector(BindingIndependentConnector runtimeMapping) {\r
- this.bindingIndependentConnector = runtimeMapping;\r
- }\r
-\r
- public void registerRuntimeBean(DataBrokerImplRuntimeRegistrator rootRegistrator) {\r
- runtimeBeanRegistration = rootRegistrator.register(this);\r
- }\r
-\r
-}\r
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sal.binding.impl.forward;
-
-import java.util.Collection;
-import java.util.Collections;
-
-import org.opendaylight.controller.sal.binding.impl.RootDataBrokerImpl;
-import org.opendaylight.controller.sal.binding.impl.connect.dom.BindingDomConnectorDeployer;
-import org.opendaylight.controller.sal.binding.impl.connect.dom.BindingIndependentConnector;
-import org.opendaylight.controller.sal.core.api.Provider;
-import org.opendaylight.controller.sal.core.api.Broker.ProviderSession;
-
-public class DomForwardedDataBrokerImpl extends RootDataBrokerImpl implements Provider, DomForwardedBroker {
-
- private BindingIndependentConnector connector;
- private ProviderSession domProviderContext;
-
- public void setConnector(BindingIndependentConnector connector) {
- this.connector = connector;
- }
-
- @Override
- public void onSessionInitiated(ProviderSession session) {
- this.setDomProviderContext(session);
- }
-
- @Override
- public Collection<ProviderFunctionality> getProviderFunctionality() {
- return Collections.emptySet();
- }
-
- @Override
- public BindingIndependentConnector getConnector() {
- return connector;
- }
-
- @Override
- public ProviderSession getDomProviderContext() {
- return domProviderContext;
- }
-
- public void setDomProviderContext(ProviderSession domProviderContext) {
- this.domProviderContext = domProviderContext;
- }
-
- @Override
- public void startForwarding() {
- BindingDomConnectorDeployer.startDataForwarding(getConnector(), this, getDomProviderContext());
- }
-}
identity binding-dom-mapping-service {
base config:service-type;
- config:java-class "org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService";
+ config:java-class "org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec";
}
-
identity binding-broker-impl {
base config:module-type;
config:provided-service sal:binding-broker-osgi-registry;
config:java-name-prefix BindingBrokerImpl;
}
- identity binding-data-broker {
- base config:module-type;
- config:provided-service sal:binding-data-broker;
- config:provided-service sal:binding-data-consumer-broker;
- config:java-name-prefix DataBrokerImpl;
- }
-
identity binding-data-compatible-broker {
base config:module-type;
config:provided-service sal:binding-data-broker;
}
}
- augment "/config:modules/config:module/config:configuration" {
- case binding-data-broker {
- when "/config:modules/config:module/config:type = 'binding-data-broker'";
- container dom-broker {
- uses config:service-ref {
- refine type {
- mandatory true;
- config:required-identity dom:dom-broker-osgi-registry;
- }
- }
- }
-
- container mapping-service {
- uses config:service-ref {
- refine type {
- mandatory true;
- config:required-identity binding-dom-mapping-service;
- }
- }
- }
- }
- }
-
augment "/config:modules/config:module/config:configuration" {
case binding-data-compatible-broker {
when "/config:modules/config:module/config:type = 'binding-data-compatible-broker'";
}
}
- augment "/config:modules/config:module/config:state" {
- case binding-data-broker {
- when "/config:modules/config:module/config:type = 'binding-data-broker'";
- container data {
- uses common:data-state;
- }
- }
- }
augment "/config:modules/config:module/config:state" {
case binding-rpc-broker {
when "/config:modules/config:module/config:type = 'binding-rpc-broker'";
package org.opendaylight.controller.md.sal.binding.impl.test;
import static org.junit.Assert.assertTrue;
-import javassist.ClassPool;
+import javassist.ClassPool;
import org.junit.Test;
import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.test.AbstractSchemaAwareTest;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.Top;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelListKey;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.StreamWriterGenerator;
+import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
+import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
import org.opendaylight.yangtools.sal.binding.generator.impl.RuntimeGeneratedMappingServiceImpl;
+import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.AugmentationIdentifier;
@Override
protected void setupWithSchema(final SchemaContext context) {
mappingService = new RuntimeGeneratedMappingServiceImpl(ClassPool.getDefault());
- codec = new BindingToNormalizedNodeCodec(mappingService);
+ StreamWriterGenerator streamWriter = new StreamWriterGenerator(JavassistUtils.forClassPool(ClassPool.getDefault()));
+ BindingNormalizedNodeCodecRegistry registry = new BindingNormalizedNodeCodecRegistry(streamWriter);
+ codec = new BindingToNormalizedNodeCodec(GeneratedClassLoadingStrategy.getTCCLClassLoadingStrategy(), mappingService, registry);
mappingService.onGlobalContextUpdated(context);
codec.onGlobalContextUpdated(context);
};
*/
package org.opendaylight.controller.md.sal.binding.test;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+
import javassist.ClassPool;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBackwardsCompatibleDataBroker;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBindingDataBroker;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.sal.binding.test.util.MockSchemaService;
import org.opendaylight.controller.sal.core.api.model.SchemaService;
import org.opendaylight.controller.sal.core.spi.data.DOMStore;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.DataObjectSerializerGenerator;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.StreamWriterGenerator;
+import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
+import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
import org.opendaylight.yangtools.sal.binding.generator.impl.RuntimeGeneratedMappingServiceImpl;
+import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
public class DataBrokerTestCustomizer {
private DOMDataBroker domDataBroker;
private final RuntimeGeneratedMappingServiceImpl mappingService;
private final MockSchemaService schemaService;
private ImmutableMap<LogicalDatastoreType, DOMStore> datastores;
+ private final BindingToNormalizedNodeCodec bindingToNormalized ;
public ImmutableMap<LogicalDatastoreType, DOMStore> createDatastores() {
return ImmutableMap.<LogicalDatastoreType, DOMStore>builder()
public DataBrokerTestCustomizer() {
schemaService = new MockSchemaService();
- mappingService = new RuntimeGeneratedMappingServiceImpl(ClassPool.getDefault());
+ ClassPool pool = ClassPool.getDefault();
+ mappingService = new RuntimeGeneratedMappingServiceImpl(pool);
+ DataObjectSerializerGenerator generator = StreamWriterGenerator.create(JavassistUtils.forClassPool(pool));
+ BindingNormalizedNodeCodecRegistry codecRegistry = new BindingNormalizedNodeCodecRegistry(generator);
+ GeneratedClassLoadingStrategy loading = GeneratedClassLoadingStrategy.getTCCLClassLoadingStrategy();
+ bindingToNormalized = new BindingToNormalizedNodeCodec(loading, mappingService, codecRegistry);
+ schemaService.registerSchemaContextListener(bindingToNormalized);
}
public DOMStore createConfigurationDatastore() {
}
public DataBroker createDataBroker() {
- return new ForwardedBindingDataBroker(getDOMDataBroker(), getMappingService(), getSchemaService());
+ return new ForwardedBindingDataBroker(getDOMDataBroker(), bindingToNormalized, schemaService );
}
public ForwardedBackwardsCompatibleDataBroker createBackwardsCompatibleDataBroker() {
- return new ForwardedBackwardsCompatibleDataBroker(getDOMDataBroker(), getMappingService(), getSchemaService(), MoreExecutors.sameThreadExecutor());
+ return new ForwardedBackwardsCompatibleDataBroker(getDOMDataBroker(), bindingToNormalized, getSchemaService(), MoreExecutors.sameThreadExecutor());
}
-
private SchemaService getSchemaService() {
return schemaService;
}
import java.util.Collections;
import java.util.List;
import java.util.Map;
-
import org.junit.Test;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
*
* @throws Exception
*/
- @Test( timeout = 15000)
+ @Test()
public void testAugmentSerialization() throws Exception {
baDataService.registerDataChangeListener(NODES_INSTANCE_ID_BA, this);
testNodeRemove();
}
- private <T extends Augmentation<Node>> Node createTestNode(Class<T> augmentationClass, T augmentation) {
+ private <T extends Augmentation<Node>> Node createTestNode(final Class<T> augmentationClass, final T augmentation) {
NodeBuilder nodeBuilder = new NodeBuilder();
nodeBuilder.setId(new NodeId(NODE_ID));
nodeBuilder.setKey(NODE_KEY);
return nodeBuilder.build();
}
- private DataModificationTransaction commitNodeAndVerifyTransaction(Node original) throws Exception {
+ private DataModificationTransaction commitNodeAndVerifyTransaction(final Node original) throws Exception {
DataModificationTransaction transaction = baDataService.beginTransaction();
transaction.putOperationalData(NODE_INSTANCE_ID_BA, original);
RpcResult<TransactionStatus> result = transaction.commit().get();
assertNull(node);
}
- private AugmentationVerifier<Node> verifyNode(Nodes nodes, Node original) {
+ private AugmentationVerifier<Node> verifyNode(final Nodes nodes, final Node original) {
assertNotNull(nodes);
assertNotNull(nodes.getNode());
assertEquals(1, nodes.getNode().size());
return new AugmentationVerifier<Node>(readedNode);
}
- private void assertBindingIndependentVersion(org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier nodeId) {
+ private void assertBindingIndependentVersion(final org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier nodeId) {
CompositeNode node = biDataService.readOperationalData(nodeId);
assertNotNull(node);
}
return nodeMeterStatistics(10, false);
}
- private NodeMeterStatistics nodeMeterStatistics(int count, boolean setDuration) {
+ private NodeMeterStatistics nodeMeterStatistics(final int count, final boolean setDuration) {
NodeMeterStatisticsBuilder nmsb = new NodeMeterStatisticsBuilder();
MeterStatisticsBuilder meterStats = new MeterStatisticsBuilder();
}
@Override
- public void onDataChanged(DataChangeEvent<InstanceIdentifier<?>, DataObject> change) {
+ public void onDataChanged(final DataChangeEvent<InstanceIdentifier<?>, DataObject> change) {
receivedChangeEvent = change;
}
import static com.google.common.base.Preconditions.checkState;
+import com.google.common.annotations.Beta;
+import com.google.common.collect.ClassToInstanceMap;
+import com.google.common.collect.ImmutableClassToInstanceMap;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.MutableClassToInstanceMap;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
import java.util.Set;
import java.util.concurrent.Future;
-
import javassist.ClassPool;
-
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
+import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBackwardsCompatibleDataBroker;
import org.opendaylight.controller.md.sal.binding.impl.ForwardedBindingDataBroker;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.sal.dom.broker.BrokerImpl;
import org.opendaylight.controller.sal.dom.broker.MountPointManagerImpl;
import org.opendaylight.controller.sal.dom.broker.impl.SchemaAwareRpcBroker;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.DataObjectSerializerGenerator;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.StreamWriterGenerator;
+import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
import org.opendaylight.yangtools.sal.binding.generator.impl.ModuleInfoBackedContext;
import org.opendaylight.yangtools.sal.binding.generator.impl.RuntimeGeneratedMappingServiceImpl;
+import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
import org.opendaylight.yangtools.yang.binding.YangModuleInfo;
import org.opendaylight.yangtools.yang.binding.util.BindingReflections;
import org.opendaylight.yangtools.yang.common.QName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.annotations.Beta;
-import com.google.common.collect.ClassToInstanceMap;
-import com.google.common.collect.ImmutableClassToInstanceMap;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.MutableClassToInstanceMap;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
@Beta
public class BindingTestContext implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(BindingTestContext.class);
private RuntimeGeneratedMappingServiceImpl mappingServiceImpl;
+ private BindingToNormalizedNodeCodec codec;
private DomForwardedBindingBrokerImpl baBrokerImpl;
private DataBrokerImpl baDataImpl;
public void startNewDataBroker() {
checkState(executor != null, "Executor needs to be set");
checkState(newDOMDataBroker != null, "DOM Data Broker must be set");
- dataBroker = new ForwardedBindingDataBroker(newDOMDataBroker, mappingServiceImpl, mockSchemaService);
+ dataBroker = new ForwardedBindingDataBroker(newDOMDataBroker, codec, mockSchemaService);
}
public void startNewDomDataBroker() {
checkState(classPool != null, "ClassPool needs to be present");
mappingServiceImpl = new RuntimeGeneratedMappingServiceImpl(classPool);
mockSchemaService.registerSchemaContextListener(mappingServiceImpl);
+
+ DataObjectSerializerGenerator generator = StreamWriterGenerator.create(JavassistUtils.forClassPool(classPool));
+ BindingNormalizedNodeCodecRegistry codecRegistry = new BindingNormalizedNodeCodecRegistry(generator);
+ GeneratedClassLoadingStrategy loading = GeneratedClassLoadingStrategy.getTCCLClassLoadingStrategy();
+ codec = new BindingToNormalizedNodeCodec(loading, mappingServiceImpl, codecRegistry);
+ mockSchemaService.registerSchemaContextListener(codec);
}
private void updateYangSchema(final ImmutableSet<YangModuleInfo> moduleInfos) {
}
public void startNewBindingDataBroker() {
- ForwardedBackwardsCompatibleDataBroker forwarded = new ForwardedBackwardsCompatibleDataBroker(newDOMDataBroker, mappingServiceImpl,mockSchemaService, executor);
+ ForwardedBackwardsCompatibleDataBroker forwarded = new ForwardedBackwardsCompatibleDataBroker(newDOMDataBroker, codec,mockSchemaService, executor);
baData = forwarded;
}
mavenBundle(YANGTOOLS, "binding-generator-api").versionAsInProject(), mavenBundle(YANGTOOLS,
"binding-generator-spi").versionAsInProject(), //
mavenBundle(YANGTOOLS, "binding-generator-impl").versionAsInProject(),
+ mavenBundle(YANGTOOLS, "binding-data-codec").versionAsInProject(),
mavenBundle(YANGTOOLS + ".thirdparty", "antlr4-runtime-osgi-nohead").versionAsInProject(), // //
mavenBundle(CONTROLLER, "sal-core-api").versionAsInProject().update(), //
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
+import com.google.inject.Inject;
import java.util.concurrent.Future;
-
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker.ConsumerContext;
import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeBuilder;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
-import com.google.inject.Inject;
-
public class DataServiceTest extends AbstractTest {
protected DataBrokerService consumerDataService;
public void setUp() throws Exception {
}
+ /*
+ *
+ * Ignored because the classes here are constructed by a
+ * different class loader than the one MD-SAL runs in, so
+ * this code executes from a different classloader.
+ *
+ */
@Test
+ @Ignore
public void test() throws Exception {
BindingAwareConsumer consumer1 = new BindingAwareConsumer() {
@Override
- public void onSessionInitialized(ConsumerContext session) {
+ public void onSessionInitialized(final ConsumerContext session) {
consumerDataService = session.getSALService(DataBrokerService.class);
}
};
DataModificationTransaction transaction = consumerDataService.beginTransaction();
assertNotNull(transaction);
- NodeRef node1 = createNodeRef("0");
- DataObject node = consumerDataService.readConfigurationData(node1.getValue());
+ InstanceIdentifier<Node> node1 = createNodeRef("0");
+ DataObject node = consumerDataService.readConfigurationData(node1);
assertNull(node);
Node nodeData1 = createNode("0");
- transaction.putConfigurationData(node1.getValue(), nodeData1);
+ transaction.putConfigurationData(node1, nodeData1);
Future<RpcResult<TransactionStatus>> commitResult = transaction.commit();
assertNotNull(commitResult);
assertNotNull(result.getResult());
assertEquals(TransactionStatus.COMMITED, result.getResult());
- Node readedData = (Node) consumerDataService.readConfigurationData(node1.getValue());
+ Node readedData = (Node) consumerDataService.readConfigurationData(node1);
assertNotNull(readedData);
assertEquals(nodeData1.getKey(), readedData.getKey());
DataModificationTransaction transaction2 = consumerDataService.beginTransaction();
assertNotNull(transaction);
- transaction2.removeConfigurationData(node1.getValue());
+ transaction2.removeConfigurationData(node1);
Future<RpcResult<TransactionStatus>> commitResult2 = transaction2.commit();
assertNotNull(commitResult2);
assertNotNull(result2.getResult());
assertEquals(TransactionStatus.COMMITED, result2.getResult());
- DataObject readedData2 = consumerDataService.readConfigurationData(node1.getValue());
+ DataObject readedData2 = consumerDataService.readConfigurationData(node1);
assertNull(readedData2);
}
- private static NodeRef createNodeRef(String string) {
+ private static InstanceIdentifier<Node> createNodeRef(final String string) {
NodeKey key = new NodeKey(new NodeId(string));
- InstanceIdentifier<Node> path = InstanceIdentifier.builder(Nodes.class).child(Node.class, key).build();
-
- return new NodeRef(path);
+ return InstanceIdentifier.builder(Nodes.class).child(Node.class, key).build();
}
- private static Node createNode(String string) {
+ private static Node createNode(final String string) {
NodeBuilder ret = new NodeBuilder();
NodeId id = new NodeId(string);
ret.setKey(new NodeKey(id));
<data xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<modules xmlns="urn:opendaylight:params:xml:ns:yang:controller:config">
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">
- prefix:schema-service-singleton
- </type>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">prefix:schema-service-singleton</type>
<name>yang-schema-service</name>
</module>
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">
- prefix:hash-map-data-store
- </type>
- <name>hash-map-data-store</name>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">prefix:runtime-generated-mapping</type>
+ <name>runtime-mapping-singleton</name>
</module>
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">
- prefix:dom-broker-impl
- </type>
- <name>dom-broker</name>
- <data-store xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">
- <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">
- dom:dom-data-store
- </type>
- <name>ref_hash-map-data-store</name>
- </data-store>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">prefix:binding-notification-broker</type>
+ <name>binding-notification-broker</name>
</module>
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- prefix:binding-broker-impl
- </type>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">prefix:binding-broker-impl</type>
<name>binding-broker-impl</name>
- <notification-service
- xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
- binding:binding-notification-service
- </type>
- <name>ref_binding-notification-broker</name>
+ <notification-service xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-notification-service</type>
+ <name>binding-notification-broker</name>
</notification-service>
<data-broker xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
- binding:binding-data-broker
- </type>
- <name>ref_binding-data-broker</name>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-data-broker</type>
+ <name>binding-data-broker</name>
</data-broker>
</module>
+ <!--
+ Tree-based in-memory data store. This is the data store which is currently
+ recommended for single-node deployments.
+ -->
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- prefix:runtime-generated-mapping
- </type>
- <name>runtime-mapping-singleton</name>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">prefix:dom-inmemory-data-broker</type>
+ <name>inmemory-data-broker</name>
+ <schema-service>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:schema-service</type>
+ <name>yang-schema-service</name>
+ </schema-service>
</module>
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- prefix:binding-notification-broker
- </type>
- <name>binding-notification-broker</name>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom:impl">prefix:dom-broker-impl</type>
+ <name>inmemory-dom-broker</name>
+ <async-data-broker>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:dom-async-data-broker</type>
+ <name>inmemory-data-broker</name>
+ </async-data-broker>
</module>
<module>
- <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- prefix:binding-data-broker
- </type>
- <name>binding-data-broker</name>
- <dom-broker xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">
- dom:dom-broker-osgi-registry
- </type>
- <name>ref_dom-broker</name>
- </dom-broker>
- <mapping-service xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- binding:binding-dom-mapping-service
- </type>
- <name>ref_runtime-mapping-singleton</name>
- </mapping-service>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">prefix:binding-data-compatible-broker</type>
+ <name>inmemory-binding-data-broker</name>
+ <dom-async-broker xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:dom-broker-osgi-registry</type>
+ <name>dom-broker</name>
+ </dom-async-broker>
+ <binding-mapping-service xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">binding:binding-dom-mapping-service</type>
+ <name>runtime-mapping-singleton</name>
+ </binding-mapping-service>
+ </module>
+ <module>
+ <type xmlns:prefix="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">prefix:binding-forwarded-data-broker</type>
+ <name>binding-async-data-broker</name>
+ <binding-forwarded-data-broker xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
+ <dom-async-broker>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:dom-broker-osgi-registry</type>
+ <name>dom-broker</name>
+ </dom-async-broker>
+ <binding-mapping-service>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">binding:binding-dom-mapping-service</type>
+ <name>runtime-mapping-singleton</name>
+ </binding-mapping-service>
+ </binding-forwarded-data-broker>
</module>
</modules>
<services xmlns="urn:opendaylight:params:xml:ns:yang:controller:config">
<service>
- <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">
- dom:schema-service
- </type>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:schema-service</type>
<instance>
- <name>ref_yang-schema-service</name>
- <provider>
- /config/modules/module[name='schema-service-singleton']/instance[name='yang-schema-service']
- </provider>
+ <name>yang-schema-service</name>
+ <provider>/modules/module[type='schema-service-singleton'][name='yang-schema-service']</provider>
</instance>
</service>
<service>
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
- binding:binding-notification-service
- </type>
+ <type xmlns:binding-impl="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">binding-impl:binding-dom-mapping-service</type>
<instance>
- <name>ref_binding-notification-broker</name>
- <provider>
- /config/modules/module[name='binding-notification-broker']/instance[name='binding-notification-broker']
- </provider>
+ <name>runtime-mapping-singleton</name>
+ <provider>/modules/module[type='runtime-generated-mapping'][name='runtime-mapping-singleton']</provider>
</instance>
</service>
<service>
- <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">
- dom:dom-data-store
- </type>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-notification-service</type>
<instance>
- <name>ref_hash-map-data-store</name>
- <provider>
- /config/modules/module[name='hash-map-data-store']/instance[name='hash-map-data-store']
- </provider>
+ <name>binding-notification-broker</name>
+ <provider>/modules/module[type='binding-notification-broker'][name='binding-notification-broker']</provider>
</instance>
</service>
-
<service>
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
- binding:binding-broker-osgi-registry
- </type>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-broker-osgi-registry</type>
<instance>
- <name>ref_binding-broker-impl</name>
- <provider>
- /config/modules/module[name='binding-broker-impl']/instance[name='binding-broker-impl']
- </provider>
+ <name>binding-osgi-broker</name>
+ <provider>/modules/module[type='binding-broker-impl'][name='binding-broker-impl']</provider>
</instance>
</service>
<service>
<provider>/modules/module[type='binding-broker-impl'][name='binding-broker-impl']</provider>
</instance>
</service>
+
+ <service>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:dom-broker-osgi-registry</type>
+ <instance>
+ <name>dom-broker</name>
+ <provider>/modules/module[type='dom-broker-impl'][name='inmemory-dom-broker']</provider>
+ </instance>
+ </service>
+
<service>
- <type xmlns:binding-impl="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding:impl">
- binding-impl:binding-dom-mapping-service
- </type>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-data-broker</type>
<instance>
- <name>ref_runtime-mapping-singleton</name>
- <provider>
- /config/modules/module[name='runtime-generated-mapping']/instance[name='runtime-mapping-singleton']
- </provider>
+ <name>binding-data-broker</name>
+ <provider>/modules/module[type='binding-data-compatible-broker'][name='inmemory-binding-data-broker']</provider>
</instance>
</service>
+
<service>
- <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">
- dom:dom-broker-osgi-registry
- </type>
+ <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">binding:binding-async-data-broker</type>
<instance>
- <name>ref_dom-broker</name>
- <provider>/config/modules/module[name='dom-broker-impl']/instance[name='dom-broker']
- </provider>
+ <name>binding-data-broker</name>
+ <provider>/modules/module[type='binding-forwarded-data-broker'][name='binding-async-data-broker']</provider>
</instance>
</service>
+
<service>
- <type xmlns:binding="urn:opendaylight:params:xml:ns:yang:controller:md:sal:binding">
- binding:binding-data-broker
- </type>
+ <type xmlns:dom="urn:opendaylight:params:xml:ns:yang:controller:md:sal:dom">dom:dom-async-data-broker</type>
<instance>
- <name>ref_binding-data-broker</name>
- <provider>
- /config/modules/module[name='binding-data-broker']/instance[name='binding-data-broker']
- </provider>
+ <name>inmemory-data-broker</name>
+ <provider>/modules/module[type='dom-inmemory-data-broker'][name='inmemory-data-broker']</provider>
</instance>
</service>
</services>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>yang-parser-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>netconf-util</artifactId>
+ </dependency>
<dependency>
<groupId>xmlunit</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.1</version>
</dependency>
+
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
</dependencies>
</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorPath;
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.dispatch.BoundedMailbox;
+import akka.dispatch.MailboxType;
+import akka.dispatch.MessageQueue;
+import akka.dispatch.ProducesMessageQueue;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Preconditions;
+import com.typesafe.config.Config;
+import org.opendaylight.controller.common.reporting.MetricsReporter;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+
+public class MeteredBoundedMailbox implements MailboxType, ProducesMessageQueue<BoundedMailbox.MessageQueue> {
+
+ private MeteredMessageQueue queue;
+ private Integer capacity;
+ private FiniteDuration pushTimeOut;
+ private ActorPath actorPath;
+ private MetricsReporter reporter;
+
+ private final String QUEUE_SIZE = "queue-size";
+ private final Long DEFAULT_TIMEOUT = 10L;
+
+ public MeteredBoundedMailbox(ActorSystem.Settings settings, Config config) {
+ Preconditions.checkArgument( config.hasPath("mailbox-capacity"), "Missing configuration [mailbox-capacity]" );
+ this.capacity = config.getInt("mailbox-capacity");
+ Preconditions.checkArgument( this.capacity > 0, "mailbox-capacity must be > 0");
+
+ Long timeout = -1L;
+ if ( config.hasPath("mailbox-push-timeout-time") ){
+ timeout = config.getDuration("mailbox-push-timeout-time", TimeUnit.NANOSECONDS);
+ } else {
+ timeout = DEFAULT_TIMEOUT;
+ }
+ Preconditions.checkArgument( timeout > 0, "mailbox-push-timeout-time must be > 0");
+ this.pushTimeOut = new FiniteDuration(timeout, TimeUnit.NANOSECONDS);
+
+ reporter = MetricsReporter.getInstance();
+ }
+
+
+ @Override
+ public MessageQueue create(final scala.Option<ActorRef> owner, scala.Option<ActorSystem> system) {
+ this.queue = new MeteredMessageQueue(this.capacity, this.pushTimeOut);
+ monitorQueueSize(owner, this.queue);
+ return this.queue;
+ }
+
+ private void monitorQueueSize(scala.Option<ActorRef> owner, final MeteredMessageQueue monitoredQueue) {
+ if (owner.isEmpty()) {
+ return; //there's no actor to monitor
+ }
+ actorPath = owner.get().path();
+ MetricRegistry registry = reporter.getMetricsRegistry();
+
+ String actorName = registry.name(actorPath.toString(), QUEUE_SIZE);
+
+ if (registry.getMetrics().containsKey(actorName))
+ return; //already registered
+
+ reporter.getMetricsRegistry().register(actorName,
+ new Gauge<Integer>() {
+ @Override
+ public Integer getValue() {
+ return monitoredQueue.size();
+ }
+ });
+ }
+
+
+ public static class MeteredMessageQueue extends BoundedMailbox.MessageQueue {
+
+ public MeteredMessageQueue(int capacity, FiniteDuration pushTimeOut) {
+ super(capacity, pushTimeOut);
+ }
+ }
+
+}
+
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.reporting;
+
+import com.codahale.metrics.JmxReporter;
+import com.codahale.metrics.MetricRegistry;
+
+/**
+ * Maintains metrics registry that is provided to reporters.
+ * At the moment only one reporter exists {@code JmxReporter}.
+ * More reporters can be added.
+ * <p/>
+ * The consumers of this class will only be interested in {@code MetricsRegistry}
+ * where metrics for that consumer gets stored.
+ */
+public class MetricsReporter implements AutoCloseable{
+
+ private final MetricRegistry METRICS_REGISTRY = new MetricRegistry();
+ private final String DOMAIN = "org.opendaylight.controller";
+
+ public final JmxReporter jmxReporter = JmxReporter.forRegistry(METRICS_REGISTRY).inDomain(DOMAIN).build();
+
+ private static MetricsReporter inst = new MetricsReporter();
+
+ private MetricsReporter(){
+ jmxReporter.start();
+ }
+
+ public static MetricsReporter getInstance(){
+ return inst;
+ }
+
+ public MetricRegistry getMetricsRegistry(){
+ return METRICS_REGISTRY;
+ }
+
+ @Override
+ public void close() throws Exception {
+ jmxReporter.close();
+ }
+}
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import org.opendaylight.yangtools.yang.common.QName;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Function;
import com.google.common.base.Objects;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.base.Optional;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.common.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.DeadLetter;
+import akka.actor.Props;
+import akka.actor.UntypedActor;
+import akka.japi.Creator;
+import akka.testkit.JavaTestKit;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import scala.concurrent.duration.FiniteDuration;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class MeteredBoundedMailboxTest {
+
+ private static ActorSystem actorSystem;
+ private final ReentrantLock lock = new ReentrantLock();
+
+ @Before
+ public void setUp() throws Exception {
+ actorSystem = ActorSystem.create("testsystem");
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (actorSystem != null)
+ actorSystem.shutdown();
+ }
+
+ @Test
+ public void test_WhenQueueIsFull_ShouldSendMsgToDeadLetter() throws InterruptedException {
+ final JavaTestKit mockReceiver = new JavaTestKit(actorSystem);
+ actorSystem.eventStream().subscribe(mockReceiver.getRef(), DeadLetter.class);
+
+
+ final FiniteDuration TEN_SEC = new FiniteDuration(10, TimeUnit.SECONDS);
+ String boundedMailBox = actorSystem.name() + ".bounded-mailbox";
+ ActorRef pingPongActor = actorSystem.actorOf(PingPongActor.props(lock).withMailbox(boundedMailBox),
+ "pingpongactor");
+
+ actorSystem.mailboxes().settings();
+ lock.lock();
+ //queue capacity = 10
+ //need to send 12 messages; 1 message is dequeued and actor waits on lock,
+ //2nd to 11th messages are put on the queue
+ //12th message is sent to dead letter.
+ for (int i=0;i<12;i++){
+ pingPongActor.tell("ping", mockReceiver.getRef());
+ }
+
+ mockReceiver.expectMsgClass(TEN_SEC, DeadLetter.class);
+
+ lock.unlock();
+
+ Object[] eleven = mockReceiver.receiveN(11, TEN_SEC);
+ }
+
+ /**
+ * For testing
+ */
+ public static class PingPongActor extends UntypedActor{
+
+ ReentrantLock lock;
+
+ private PingPongActor(ReentrantLock lock){
+ this.lock = lock;
+ }
+
+ public static Props props(final ReentrantLock lock){
+ return Props.create(new Creator<PingPongActor>(){
+ @Override
+ public PingPongActor create() throws Exception {
+ return new PingPongActor(lock);
+ }
+ });
+ }
+
+ @Override
+ public void onReceive(Object message) throws Exception {
+ lock.lock();
+ if ("ping".equals(message))
+ getSender().tell("pong", getSelf());
+ }
+ }
+}
\ No newline at end of file
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
-package org.opendaylight.controller.remote.rpc.utils;
+package org.opendaylight.controller.xml.codec;
import com.google.common.collect.ImmutableList;
+testsystem {
+
+ bounded-mailbox {
+ mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+ mailbox-capacity = 10
+ mailbox-push-timeout-time = 100ms
+ }
+}
\ No newline at end of file
--- /dev/null
+testsystem {
+
+ bounded-mailbox {
+ mailbox-type = "org.opendaylight.controller.common.actor.MeteredBoundedMailbox"
+ mailbox-capacity = 1000
+ mailbox-push-timeout-time = 10ms
+ }
+}
\ No newline at end of file
<version>1.1-SNAPSHOT</version>
</dependency>
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>
<Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
<Export-package></Export-package>
<Private-Package></Private-Package>
- <Import-Package>!*snappy;!org.jboss.*;*</Import-Package>
+ <Import-Package>!*snappy;!org.jboss.*;!com.jcraft.*;*</Import-Package>
<Embed-Dependency>
sal-clustering-commons;
sal-akka-raft;
+ *metrics*;
!sal*;
!*config-api*;
!*testkit*;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
+
import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardManagerIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListener;
import org.opendaylight.controller.cluster.datastore.messages.RegisterChangeListenerReply;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.util.PropertyUtils;
-import org.opendaylight.yangtools.util.concurrent.SpecialExecutors;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
*/
public class DistributedDataStore implements DOMStore, SchemaContextListener, AutoCloseable {
- private static final Logger
- LOG = LoggerFactory.getLogger(DistributedDataStore.class);
-
- private static final String EXECUTOR_MAX_POOL_SIZE_PROP =
- "mdsal.dist-datastore-executor-pool.size";
- private static final int DEFAULT_EXECUTOR_MAX_POOL_SIZE = 10;
-
- private static final String EXECUTOR_MAX_QUEUE_SIZE_PROP =
- "mdsal.dist-datastore-executor-queue.size";
- private static final int DEFAULT_EXECUTOR_MAX_QUEUE_SIZE = 5000;
+ private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
private final ActorContext actorContext;
private SchemaContext schemaContext;
- /**
- * Executor used to run FutureTask's
- *
- * This is typically used when we need to make a request to an actor and
- * wait for it's response and the consumer needs to be provided a Future.
- */
- private final ListeningExecutorService executor =
- MoreExecutors.listeningDecorator(
- SpecialExecutors.newBlockingBoundedFastThreadPool(
- PropertyUtils.getIntSystemProperty(
- EXECUTOR_MAX_POOL_SIZE_PROP,
- DEFAULT_EXECUTOR_MAX_POOL_SIZE),
- PropertyUtils.getIntSystemProperty(
- EXECUTOR_MAX_QUEUE_SIZE_PROP,
- DEFAULT_EXECUTOR_MAX_QUEUE_SIZE), "DistDataStore"));
-
public DistributedDataStore(ActorSystem actorSystem, String type, ClusterWrapper cluster,
Configuration configuration, InMemoryDOMDataStoreConfigProperties dataStoreProperties) {
Preconditions.checkNotNull(actorSystem, "actorSystem should not be null");
}
+ @SuppressWarnings("unchecked")
@Override
- public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>> ListenerRegistration<L> registerChangeListener(
+ public <L extends AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>>
+ ListenerRegistration<L> registerChangeListener(
YangInstanceIdentifier path, L listener,
AsyncDataBroker.DataChangeScope scope) {
Preconditions.checkNotNull(path, "path should not be null");
Preconditions.checkNotNull(listener, "listener should not be null");
-
LOG.debug("Registering listener: {} for path: {} scope: {}", listener, path, scope);
ActorRef dataChangeListenerActor = actorContext.getActorSystem().actorOf(
String shardName = ShardStrategyFactory.getStrategy(path).findShard(path);
Object result = actorContext.executeLocalShardOperation(shardName,
- new RegisterChangeListener(path, dataChangeListenerActor.path(),
- scope),
- ActorContext.ASK_DURATION
- );
+ new RegisterChangeListener(path, dataChangeListenerActor.path(), scope),
+ ActorContext.ASK_DURATION);
if (result != null) {
RegisterChangeListenerReply reply = (RegisterChangeListenerReply) result;
LOG.debug(
"No local shard for shardName {} was found so returning a noop registration",
shardName);
+
return new NoOpDataChangeListenerRegistration(listener);
}
-
-
-
-
@Override
public DOMStoreTransactionChain createTransactionChain() {
- return new TransactionChainProxy(actorContext, executor, schemaContext);
+ return new TransactionChainProxy(actorContext, schemaContext);
}
@Override
public DOMStoreReadTransaction newReadOnlyTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY,
- executor, schemaContext);
+ schemaContext);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.WRITE_ONLY,
- executor, schemaContext);
+ schemaContext);
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
return new TransactionProxy(actorContext, TransactionProxy.TransactionType.READ_WRITE,
- executor, schemaContext);
+ schemaContext);
}
@Override public void onGlobalContextUpdated(SchemaContext schemaContext) {
new Function<Throwable, SupervisorStrategy.Directive>() {
@Override
public SupervisorStrategy.Directive apply(Throwable t) {
- LOG.warning("Supervisor Strategy of resume applied {}",t);
+ StringBuilder sb = new StringBuilder();
+ for(StackTraceElement element : t.getStackTrace()) {
+ sb.append("\n\tat ")
+ .append(element.toString());
+ }
+ LOG.warning("Supervisor Strategy of resume applied {}",sb.toString());
return SupervisorStrategy.resume();
}
}
} else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
} else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction,DeleteData.fromSerizalizable(message));
+ deleteData(transaction,DeleteData.fromSerializable(message));
} else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction,new ReadyTransaction());
} else if(DataExists.SERIALIZABLE_CLASS.equals(message.getClass())) {
} else if (MergeData.SERIALIZABLE_CLASS.equals(message.getClass())) {
mergeData(transaction, MergeData.fromSerializable(message, schemaContext));
} else if (DeleteData.SERIALIZABLE_CLASS.equals(message.getClass())) {
- deleteData(transaction,DeleteData.fromSerizalizable(message));
+ deleteData(transaction,DeleteData.fromSerializable(message));
} else if (ReadyTransaction.SERIALIZABLE_CLASS.equals(message.getClass())) {
readyTransaction(transaction,new ReadyTransaction());
}else {
public void onFailure(Throwable t) {
LOG.error(t, "An exception happened during abort");
sender
- .tell(new akka.actor.Status.Failure(t), getSelf());
+ .tell(new akka.actor.Status.Failure(t), self);
}
});
}
public void onFailure(Throwable t) {
LOG.error(t, "An exception happened during pre-commit");
sender
- .tell(new akka.actor.Status.Failure(t), getSelf());
+ .tell(new akka.actor.Status.Failure(t), self);
}
});
public void onFailure(Throwable t) {
LOG.error(t, "An exception happened during canCommit");
sender
- .tell(new akka.actor.Status.Failure(t), getSelf());
+ .tell(new akka.actor.Status.Failure(t), self);
}
});
import akka.actor.ActorPath;
import akka.actor.ActorSelection;
+import akka.dispatch.Futures;
+import akka.dispatch.OnComplete;
+import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.SettableFuture;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import scala.concurrent.Future;
+
import java.util.Collections;
import java.util.List;
-import java.util.concurrent.Callable;
/**
* ThreePhaseCommitCohortProxy represents a set of remote cohort proxies
*/
-public class ThreePhaseCommitCohortProxy implements
- DOMStoreThreePhaseCommitCohort{
+public class ThreePhaseCommitCohortProxy implements DOMStoreThreePhaseCommitCohort{
- private static final Logger
- LOG = LoggerFactory.getLogger(DistributedDataStore.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DistributedDataStore.class);
private final ActorContext actorContext;
private final List<ActorPath> cohortPaths;
- private final ListeningExecutorService executor;
private final String transactionId;
-
- public ThreePhaseCommitCohortProxy(ActorContext actorContext,
- List<ActorPath> cohortPaths,
- String transactionId,
- ListeningExecutorService executor) {
-
+ public ThreePhaseCommitCohortProxy(ActorContext actorContext, List<ActorPath> cohortPaths,
+ String transactionId) {
this.actorContext = actorContext;
this.cohortPaths = cohortPaths;
this.transactionId = transactionId;
- this.executor = executor;
}
- @Override public ListenableFuture<Boolean> canCommit() {
+ @Override
+ public ListenableFuture<Boolean> canCommit() {
LOG.debug("txn {} canCommit", transactionId);
- Callable<Boolean> call = new Callable<Boolean>() {
+ Future<Iterable<Object>> combinedFuture =
+ invokeCohorts(new CanCommitTransaction().toSerializable());
+
+ final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+ combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
@Override
- public Boolean call() throws Exception {
- for(ActorPath actorPath : cohortPaths){
-
- Object message = new CanCommitTransaction().toSerializable();
- LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
- ActorSelection cohort = actorContext.actorSelection(actorPath);
-
- try {
- Object response =
- actorContext.executeRemoteOperation(cohort,
- message,
- ActorContext.ASK_DURATION);
-
- if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
- CanCommitTransactionReply reply =
- CanCommitTransactionReply.fromSerializable(response);
- if (!reply.getCanCommit()) {
- return false;
- }
+ public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(failure);
+ return;
+ }
+
+ boolean result = true;
+ for(Object response: responses) {
+ if (response.getClass().equals(CanCommitTransactionReply.SERIALIZABLE_CLASS)) {
+ CanCommitTransactionReply reply =
+ CanCommitTransactionReply.fromSerializable(response);
+ if (!reply.getCanCommit()) {
+ result = false;
+ break;
}
- } catch(RuntimeException e){
- // FIXME : Need to properly handle this
- LOG.error("Unexpected Exception", e);
- return false;
+ } else {
+ LOG.error("Unexpected response type {}", response.getClass());
+ returnFuture.setException(new IllegalArgumentException(
+ String.format("Unexpected response type {}", response.getClass())));
+ return;
}
}
- return true;
+ returnFuture.set(Boolean.valueOf(result));
}
- };
+ }, actorContext.getActorSystem().dispatcher());
+
+ return returnFuture;
+ }
+
+ private Future<Iterable<Object>> invokeCohorts(Object message) {
+ List<Future<Object>> futureList = Lists.newArrayListWithCapacity(cohortPaths.size());
+ for(ActorPath actorPath : cohortPaths) {
+
+ LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
- return executor.submit(call);
+ ActorSelection cohort = actorContext.actorSelection(actorPath);
+
+ futureList.add(actorContext.executeRemoteOperationAsync(cohort, message,
+ ActorContext.ASK_DURATION));
+ }
+
+ return Futures.sequence(futureList, actorContext.getActorSystem().dispatcher());
}
- @Override public ListenableFuture<Void> preCommit() {
+ @Override
+ public ListenableFuture<Void> preCommit() {
LOG.debug("txn {} preCommit", transactionId);
- return voidOperation(new PreCommitTransaction().toSerializable(), PreCommitTransactionReply.SERIALIZABLE_CLASS);
+ return voidOperation(new PreCommitTransaction().toSerializable(),
+ PreCommitTransactionReply.SERIALIZABLE_CLASS, true);
}
- @Override public ListenableFuture<Void> abort() {
+ @Override
+ public ListenableFuture<Void> abort() {
LOG.debug("txn {} abort", transactionId);
- return voidOperation(new AbortTransaction().toSerializable(), AbortTransactionReply.SERIALIZABLE_CLASS);
+
+ // Note - we pass false for propagateException. In the front-end data broker, this method
+ // is called when one of the 3 phases fails with an exception. We'd rather have that
+ // original exception propagated to the client. If our abort fails and we propagate the
+ // exception then that exception will supersede and suppress the original exception. But
+ // it's the original exception that is the root cause and of more interest to the client.
+
+ return voidOperation(new AbortTransaction().toSerializable(),
+ AbortTransactionReply.SERIALIZABLE_CLASS, false);
}
- @Override public ListenableFuture<Void> commit() {
+ @Override
+ public ListenableFuture<Void> commit() {
LOG.debug("txn {} commit", transactionId);
- return voidOperation(new CommitTransaction().toSerializable(), CommitTransactionReply.SERIALIZABLE_CLASS);
+ return voidOperation(new CommitTransaction().toSerializable(),
+ CommitTransactionReply.SERIALIZABLE_CLASS, true);
}
- private ListenableFuture<Void> voidOperation(final Object message, final Class expectedResponseClass){
- Callable<Void> call = new Callable<Void>() {
-
- @Override public Void call() throws Exception {
- for(ActorPath actorPath : cohortPaths){
- ActorSelection cohort = actorContext.actorSelection(actorPath);
-
- LOG.debug("txn {} Sending {} to {}", transactionId, message, actorPath);
-
- try {
- Object response =
- actorContext.executeRemoteOperation(cohort,
- message,
- ActorContext.ASK_DURATION);
-
- if (response != null && !response.getClass()
- .equals(expectedResponseClass)) {
- throw new RuntimeException(
- String.format(
- "did not get the expected response \n\t\t expected : %s \n\t\t actual : %s",
- expectedResponseClass.toString(),
- response.getClass().toString())
- );
+ private ListenableFuture<Void> voidOperation(final Object message,
+ final Class<?> expectedResponseClass, final boolean propagateException) {
+
+ Future<Iterable<Object>> combinedFuture = invokeCohorts(message);
+
+ final SettableFuture<Void> returnFuture = SettableFuture.create();
+
+ combinedFuture.onComplete(new OnComplete<Iterable<Object>>() {
+ @Override
+ public void onComplete(Throwable failure, Iterable<Object> responses) throws Throwable {
+
+ Throwable exceptionToPropagate = failure;
+ if(exceptionToPropagate == null) {
+ for(Object response: responses) {
+ if(!response.getClass().equals(expectedResponseClass)) {
+ exceptionToPropagate = new IllegalArgumentException(
+ String.format("Unexpected response type {}",
+ response.getClass()));
+ break;
}
- } catch(TimeoutException e){
- LOG.error(String.format("A timeout occurred when processing operation : %s", message));
}
}
- return null;
+
+ if(exceptionToPropagate != null) {
+ if(propagateException) {
+ // We don't log the exception here to avoid redundant logging since we're
+ // propagating to the caller in MD-SAL core who will log it.
+ returnFuture.setException(exceptionToPropagate);
+ } else {
+ // Since the caller doesn't want us to propagate the exception we'll also
+ // not log it normally. But it's usually not good to totally silence
+ // exceptions so we'll log it to debug level.
+ LOG.debug(String.format("%s failed", message.getClass().getSimpleName()),
+ exceptionToPropagate);
+ returnFuture.set(null);
+ }
+ } else {
+ returnFuture.set(null);
+ }
}
- };
+ }, actorContext.getActorSystem().dispatcher());
- return executor.submit(call);
+ return returnFuture;
}
public List<ActorPath> getCohortPaths() {
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import com.google.common.util.concurrent.ListeningExecutorService;
-
/**
* TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard
*/
public class TransactionChainProxy implements DOMStoreTransactionChain{
private final ActorContext actorContext;
- private final ListeningExecutorService transactionExecutor;
private final SchemaContext schemaContext;
- public TransactionChainProxy(ActorContext actorContext, ListeningExecutorService transactionExecutor,
- SchemaContext schemaContext) {
+ public TransactionChainProxy(ActorContext actorContext, SchemaContext schemaContext) {
this.actorContext = actorContext;
- this.transactionExecutor = transactionExecutor;
this.schemaContext = schemaContext;
}
@Override
public DOMStoreReadTransaction newReadOnlyTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.READ_ONLY, schemaContext);
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.WRITE_ONLY, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_WRITE, transactionExecutor, schemaContext);
+ TransactionProxy.TransactionType.READ_WRITE, schemaContext);
}
@Override
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.OnComplete;
+
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
-import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
+import com.google.common.util.concurrent.SettableFuture;
+
import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import scala.concurrent.Future;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
/**
private final ActorContext actorContext;
private final Map<String, TransactionContext> remoteTransactionPaths = new HashMap<>();
private final TransactionIdentifier identifier;
- private final ListeningExecutorService executor;
private final SchemaContext schemaContext;
+ private boolean inReadyState;
- public TransactionProxy(
- ActorContext actorContext,
- TransactionType transactionType,
- ListeningExecutorService executor,
- SchemaContext schemaContext
- ) {
+ public TransactionProxy(ActorContext actorContext, TransactionType transactionType,
+ SchemaContext schemaContext) {
this.actorContext = Preconditions.checkNotNull(actorContext, "actorContext should not be null");
this.transactionType = Preconditions.checkNotNull(transactionType, "transactionType should not be null");
- this.executor = Preconditions.checkNotNull(executor, "executor should not be null");
this.schemaContext = Preconditions.checkNotNull(schemaContext, "schemaContext should not be null");
String memberName = actorContext.getCurrentMemberName();
if(memberName == null){
memberName = "UNKNOWN-MEMBER";
}
- this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(counter.getAndIncrement()).build();
+
+ this.identifier = TransactionIdentifier.builder().memberName(memberName).counter(
+ counter.getAndIncrement()).build();
LOG.debug("Created txn {}", identifier);
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read(
final YangInstanceIdentifier path) {
+ Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+ "Read operation on write-only transaction is not allowed");
+
LOG.debug("txn {} read {}", identifier, path);
createTransactionIfMissing(actorContext, path);
return transactionContext(path).readData(path);
}
- @Override public CheckedFuture<Boolean, ReadFailedException> exists(
- YangInstanceIdentifier path) {
+ @Override
+ public CheckedFuture<Boolean, ReadFailedException> exists(YangInstanceIdentifier path) {
+
+ Preconditions.checkState(transactionType != TransactionType.WRITE_ONLY,
+ "Exists operation on write-only transaction is not allowed");
+
LOG.debug("txn {} exists {}", identifier, path);
createTransactionIfMissing(actorContext, path);
return transactionContext(path).dataExists(path);
}
+ private void checkModificationState() {
+ Preconditions.checkState(transactionType != TransactionType.READ_ONLY,
+ "Modification operation on read-only transaction is not allowed");
+        Preconditions.checkState(!inReadyState,
+                "Transaction is sealed - further modifications are not allowed");
+ }
+
@Override
public void write(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ checkModificationState();
+
LOG.debug("txn {} write {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public void merge(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ checkModificationState();
+
LOG.debug("txn {} merge {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public void delete(YangInstanceIdentifier path) {
+ checkModificationState();
+
LOG.debug("txn {} delete {}", identifier, path);
createTransactionIfMissing(actorContext, path);
@Override
public DOMStoreThreePhaseCommitCohort ready() {
+
+ checkModificationState();
+
+ inReadyState = true;
+
List<ActorPath> cohortPaths = new ArrayList<>();
- LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier, remoteTransactionPaths.size());
+ LOG.debug("txn {} Trying to get {} transactions ready for commit", identifier,
+ remoteTransactionPaths.size());
for(TransactionContext transactionContext : remoteTransactionPaths.values()) {
- LOG.debug("txn {} Readying transaction for shard {}", identifier, transactionContext.getShardName());
+ LOG.debug("txn {} Readying transaction for shard {}", identifier,
+ transactionContext.getShardName());
Object result = transactionContext.readyTransaction();
if(result.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)){
- ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(actorContext.getActorSystem(),result);
- String resolvedCohortPath = transactionContext
- .getResolvedCohortPath(reply.getCohortPath().toString());
+ ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(
+ actorContext.getActorSystem(),result);
+ String resolvedCohortPath = transactionContext.getResolvedCohortPath(
+ reply.getCohortPath().toString());
cohortPaths.add(actorContext.actorFor(resolvedCohortPath));
+ } else {
+ LOG.error("Was expecting {} but got {}", ReadyTransactionReply.SERIALIZABLE_CLASS,
+ result.getClass());
}
}
- return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString(), executor);
+ return new ThreePhaseCommitCohortProxy(actorContext, cohortPaths, identifier.toString());
}
@Override
Object response = actorContext.executeShardOperation(shardName,
new CreateTransaction(identifier.toString(),this.transactionType.ordinal() ).toSerializable(),
ActorContext.ASK_DURATION);
- if (response.getClass()
- .equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
+ if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
CreateTransactionReply reply =
CreateTransactionReply.fromSerializable(response);
transactionActor);
remoteTransactionPaths.put(shardName, transactionContext);
+ } else {
+ LOG.error("Was expecting {} but got {}", CreateTransactionReply.SERIALIZABLE_CLASS,
+ response.getClass());
}
- } catch(TimeoutException | PrimaryNotFoundException e){
+ } catch(Exception e){
LOG.error("txn {} Creating NoOpTransaction because of : {}", identifier, e.getMessage());
- remoteTransactionPaths.put(shardName,
- new NoOpTransactionContext(shardName));
+ remoteTransactionPaths.put(shardName, new NoOpTransactionContext(shardName, e));
}
}
this.actor = actor;
}
- @Override public String getShardName() {
+ @Override
+ public String getShardName() {
return shardName;
}
return actor;
}
- @Override public String getResolvedCohortPath(String cohortPath) {
+ @Override
+ public String getResolvedCohortPath(String cohortPath) {
return actorContext.resolvePath(actorPath, cohortPath);
}
- @Override public void closeTransaction() {
- getActor().tell(
- new CloseTransaction().toSerializable(), null);
+ @Override
+ public void closeTransaction() {
+ actorContext.sendRemoteOperationAsync(getActor(), new CloseTransaction().toSerializable());
}
- @Override public Object readyTransaction() {
+ @Override
+ public Object readyTransaction() {
return actorContext.executeRemoteOperation(getActor(),
- new ReadyTransaction().toSerializable(),
- ActorContext.ASK_DURATION
- );
-
+ new ReadyTransaction().toSerializable(), ActorContext.ASK_DURATION);
}
- @Override public void deleteData(YangInstanceIdentifier path) {
- getActor().tell(new DeleteData(path).toSerializable(), null);
+ @Override
+ public void deleteData(YangInstanceIdentifier path) {
+ actorContext.sendRemoteOperationAsync(getActor(), new DeleteData(path).toSerializable() );
}
- @Override public void mergeData(YangInstanceIdentifier path,
- NormalizedNode<?, ?> data) {
- getActor()
- .tell(new MergeData(path, data, schemaContext).toSerializable(),
- null);
+ @Override
+ public void mergeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ actorContext.sendRemoteOperationAsync(getActor(),
+ new MergeData(path, data, schemaContext).toSerializable());
}
@Override
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
final YangInstanceIdentifier path) {
- Callable<Optional<NormalizedNode<?, ?>>> call =
- new Callable<Optional<NormalizedNode<?, ?>>>() {
-
- @Override public Optional<NormalizedNode<?, ?>> call()
- throws Exception {
- Object response = actorContext
- .executeRemoteOperation(getActor(),
- new ReadData(path).toSerializable(),
- ActorContext.ASK_DURATION);
- if (response.getClass()
- .equals(ReadDataReply.SERIALIZABLE_CLASS)) {
- ReadDataReply reply = ReadDataReply
- .fromSerializable(schemaContext, path,
- response);
+ final SettableFuture<Optional<NormalizedNode<?, ?>>> returnFuture = SettableFuture.create();
+
+ OnComplete<Object> onComplete = new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable failure, Object response) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(new ReadFailedException(
+ "Error reading data for path " + path, failure));
+ } else {
+ if (response.getClass().equals(ReadDataReply.SERIALIZABLE_CLASS)) {
+ ReadDataReply reply = ReadDataReply.fromSerializable(schemaContext,
+ path, response);
if (reply.getNormalizedNode() == null) {
- return Optional.absent();
+ returnFuture.set(Optional.<NormalizedNode<?, ?>>absent());
+ } else {
+ returnFuture.set(Optional.<NormalizedNode<?, ?>>of(
+ reply.getNormalizedNode()));
}
- return Optional.<NormalizedNode<?, ?>>of(
- reply.getNormalizedNode());
+ } else {
+ returnFuture.setException(new ReadFailedException(
+ "Invalid response reading data for path " + path));
}
-
- throw new ReadFailedException("Read Failed " + path);
}
- };
+ }
+ };
- return MappingCheckedFuture
- .create(executor.submit(call), ReadFailedException.MAPPER);
- }
+ Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ new ReadData(path).toSerializable(), ActorContext.ASK_DURATION);
+ future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
- @Override public void writeData(YangInstanceIdentifier path,
- NormalizedNode<?, ?> data) {
- getActor()
- .tell(new WriteData(path, data, schemaContext).toSerializable(),
- null);
+ return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
}
- @Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
- final YangInstanceIdentifier path) {
-
- Callable<Boolean> call = new Callable<Boolean>() {
-
- @Override public Boolean call() throws Exception {
- Object o = actorContext.executeRemoteOperation(getActor(),
- new DataExists(path).toSerializable(),
- ActorContext.ASK_DURATION
- );
-
+ @Override
+ public void writeData(YangInstanceIdentifier path, NormalizedNode<?, ?> data) {
+ actorContext.sendRemoteOperationAsync(getActor(),
+ new WriteData(path, data, schemaContext).toSerializable());
+ }
- if (DataExistsReply.SERIALIZABLE_CLASS
- .equals(o.getClass())) {
- return DataExistsReply.fromSerializable(o).exists();
+ @Override
+ public CheckedFuture<Boolean, ReadFailedException> dataExists(
+ final YangInstanceIdentifier path) {
+
+ final SettableFuture<Boolean> returnFuture = SettableFuture.create();
+
+ OnComplete<Object> onComplete = new OnComplete<Object>() {
+ @Override
+ public void onComplete(Throwable failure, Object response) throws Throwable {
+ if(failure != null) {
+ returnFuture.setException(new ReadFailedException(
+ "Error checking exists for path " + path, failure));
+ } else {
+ if (response.getClass().equals(DataExistsReply.SERIALIZABLE_CLASS)) {
+ returnFuture.set(Boolean.valueOf(DataExistsReply.
+ fromSerializable(response).exists()));
+ } else {
+ returnFuture.setException(new ReadFailedException(
+ "Invalid response checking exists for path " + path));
+ }
}
-
- throw new ReadFailedException("Exists Failed " + path);
}
};
- return MappingCheckedFuture
- .create(executor.submit(call), ReadFailedException.MAPPER);
+
+ Future<Object> future = actorContext.executeRemoteOperationAsync(getActor(),
+ new DataExists(path).toSerializable(), ActorContext.ASK_DURATION);
+ future.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
+
+ return MappingCheckedFuture.create(returnFuture, ReadFailedException.MAPPER);
}
}
LOG = LoggerFactory.getLogger(NoOpTransactionContext.class);
private final String shardName;
+ private final Exception failure;
private ActorRef cohort;
- public NoOpTransactionContext(String shardName){
+ public NoOpTransactionContext(String shardName, Exception failure){
this.shardName = shardName;
+ this.failure = failure;
}
- @Override public String getShardName() {
+
+ @Override
+ public String getShardName() {
return shardName;
}
- @Override public String getResolvedCohortPath(String cohortPath) {
+ @Override
+ public String getResolvedCohortPath(String cohortPath) {
return cohort.path().toString();
}
- @Override public void closeTransaction() {
+ @Override
+ public void closeTransaction() {
LOG.warn("txn {} closeTransaction called", identifier);
}
return new ReadyTransactionReply(cohort.path()).toSerializable();
}
- @Override public void deleteData(YangInstanceIdentifier path) {
+ @Override
+ public void deleteData(YangInstanceIdentifier path) {
LOG.warn("txt {} deleteData called path = {}", identifier, path);
}
- @Override public void mergeData(YangInstanceIdentifier path,
+ @Override
+ public void mergeData(YangInstanceIdentifier path,
NormalizedNode<?, ?> data) {
LOG.warn("txn {} mergeData called path = {}", identifier, path);
}
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> readData(
YangInstanceIdentifier path) {
LOG.warn("txn {} readData called path = {}", identifier, path);
- return Futures.immediateCheckedFuture(
- Optional.<NormalizedNode<?, ?>>absent());
+ return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+ "Error reading data for path " + path, failure));
}
@Override public void writeData(YangInstanceIdentifier path,
@Override public CheckedFuture<Boolean, ReadFailedException> dataExists(
YangInstanceIdentifier path) {
LOG.warn("txn {} dataExists called path = {}", identifier, path);
-
- // Returning false instead of an exception to keep this aligned with
- // read
- return Futures.immediateCheckedFuture(false);
+ return Futures.immediateFailedCheckedFuture(new ReadFailedException(
+ "Error checking exists for path " + path, failure));
}
}
.setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path)).build();
}
- public static DeleteData fromSerizalizable(Object serializable){
+ public static DeleteData fromSerializable(Object serializable){
ShardTransactionMessages.DeleteData o = (ShardTransactionMessages.DeleteData) serializable;
return new DeleteData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
}
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
import akka.util.Timeout;
+
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.Configuration;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.messages.FindPrimary;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
private final ClusterWrapper clusterWrapper;
private final Configuration configuration;
- private SchemaContext schemaContext = null;
-
public ActorContext(ActorSystem actorSystem, ActorRef shardManager,
ClusterWrapper clusterWrapper,
Configuration configuration) {
}
}
+ /**
+ * Execute an operation on a remote actor asynchronously.
+ *
+ * @param actor the ActorSelection
+ * @param message the message to send
+     * @param duration the maximum amount of time to wait for a reply to the message
+ * @return a Future containing the eventual result
+ */
+ public Future<Object> executeRemoteOperationAsync(ActorSelection actor, Object message,
+ FiniteDuration duration) {
+
+ LOG.debug("Sending remote message {} to {}", message.getClass().toString(), actor.toString());
+
+ return ask(actor, message, new Timeout(duration));
+ }
+
+ /**
+ * Sends an operation to be executed by a remote actor asynchronously without waiting for a
+ * reply (essentially set and forget).
+ *
+ * @param actor the ActorSelection
+ * @param message the message to send
+ */
+ public void sendRemoteOperationAsync(ActorSelection actor, Object message) {
+ actor.tell(message, ActorRef.noSender());
+ }
+
/**
* Execute an operation on the primary for a given shard
* <p>
store.onGlobalContextUpdated(testSchemaContext);
}
- private FiniteDuration ASK_RESULT_DURATION = Duration.create(3000, TimeUnit.MILLISECONDS);
+ private FiniteDuration ASK_RESULT_DURATION = Duration.create(5000, TimeUnit.MILLISECONDS);
@Test(expected = TestException.class)
package org.opendaylight.controller.cluster.datastore;
-import akka.actor.ActorRef;
+import akka.actor.ActorPath;
+import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.Futures;
+import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-
-import junit.framework.Assert;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.isA;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.times;
-import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.stubbing.Stubber;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.SerializableMessage;
+import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
+import scala.concurrent.duration.FiniteDuration;
-import java.util.Arrays;
-import java.util.concurrent.Executors;
-
-import static org.junit.Assert.assertNotNull;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
public class ThreePhaseCommitCohortProxyTest extends AbstractActorTest {
- private ThreePhaseCommitCohortProxy proxy;
- private Props props;
- private ActorRef actorRef;
- private MockActorContext actorContext;
- private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(
- Executors.newSingleThreadExecutor());
+ @Mock
+ private ActorContext actorContext;
@Before
- public void setUp(){
- props = Props.create(MessageCollectorActor.class);
- actorRef = getSystem().actorOf(props);
- actorContext = new MockActorContext(this.getSystem());
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
- proxy =
- new ThreePhaseCommitCohortProxy(actorContext,
- Arrays.asList(actorRef.path()), "txn-1", executor);
+ doReturn(getSystem()).when(actorContext).getActorSystem();
+ }
+ private ThreePhaseCommitCohortProxy setupProxy(int nCohorts) {
+ List<ActorPath> cohorts = Lists.newArrayList();
+ for(int i = 1; i <= nCohorts; i++) {
+ ActorPath path = getSystem().actorOf(Props.create(MessageCollectorActor.class)).path();
+ cohorts.add(path);
+ doReturn(mock(ActorSelection.class)).when(actorContext).actorSelection(path);
+ }
+
+ return new ThreePhaseCommitCohortProxy(actorContext, cohorts, "txn-1");
}
- @After
- public void tearDown() {
- executor.shutdownNow();
+ private void setupMockActorContext(Class<?> requestType, Object... responses) {
+ Stubber stubber = doReturn(responses[0] instanceof Throwable ? Futures
+ .failed((Throwable) responses[0]) : Futures
+ .successful(((SerializableMessage) responses[0]).toSerializable()));
+
+ for(int i = 1; i < responses.length; i++) {
+ stubber = stubber.doReturn(responses[i] instanceof Throwable ? Futures
+ .failed((Throwable) responses[i]) : Futures
+ .successful(((SerializableMessage) responses[i]).toSerializable()));
+ }
+
+ stubber.when(actorContext).executeRemoteOperationAsync(any(ActorSelection.class),
+ isA(requestType), any(FiniteDuration.class));
+ }
+
+ private void verifyCohortInvocations(int nCohorts, Class<?> requestType) {
+ verify(actorContext, times(nCohorts)).executeRemoteOperationAsync(
+ any(ActorSelection.class), isA(requestType), any(FiniteDuration.class));
+ }
+
+ @Test
+ public void testCanCommitWithOneCohort() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true));
+
+ ListenableFuture<Boolean> future = proxy.canCommit();
+
+ assertEquals("canCommit", true, future.get());
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(false));
+
+ future = proxy.canCommit();
+
+ assertEquals("canCommit", false, future.get());
+
+ verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
}
@Test
- public void testCanCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new CanCommitTransactionReply(true).toSerializable());
+ public void testCanCommitWithMultipleCohorts() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true), new CanCommitTransactionReply(true));
ListenableFuture<Boolean> future = proxy.canCommit();
- Assert.assertTrue(future.get().booleanValue());
+ assertEquals("canCommit", true, future.get());
+ verifyCohortInvocations(2, CanCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test
+ public void testCanCommitWithMultipleCohortsAndOneFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(3);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new CanCommitTransactionReply(true), new CanCommitTransactionReply(false),
+ new CanCommitTransactionReply(true));
+
+ ListenableFuture<Boolean> future = proxy.canCommit();
+
+ assertEquals("canCommit", false, future.get());
+
+ verifyCohortInvocations(3, CanCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCanCommitWithExceptionFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+ proxy.canCommit().get();
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCanCommitWithInvalidResponseType() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CanCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply());
+
+ proxy.canCommit().get();
}
@Test
public void testPreCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new PreCommitTransactionReply().toSerializable());
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
- ListenableFuture<Void> future = proxy.preCommit();
+ setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply());
- future.get();
+ proxy.preCommit().get();
+ verifyCohortInvocations(1, PreCommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testPreCommitWithFailure() throws Exception {
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(PreCommitTransaction.SERIALIZABLE_CLASS,
+ new PreCommitTransactionReply(), new RuntimeException("mock"));
+
+ proxy.preCommit().get();
}
@Test
public void testAbort() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new AbortTransactionReply().toSerializable());
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
- ListenableFuture<Void> future = proxy.abort();
+ setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new AbortTransactionReply());
- future.get();
+ proxy.abort().get();
+ verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test
+ public void testAbortWithFailure() throws Exception {
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(AbortTransaction.SERIALIZABLE_CLASS, new RuntimeException("mock"));
+
+ // The exception should not get propagated.
+ proxy.abort().get();
+
+ verifyCohortInvocations(1, AbortTransaction.SERIALIZABLE_CLASS);
}
@Test
public void testCommit() throws Exception {
- actorContext.setExecuteRemoteOperationResponse(new CommitTransactionReply().toSerializable());
- ListenableFuture<Void> future = proxy.commit();
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+ new CommitTransactionReply());
+
+ proxy.commit().get();
+
+ verifyCohortInvocations(2, CommitTransaction.SERIALIZABLE_CLASS);
+ }
+
+ @Test(expected = ExecutionException.class)
+ public void testCommitWithFailure() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
- future.get();
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new CommitTransactionReply(),
+ new RuntimeException("mock"));
+
+ proxy.commit().get();
+ }
+
+ @Test(expected = ExecutionException.class)
+    public void testCommitWithInvalidResponseType() throws Exception {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(1);
+
+ setupMockActorContext(CommitTransaction.SERIALIZABLE_CLASS, new PreCommitTransactionReply());
+
+ proxy.commit().get();
}
@Test
- public void testGetCohortPaths() throws Exception {
- assertNotNull(proxy.getCohortPaths());
+ public void testGetCohortPaths() {
+
+ ThreePhaseCommitCohortProxy proxy = setupProxy(2);
+
+ List<ActorPath> paths = proxy.getCohortPaths();
+ assertNotNull("getCohortPaths returned null", paths);
+ assertEquals("getCohortPaths size", 2, paths.size());
}
}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+import akka.actor.ActorPath;
import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
import akka.actor.Props;
+import akka.dispatch.Futures;
import com.google.common.base.Optional;
-import com.google.common.util.concurrent.CheckedFuture;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import junit.framework.Assert;
-import org.junit.After;
+
import org.junit.Before;
import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.WRITE_ONLY;
+import static org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType.READ_WRITE;
+
+import org.opendaylight.controller.cluster.datastore.TransactionProxy.TransactionType;
import org.opendaylight.controller.cluster.datastore.exceptions.PrimaryNotFoundException;
import org.opendaylight.controller.cluster.datastore.exceptions.TimeoutException;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.DataExists;
import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
import org.opendaylight.controller.cluster.datastore.messages.MergeData;
-import org.opendaylight.controller.cluster.datastore.messages.PrimaryFound;
+import org.opendaylight.controller.cluster.datastore.messages.ReadData;
import org.opendaylight.controller.cluster.datastore.messages.ReadDataReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.DefaultShardStrategy;
import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategyFactory;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.cluster.datastore.utils.DoNothingActor;
-import org.opendaylight.controller.cluster.datastore.utils.MessageCollectorActor;
-import org.opendaylight.controller.cluster.datastore.utils.MockActorContext;
-import org.opendaylight.controller.cluster.datastore.utils.MockClusterWrapper;
import org.opendaylight.controller.cluster.datastore.utils.MockConfiguration;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-import java.util.List;
-import java.util.concurrent.Executors;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
-import static junit.framework.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.argThat;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.isA;
+
+@SuppressWarnings("resource")
public class TransactionProxyTest extends AbstractActorTest {
+ @SuppressWarnings("serial")
+ static class TestException extends RuntimeException {
+ }
+
+ static interface Invoker {
+ void invoke(TransactionProxy proxy) throws Exception;
+ }
+
private final Configuration configuration = new MockConfiguration();
- private final ActorContext testContext =
- new ActorContext(getSystem(), getSystem().actorOf(Props.create(DoNothingActor.class)), new MockClusterWrapper(), configuration );
+ @Mock
+ private ActorContext mockActorContext;
- private final ListeningExecutorService transactionExecutor =
- MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+ private SchemaContext schemaContext;
+
+ String memberName = "mock-member";
@Before
public void setUp(){
- ShardStrategyFactory.setConfiguration(configuration);
- }
+ MockitoAnnotations.initMocks(this);
- @After
- public void tearDown() {
- transactionExecutor.shutdownNow();
- }
+ schemaContext = TestModel.createTestContext();
- @Test
- public void testRead() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ doReturn(getSystem()).when(mockActorContext).getActorSystem();
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ ShardStrategyFactory.setConfiguration(configuration);
+ }
+ private CreateTransaction eqCreateTransaction(final String memberName,
+ final TransactionType type) {
+ ArgumentMatcher<CreateTransaction> matcher = new ArgumentMatcher<CreateTransaction>() {
+ @Override
+ public boolean matches(Object argument) {
+ CreateTransaction obj = CreateTransaction.fromSerializable(argument);
+ return obj.getTransactionId().startsWith(memberName) &&
+ obj.getTransactionType() == type.ordinal();
+ }
+ };
+
+ return argThat(matcher);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ private DataExists eqDataExists() {
+ ArgumentMatcher<DataExists> matcher = new ArgumentMatcher<DataExists>() {
+ @Override
+ public boolean matches(Object argument) {
+ DataExists obj = DataExists.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
+ return argThat(matcher);
+ }
- actorContext.setExecuteRemoteOperationResponse(
- new ReadDataReply(TestModel.createTestContext(), null)
- .toSerializable());
+ private ReadData eqReadData() {
+ ArgumentMatcher<ReadData> matcher = new ArgumentMatcher<ReadData>() {
+ @Override
+ public boolean matches(Object argument) {
+ ReadData obj = ReadData.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
+ return argThat(matcher);
+ }
- Optional<NormalizedNode<?, ?>> normalizedNodeOptional = read.get();
+ private WriteData eqWriteData(final NormalizedNode<?, ?> nodeToWrite) {
+ ArgumentMatcher<WriteData> matcher = new ArgumentMatcher<WriteData>() {
+ @Override
+ public boolean matches(Object argument) {
+ WriteData obj = WriteData.fromSerializable(argument, schemaContext);
+ return obj.getPath().equals(TestModel.TEST_PATH) &&
+ obj.getData().equals(nodeToWrite);
+ }
+ };
+
+ return argThat(matcher);
+ }
- Assert.assertFalse(normalizedNodeOptional.isPresent());
+ private MergeData eqMergeData(final NormalizedNode<?, ?> nodeToWrite) {
+ ArgumentMatcher<MergeData> matcher = new ArgumentMatcher<MergeData>() {
+ @Override
+ public boolean matches(Object argument) {
+ MergeData obj = MergeData.fromSerializable(argument, schemaContext);
+ return obj.getPath().equals(TestModel.TEST_PATH) &&
+ obj.getData().equals(nodeToWrite);
+ }
+ };
+
+ return argThat(matcher);
+ }
- actorContext.setExecuteRemoteOperationResponse(new ReadDataReply(
- TestModel.createTestContext(),ImmutableNodes.containerNode(TestModel.TEST_QNAME)).toSerializable());
+ private DeleteData eqDeleteData() {
+ ArgumentMatcher<DeleteData> matcher = new ArgumentMatcher<DeleteData>() {
+ @Override
+ public boolean matches(Object argument) {
+ DeleteData obj = DeleteData.fromSerializable(argument);
+ return obj.getPath().equals(TestModel.TEST_PATH);
+ }
+ };
- read = transactionProxy.read(TestModel.TEST_PATH);
+ return argThat(matcher);
+ }
- normalizedNodeOptional = read.get();
+ private Object readyTxReply(ActorPath path) {
+ return new ReadyTransactionReply(path).toSerializable();
+ }
- Assert.assertTrue(normalizedNodeOptional.isPresent());
+ private Future<Object> readDataReply(NormalizedNode<?, ?> data) {
+ return Futures.successful(new ReadDataReply(schemaContext, data)
+ .toSerializable());
}
- @Test
- public void testExists() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ private Future<Object> dataExistsReply(boolean exists) {
+ return Futures.successful(new DataExistsReply(exists).toSerializable());
+ }
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ private ActorSelection actorSelection(ActorRef actorRef) {
+ return getSystem().actorSelection(actorRef.path());
+ }
+ private FiniteDuration anyDuration() {
+ return any(FiniteDuration.class);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ private CreateTransactionReply createTransactionReply(ActorRef actorRef){
+ return CreateTransactionReply.newBuilder()
+ .setTransactionActorPath(actorRef.path().toString())
+ .setTransactionId("txn-1").build();
+ }
+ private ActorRef setupActorContextWithInitialCreateTransaction(TransactionType type) {
+ ActorRef actorRef = getSystem().actorOf(Props.create(DoNothingActor.class));
+ doReturn(getSystem().actorSelection(actorRef.path())).
+ when(mockActorContext).actorSelection(actorRef.path().toString());
+ doReturn(memberName).when(mockActorContext).getCurrentMemberName();
+ doReturn(createTransactionReply(actorRef)).when(mockActorContext).
+ executeShardOperation(eq(DefaultShardStrategy.DEFAULT_SHARD),
+ eqCreateTransaction(memberName, type), anyDuration());
+ doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(
+ anyString(), eq(actorRef.path().toString()));
+ doReturn(actorRef.path()).when(mockActorContext).actorFor(actorRef.path().toString());
+
+ return actorRef;
+ }
- actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(false).toSerializable());
+ @Test
+ public void testRead() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
- CheckedFuture<Boolean, ReadFailedException> exists =
- transactionProxy.exists(TestModel.TEST_PATH);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- Assert.assertFalse(exists.checkedGet());
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- actorContext.setExecuteRemoteOperationResponse(new DataExistsReply(true).toSerializable());
+ Optional<NormalizedNode<?, ?>> readOptional = transactionProxy.read(
+ TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
- exists = transactionProxy.exists(TestModel.TEST_PATH);
+ assertEquals("NormalizedNode isPresent", false, readOptional.isPresent());
- Assert.assertTrue(exists.checkedGet());
+ NormalizedNode<?, ?> expectedNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- actorContext.setExecuteRemoteOperationResponse("bad message");
+ doReturn(readDataReply(expectedNode)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- exists = transactionProxy.exists(TestModel.TEST_PATH);
+ readOptional = transactionProxy.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
- try {
- exists.checkedGet();
- fail();
- } catch(ReadFailedException e){
- }
+ assertEquals("NormalizedNode isPresent", true, readOptional.isPresent());
+ assertEquals("Response NormalizedNode", expectedNode, readOptional.get());
}
@Test(expected = ReadFailedException.class)
public void testReadWhenAnInvalidMessageIsSentInReply() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ doReturn(Futures.successful(new Object())).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException>
- read = transactionProxy.read(TestModel.TEST_PATH);
-
- read.checkedGet();
+ transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
}
- @Test
- public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
-
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new PrimaryNotFoundException("test"));
+ @Test(expected = TestException.class)
+ public void testReadWithAsyncRemoteOperatonFailure() throws Throwable {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ doThrow(new TestException()).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
-
- Assert.assertFalse(read.get().isPresent());
-
+ try {
+ transactionProxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+ // Expected - throw cause - expects TestException.
+ throw e.getCause();
+ }
}
+ private void testExceptionOnInitialCreateTransaction(Exception exToThrow, Invoker invoker)
+ throws Throwable {
- @Test
- public void testReadWhenATimeoutExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
+ doThrow(exToThrow).when(mockActorContext).executeShardOperation(
+ anyString(), any(), anyDuration());
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new TimeoutException("test", new Exception("reason")));
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ try {
+ invoker.invoke(transactionProxy);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+ // Expected - rethrow the cause so the caller's expected-exception check can match it.
+ throw e.getCause();
+ }
+ }
+ private void testReadWithExceptionOnInitialCreateTransaction(Exception exToThrow) throws Throwable {
+ testExceptionOnInitialCreateTransaction(exToThrow, new Invoker() {
+ @Override
+ public void invoke(TransactionProxy proxy) throws Exception {
+ proxy.read(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
+ });
+ }
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
+ @Test(expected = PrimaryNotFoundException.class)
+ public void testReadWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"));
+ }
- Assert.assertFalse(read.get().isPresent());
+ @Test(expected = TimeoutException.class)
+ public void testReadWhenATimeoutExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new TimeoutException("test",
+ new Exception("reason")));
+ }
+ @Test(expected = TestException.class)
+ public void testReadWhenAnyOtherExceptionIsThrown() throws Throwable {
+ testReadWithExceptionOnInitialCreateTransaction(new TestException());
}
@Test
- public void testReadWhenAAnyOtherExceptionIsThrown() throws Exception {
- final ActorContext actorContext = mock(ActorContext.class);
-
- when(actorContext.executeShardOperation(anyString(), any(), any(
- FiniteDuration.class))).thenThrow(new NullPointerException());
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ public void testExists() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- try {
- ListenableFuture<Optional<NormalizedNode<?, ?>>> read =
- transactionProxy.read(TestModel.TEST_PATH);
- fail("A null pointer exception was expected");
- } catch(NullPointerException e){
+ doReturn(dataExistsReply(false)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
- }
- }
+ Boolean exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
+ assertEquals("Exists response", false, exists);
+ doReturn(dataExistsReply(true)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDataExists(), anyDuration());
- @Test
- public void testWrite() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ exists = transactionProxy.exists(TestModel.TEST_PATH).checkedGet();
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ assertEquals("Exists response", true, exists);
+ }
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ @Test(expected = PrimaryNotFoundException.class)
+ public void testExistsWhenAPrimaryNotFoundExceptionIsThrown() throws Throwable {
+ testExceptionOnInitialCreateTransaction(new PrimaryNotFoundException("test"), new Invoker() {
+ @Override
+ public void invoke(TransactionProxy proxy) throws Exception {
+ proxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
+ });
+ }
- transactionProxy.write(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+ @Test(expected = ReadFailedException.class)
+ public void testExistsWhenAnInvalidMessageIsSentInReply() throws Exception {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
+ doReturn(Futures.successful(new Object())).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
- Assert.assertNotNull(messages);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- Assert.assertTrue(messages instanceof List);
+ transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ }
- List<Object> listMessages = (List<Object>) messages;
+ @Test(expected = TestException.class)
+ public void testExistsWithAsyncRemoteOperatonFailure() throws Throwable {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
- Assert.assertEquals(1, listMessages.size());
+ doThrow(new TestException()).when(mockActorContext).
+ executeRemoteOperationAsync(any(ActorSelection.class), any(), anyDuration());
- Assert.assertEquals(WriteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
- }
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_ONLY, schemaContext);
- private Object createPrimaryFound(ActorRef actorRef) {
- return new PrimaryFound(actorRef.path().toString()).toSerializable();
+ try {
+ transactionProxy.exists(TestModel.TEST_PATH).checkedGet(5, TimeUnit.SECONDS);
+ fail("Expected ReadFailedException");
+ } catch(ReadFailedException e) {
+ // Expected - throw cause - expects TestException.
+ throw e.getCause();
+ }
}
@Test
- public void testMerge() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ public void testWrite() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- transactionProxy.merge(TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.NAME_QNAME));
+ transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqWriteData(nodeToWrite));
+ }
- Assert.assertNotNull(messages);
+ @Test
+ public void testMerge() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- Assert.assertTrue(messages instanceof List);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
- List<Object> listMessages = (List<Object>) messages;
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- Assert.assertEquals(1, listMessages.size());
+ transactionProxy.merge(TestModel.TEST_PATH, nodeToWrite);
- Assert.assertEquals(MergeData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqMergeData(nodeToWrite));
}
@Test
public void testDelete() throws Exception {
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(WRITE_ONLY);
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ WRITE_ONLY, schemaContext);
transactionProxy.delete(TestModel.TEST_PATH);
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
-
- Assert.assertNotNull(messages);
-
- Assert.assertTrue(messages instanceof List);
-
- List<Object> listMessages = (List<Object>) messages;
-
- Assert.assertEquals(1, listMessages.size());
-
- Assert.assertEquals(DeleteData.SERIALIZABLE_CLASS, listMessages.get(0).getClass());
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqDeleteData());
}
+ @SuppressWarnings("unchecked")
@Test
public void testReady() throws Exception {
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef doNothingActorRef = getSystem().actorOf(props);
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(doNothingActorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(doNothingActorRef));
- actorContext.setExecuteRemoteOperationResponse(new ReadyTransactionReply(doNothingActorRef.path()).toSerializable());
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ doReturn(readyTxReply(actorRef.path())).when(mockActorContext).executeRemoteOperation(
+ eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS), anyDuration());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_WRITE, schemaContext);
transactionProxy.read(TestModel.TEST_PATH);
DOMStoreThreePhaseCommitCohort ready = transactionProxy.ready();
- Assert.assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
+ assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
- Assert.assertTrue("No cohort paths returned", proxy.getCohortPaths().size() > 0);
-
+ assertEquals("getCohortPaths", Arrays.asList(actorRef.path()), proxy.getCohortPaths());
}
@Test
- public void testGetIdentifier(){
- final Props props = Props.create(DoNothingActor.class);
- final ActorRef doNothingActorRef = getSystem().actorOf(props);
-
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteShardOperationResponse( createTransactionReply(doNothingActorRef) );
-
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
-
- Assert.assertNotNull(transactionProxy.getIdentifier());
+ public void testGetIdentifier() {
+ setupActorContextWithInitialCreateTransaction(READ_ONLY);
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ TransactionProxy.TransactionType.READ_ONLY, schemaContext);
+
+ Object id = transactionProxy.getIdentifier();
+ assertNotNull("getIdentifier returned null", id);
+ assertTrue("Invalid identifier: " + id, id.toString().startsWith(memberName));
}
+ @SuppressWarnings("unchecked")
@Test
- public void testClose(){
- final Props props = Props.create(MessageCollectorActor.class);
- final ActorRef actorRef = getSystem().actorOf(props);
+ public void testClose() throws Exception{
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(READ_WRITE);
- final MockActorContext actorContext = new MockActorContext(this.getSystem());
- actorContext.setExecuteLocalOperationResponse(createPrimaryFound(actorRef));
- actorContext.setExecuteShardOperationResponse(createTransactionReply(actorRef));
- actorContext.setExecuteRemoteOperationResponse("message");
+ doReturn(readDataReply(null)).when(mockActorContext).executeRemoteOperationAsync(
+ eq(actorSelection(actorRef)), eqReadData(), anyDuration());
- TransactionProxy transactionProxy =
- new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, transactionExecutor, TestModel.createTestContext());
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_WRITE, schemaContext);
transactionProxy.read(TestModel.TEST_PATH);
transactionProxy.close();
- Object messages = testContext
- .executeLocalOperation(actorRef, "messages",
- ActorContext.ASK_DURATION);
-
- Assert.assertNotNull(messages);
-
- Assert.assertTrue(messages instanceof List);
-
- List<Object> listMessages = (List<Object>) messages;
-
- Assert.assertEquals(1, listMessages.size());
-
- Assert.assertTrue(listMessages.get(0).getClass().equals(CloseTransaction.SERIALIZABLE_CLASS));
- }
-
- private CreateTransactionReply createTransactionReply(ActorRef actorRef){
- return CreateTransactionReply.newBuilder()
- .setTransactionActorPath(actorRef.path().toString())
- .setTransactionId("txn-1")
- .build();
+ verify(mockActorContext).sendRemoteOperationAsync(
+ eq(actorSelection(actorRef)), isA(CloseTransaction.SERIALIZABLE_CLASS));
}
}
package org.opendaylight.controller.cluster.datastore.utils;
+import java.util.concurrent.TimeUnit;
import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.japi.Creator;
import akka.testkit.JavaTestKit;
+
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.AbstractActorTest;
import org.opendaylight.controller.cluster.datastore.ClusterWrapper;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardFound;
import org.opendaylight.controller.cluster.datastore.messages.LocalShardNotFound;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
}
private static Props props(final boolean found, final ActorRef actorRef){
- return Props.create(new Creator<MockShardManager>() {
+ return Props.create(new MockShardManagerCreator(found, actorRef) );
+ }
- @Override public MockShardManager create()
- throws Exception {
- return new MockShardManager(found,
- actorRef);
- }
- });
+ @SuppressWarnings("serial")
+ private static class MockShardManagerCreator implements Creator<MockShardManager> {
+ final boolean found;
+ final ActorRef actorRef;
+
+ MockShardManagerCreator(boolean found, ActorRef actorRef) {
+ this.found = found;
+ this.actorRef = actorRef;
+ }
+
+ @Override
+ public MockShardManager create() throws Exception {
+ return new MockShardManager(found, actorRef);
+ }
}
}
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardManagerActorRef = getSystem()
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
new JavaTestKit(getSystem()) {{
new Within(duration("1 seconds")) {
+ @Override
protected void run() {
ActorRef shardManagerActorRef = getSystem()
}};
}
+
+ @Test
+ public void testExecuteRemoteOperation() {
+ new JavaTestKit(getSystem()) {{
+
+ new Within(duration("3 seconds")) {
+ @Override
+ protected void run() {
+
+ ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+ ActorRef shardManagerActorRef = getSystem()
+ .actorOf(MockShardManager.props(true, shardActorRef));
+
+ ActorContext actorContext =
+ new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+ mock(Configuration.class));
+
+ ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+ Object out = actorContext.executeRemoteOperation(actor, "hello", duration("3 seconds"));
+
+ assertEquals("hello", out);
+
+ expectNoMsg();
+ }
+ };
+ }};
+ }
+
+ @Test
+ public void testExecuteRemoteOperationAsync() {
+ new JavaTestKit(getSystem()) {{
+
+ new Within(duration("3 seconds")) {
+ @Override
+ protected void run() {
+
+ ActorRef shardActorRef = getSystem().actorOf(Props.create(EchoActor.class));
+
+ ActorRef shardManagerActorRef = getSystem()
+ .actorOf(MockShardManager.props(true, shardActorRef));
+
+ ActorContext actorContext =
+ new ActorContext(getSystem(), shardManagerActorRef , mock(ClusterWrapper.class),
+ mock(Configuration.class));
+
+ ActorSelection actor = actorContext.actorSelection(shardActorRef.path());
+
+ Future<Object> future = actorContext.executeRemoteOperationAsync(actor, "hello",
+ Duration.create(3, TimeUnit.SECONDS));
+
+ try {
+ Object result = Await.result(future, Duration.create(3, TimeUnit.SECONDS));
+ assertEquals("Result", "hello", result);
+ } catch(Exception e) {
+ throw new AssertionError(e);
+ }
+
+ expectNoMsg();
+ }
+ };
+ }};
+ }
}
package org.opendaylight.controller.cluster.datastore.utils;
-
+import static org.junit.Assert.assertNotNull;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
public class MockActorContext extends ActorContext {
- private Object executeShardOperationResponse;
- private Object executeRemoteOperationResponse;
- private Object executeLocalOperationResponse;
- private Object executeLocalShardOperationResponse;
+ private volatile Object executeShardOperationResponse;
+ private volatile Object executeRemoteOperationResponse;
+ private volatile Object executeLocalOperationResponse;
+ private volatile Object executeLocalShardOperationResponse;
+ private volatile Exception executeRemoteOperationFailure;
+ private volatile Object inputMessage;
public MockActorContext(ActorSystem actorSystem) {
super(actorSystem, null, new MockClusterWrapper(), new MockConfiguration());
executeRemoteOperationResponse = response;
}
+ public void setExecuteRemoteOperationFailure(Exception executeRemoteOperationFailure) {
+ this.executeRemoteOperationFailure = executeRemoteOperationFailure;
+ }
+
public void setExecuteLocalOperationResponse(
Object executeLocalOperationResponse) {
this.executeLocalOperationResponse = executeLocalOperationResponse;
this.executeLocalShardOperationResponse = executeLocalShardOperationResponse;
}
- @Override public Object executeLocalOperation(ActorRef actor,
+ @SuppressWarnings("unchecked")
+ public <T> T getInputMessage(Class<T> expType) throws Exception {
+ assertNotNull("Input message was null", inputMessage);
+ return (T) expType.getMethod("fromSerializable", Object.class).invoke(null, inputMessage);
+ }
+
+ @Override
+ public Object executeLocalOperation(ActorRef actor,
Object message, FiniteDuration duration) {
return this.executeLocalOperationResponse;
}
- @Override public Object executeLocalShardOperation(String shardName,
+ @Override
+ public Object executeLocalShardOperation(String shardName,
Object message, FiniteDuration duration) {
return this.executeLocalShardOperationResponse;
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.base.Preconditions;
+
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.yangtools.util.concurrent.NotificationManager;
import org.slf4j.LoggerFactory;
class ChangeListenerNotifyTask implements Runnable {
-
private static final Logger LOG = LoggerFactory.getLogger(ChangeListenerNotifyTask.class);
- private final Iterable<? extends DataChangeListenerRegistration<?>> listeners;
- private final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event;
-
@SuppressWarnings("rawtypes")
- private final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent>
- notificationMgr;
+ private final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent> notificationMgr;
+ private final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event;
+ private final DataChangeListenerRegistration<?> listener;
@SuppressWarnings("rawtypes")
- public ChangeListenerNotifyTask(final Iterable<? extends DataChangeListenerRegistration<?>> listeners,
+ public ChangeListenerNotifyTask(final DataChangeListenerRegistration<?> listener,
final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> event,
final NotificationManager<AsyncDataChangeListener,AsyncDataChangeEvent> notificationMgr) {
- this.listeners = listeners;
- this.event = event;
- this.notificationMgr = notificationMgr;
+ this.notificationMgr = Preconditions.checkNotNull(notificationMgr);
+ this.listener = Preconditions.checkNotNull(listener);
+ this.event = Preconditions.checkNotNull(event);
}
@Override
public void run() {
-
- for (DataChangeListenerRegistration<?> listener : listeners) {
- notificationMgr.submitNotification(listener.getInstance(), event);
+ final AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> l = listener.getInstance();
+ if (l == null) {
+ LOG.trace("Skipping event delivery to unregistered listener {}", l);
+ return;
}
+ LOG.trace("Listener {} event {}", l, event);
+
+ // FIXME: Yo dawg I heard you like queues, so this was queued to be queued
+ notificationMgr.submitNotification(l, event);
}
@Override
public String toString() {
- return "ChangeListenerNotifyTask [listeners=" + listeners + ", event=" + event + "]";
+ return "ChangeListenerNotifyTask [listener=" + listener + ", event=" + event + "]";
}
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.base.Preconditions;
+
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import com.google.common.base.Preconditions;
-
public final class DOMImmutableDataChangeEvent implements
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
updated.put(path, after);
return this;
}
+
+ public boolean isEmpty() {
+ return created.isEmpty() && removed.isEmpty() && updated.isEmpty();
+ }
}
private static final class RemoveEventFactory implements SimpleEventFactory {
import javax.annotation.concurrent.GuardedBy;
-import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
.addCreated(path, data) //
.build();
- new ChangeListenerNotifyTask(Collections.singletonList(reg), event,
+ new ChangeListenerNotifyTask(reg, event,
dataChangeListenerNotificationManager).run();
}
}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
-import static org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.builder;
-
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
+import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
+import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
import java.util.Map.Entry;
-import java.util.Set;
import java.util.concurrent.Callable;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
-import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.Builder;
import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.SimpleEventFactory;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree;
-import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Node;
import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Walker;
import org.opendaylight.yangtools.util.concurrent.NotificationManager;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNodeContainer;
*/
final class ResolveDataChangeEventsTask implements Callable<Iterable<ChangeListenerNotifyTask>> {
private static final Logger LOG = LoggerFactory.getLogger(ResolveDataChangeEventsTask.class);
- private static final DOMImmutableDataChangeEvent NO_CHANGE = builder(DataChangeScope.BASE).build();
- private final Multimap<ListenerTree.Node, DOMImmutableDataChangeEvent> events = HashMultimap.create();
+ @SuppressWarnings("rawtypes")
+ private final NotificationManager<AsyncDataChangeListener, AsyncDataChangeEvent> notificationMgr;
private final DataTreeCandidate candidate;
private final ListenerTree listenerRoot;
- @SuppressWarnings("rawtypes")
- private final NotificationManager<AsyncDataChangeListener, AsyncDataChangeEvent> notificationMgr;
+ private Multimap<DataChangeListenerRegistration<?>, DOMImmutableDataChangeEvent> collectedEvents;
@SuppressWarnings("rawtypes")
public ResolveDataChangeEventsTask(final DataTreeCandidate candidate, final ListenerTree listenerTree,
* order to delivery data change events.
*/
@Override
- public Iterable<ChangeListenerNotifyTask> call() {
+ public synchronized Iterable<ChangeListenerNotifyTask> call() {
try (final Walker w = listenerRoot.getWalker()) {
- resolveAnyChangeEvent(candidate.getRootPath(), Collections.singleton(w.getRootNode()), candidate.getRootNode());
- return createNotificationTasks();
- }
- }
-
- /**
- *
- * Walks map of listeners to data change events, creates notification
- * delivery tasks.
- *
- * Walks map of registered and affected listeners and creates notification
- * tasks from set of listeners and events to be delivered.
- *
- * If set of listeners has more then one event (applicable to wildcarded
- * listeners), merges all data change events into one, final which contains
- * all separate updates.
- *
- * Dispatch between merge variant and reuse variant of notification task is
- * done in
- * {@link #addNotificationTask(com.google.common.collect.ImmutableList.Builder, Node, java.util.Collection)}
- *
- * @return Collection of notification tasks.
- */
- private Collection<ChangeListenerNotifyTask> createNotificationTasks() {
- ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder = ImmutableList.builder();
- for (Entry<ListenerTree.Node, Collection<DOMImmutableDataChangeEvent>> entry : events.asMap().entrySet()) {
- addNotificationTask(taskListBuilder, entry.getKey(), entry.getValue());
- }
- return taskListBuilder.build();
- }
-
- /**
- * Adds notification task to task list.
- *
- * If entry collection contains one event, this event is reused and added to
- * notification tasks for listeners (see
- * {@link #addNotificationTaskByScope(com.google.common.collect.ImmutableList.Builder, Node, DOMImmutableDataChangeEvent)}
- * . Otherwise events are merged by scope and distributed between listeners
- * to particular scope. See
- * {@link #addNotificationTasksAndMergeEvents(com.google.common.collect.ImmutableList.Builder, Node, java.util.Collection)}
- * .
- *
- * @param taskListBuilder
- * @param listeners
- * @param entries
- */
- private void addNotificationTask(final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder,
- final ListenerTree.Node listeners, final Collection<DOMImmutableDataChangeEvent> entries) {
-
- if (!entries.isEmpty()) {
- if (entries.size() == 1) {
- addNotificationTaskByScope(taskListBuilder, listeners, Iterables.getOnlyElement(entries));
- } else {
- addNotificationTasksAndMergeEvents(taskListBuilder, listeners, entries);
- }
- }
- }
+ // Defensive: reset internal state
+ collectedEvents = ArrayListMultimap.create();
- /**
- *
- * Add notification deliveries task to the listener.
- *
- *
- * @param taskListBuilder
- * @param listeners
- * @param event
- */
- private void addNotificationTaskByScope(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final ListenerTree.Node listeners,
- final DOMImmutableDataChangeEvent event) {
- DataChangeScope eventScope = event.getScope();
- for (DataChangeListenerRegistration<?> listenerReg : listeners.getListeners()) {
- DataChangeScope listenerScope = listenerReg.getScope();
- List<DataChangeListenerRegistration<?>> listenerSet = Collections
- .<DataChangeListenerRegistration<?>> singletonList(listenerReg);
- if (eventScope == DataChangeScope.BASE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- } else if (eventScope == DataChangeScope.ONE && listenerScope != DataChangeScope.BASE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- } else if (eventScope == DataChangeScope.SUBTREE && listenerScope == DataChangeScope.SUBTREE) {
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- }
- }
- }
+ // Run through the tree
+ final ResolveDataChangeState s = ResolveDataChangeState.initial(candidate.getRootPath(), w.getRootNode());
+ resolveAnyChangeEvent(s, candidate.getRootNode());
- /**
- *
- * Add notification tasks with merged event
- *
- * Separate Events by scope and creates merged notification tasks for each
- * and every scope which is present.
- *
- * Adds merged events to task list based on scope requested by client.
- *
- * @param taskListBuilder
- * @param listeners
- * @param entries
- */
- private void addNotificationTasksAndMergeEvents(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final ListenerTree.Node listeners,
- final Collection<DOMImmutableDataChangeEvent> entries) {
-
- final Builder baseBuilder = builder(DataChangeScope.BASE);
- final Builder oneBuilder = builder(DataChangeScope.ONE);
- final Builder subtreeBuilder = builder(DataChangeScope.SUBTREE);
-
- boolean baseModified = false;
- boolean oneModified = false;
- boolean subtreeModified = false;
- for (final DOMImmutableDataChangeEvent entry : entries) {
- switch (entry.getScope()) {
- // Absence of breaks is intentional here. Subtree contains base and
- // one, one also contains base
- case BASE:
- baseBuilder.merge(entry);
- baseModified = true;
- case ONE:
- oneBuilder.merge(entry);
- oneModified = true;
- case SUBTREE:
- subtreeBuilder.merge(entry);
- subtreeModified = true;
+ /*
+ * Convert to tasks, but be mindful of multiple values -- those indicate multiple
+ * wildcard matches, which need to be merged.
+ */
+ final Collection<ChangeListenerNotifyTask> ret = new ArrayList<>();
+ for (Entry<DataChangeListenerRegistration<?>, Collection<DOMImmutableDataChangeEvent>> e : collectedEvents.asMap().entrySet()) {
+ final Collection<DOMImmutableDataChangeEvent> col = e.getValue();
+ final DOMImmutableDataChangeEvent event;
+
+ if (col.size() != 1) {
+ final Builder b = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE);
+ for (DOMImmutableDataChangeEvent i : col) {
+ b.merge(i);
+ }
+
+ event = b.build();
+ LOG.trace("Merged events {} into event {}", col, event);
+ } else {
+ event = col.iterator().next();
+ }
+
+ ret.add(new ChangeListenerNotifyTask(e.getKey(), event, notificationMgr));
}
- }
- if (baseModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, baseBuilder.build());
- }
- if (oneModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, oneBuilder.build());
- }
- if (subtreeModified) {
- addNotificationTaskExclusively(taskListBuilder, listeners, subtreeBuilder.build());
- }
- }
-
- private void addNotificationTaskExclusively(
- final ImmutableList.Builder<ChangeListenerNotifyTask> taskListBuilder, final Node listeners,
- final DOMImmutableDataChangeEvent event) {
- for (DataChangeListenerRegistration<?> listener : listeners.getListeners()) {
- if (listener.getScope() == event.getScope()) {
- Set<DataChangeListenerRegistration<?>> listenerSet = Collections
- .<DataChangeListenerRegistration<?>> singleton(listener);
- taskListBuilder.add(new ChangeListenerNotifyTask(listenerSet, event, notificationMgr));
- }
+ // FIXME: so now we have tasks to submit tasks... Inception-style!
+ LOG.debug("Created tasks {}", ret);
+ return ret;
}
}
* - Original (before) state of current node
* @param after
* - After state of current node
- * @return Data Change Event of this node and all it's children
+ * @return True if the subtree changed, false otherwise
*/
- private DOMImmutableDataChangeEvent resolveAnyChangeEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final DataTreeCandidateNode node) {
-
+ private boolean resolveAnyChangeEvent(final ResolveDataChangeState state, final DataTreeCandidateNode node) {
if (node.getModificationType() != ModificationType.UNMODIFIED &&
!node.getDataAfter().isPresent() && !node.getDataBefore().isPresent()) {
LOG.debug("Modification at {} has type {}, but no before- and after-data. Assuming unchanged.",
- path, node.getModificationType());
- return NO_CHANGE;
+ state.getPath(), node.getModificationType());
+ return false;
}
// no before and after state is present
switch (node.getModificationType()) {
case SUBTREE_MODIFIED:
- return resolveSubtreeChangeEvent(path, listeners, node);
+ return resolveSubtreeChangeEvent(state, node);
case MERGE:
case WRITE:
Preconditions.checkArgument(node.getDataAfter().isPresent(),
- "Modification at {} has type {} but no after-data", path, node.getModificationType());
- if (node.getDataBefore().isPresent()) {
- return resolveReplacedEvent(path, listeners, node.getDataBefore().get(), node.getDataAfter().get());
- } else {
- return resolveCreateEvent(path, listeners, node.getDataAfter().get());
+ "Modification at {} has type {} but no after-data", state.getPath(), node.getModificationType());
+ if (!node.getDataBefore().isPresent()) {
+ resolveCreateEvent(state, node.getDataAfter().get());
+ return true;
}
+
+ return resolveReplacedEvent(state, node.getDataBefore().get(), node.getDataAfter().get());
case DELETE:
Preconditions.checkArgument(node.getDataBefore().isPresent(),
- "Modification at {} has type {} but no before-data", path, node.getModificationType());
- return resolveDeleteEvent(path, listeners, node.getDataBefore().get());
+ "Modification at {} has type {} but no before-data", state.getPath(), node.getModificationType());
+ resolveDeleteEvent(state, node.getDataBefore().get());
+ return true;
case UNMODIFIED:
- return NO_CHANGE;
+ return false;
}
- throw new IllegalStateException(String.format("Unhandled node state %s at %s", node.getModificationType(), path));
+ throw new IllegalStateException(String.format("Unhandled node state %s at %s", node.getModificationType(), state.getPath()));
}
- private DOMImmutableDataChangeEvent resolveReplacedEvent(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<?, ?> beforeData,
- final NormalizedNode<?, ?> afterData) {
-
- // FIXME: BUG-1493: check the listeners to prune unneeded changes:
- // for subtrees, we have to do all
- // for one, we need to expand children
- // for base, we just report replacement
+ private boolean resolveReplacedEvent(final ResolveDataChangeState state,
+ final NormalizedNode<?, ?> beforeData, final NormalizedNode<?, ?> afterData) {
if (beforeData instanceof NormalizedNodeContainer<?, ?, ?>) {
- // Node is container (contains child) and we have interested
- // listeners registered for it, that means we need to do
- // resolution of changes on children level and can not
- // shortcut resolution.
- LOG.trace("Resolving subtree replace event for {} before {}, after {}",path,beforeData,afterData);
+ /*
+ * Node is a container (contains a child) and we have interested
+ * listeners registered for it, which means we need to
+ * resolve changes at the child level and cannot
+ * shortcut resolution.
+ */
+ LOG.trace("Resolving subtree replace event for {} before {}, after {}", state.getPath(), beforeData, afterData);
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> beforeCont = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) beforeData;
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> afterCont = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) afterData;
- return resolveNodeContainerReplaced(path, listeners, beforeCont, afterCont);
- } else if (!beforeData.equals(afterData)) {
- // Node is Leaf type (does not contain child nodes)
- // so normal equals method is sufficient for determining change.
- LOG.trace("Resolving leaf replace event for {} , before {}, after {}",path,beforeData,afterData);
- DOMImmutableDataChangeEvent event = builder(DataChangeScope.BASE).setBefore(beforeData).setAfter(afterData)
- .addUpdated(path, beforeData, afterData).build();
- addPartialTask(listeners, event);
- return event;
- } else {
- return NO_CHANGE;
+ return resolveNodeContainerReplaced(state, beforeCont, afterCont);
}
+
+ // Node is a Leaf type (does not contain child nodes)
+ // so normal equals method is sufficient for determining change.
+ if (beforeData.equals(afterData)) {
+ LOG.trace("Skipping equal leaf {}", state.getPath());
+ return false;
+ }
+
+ LOG.trace("Resolving leaf replace event for {} , before {}, after {}", state.getPath(), beforeData, afterData);
+ DOMImmutableDataChangeEvent event = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE).addUpdated(state.getPath(), beforeData, afterData).build();
+ state.addEvent(event);
+ state.collectEvents(beforeData, afterData, collectedEvents);
+ return true;
}
- private DOMImmutableDataChangeEvent resolveNodeContainerReplaced(final YangInstanceIdentifier path,
- final Collection<Node> listeners,
+ private boolean resolveNodeContainerReplaced(final ResolveDataChangeState state,
final NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> beforeCont,
final NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> afterCont) {
- final List<DOMImmutableDataChangeEvent> childChanges = new LinkedList<>();
+ if (!state.needsProcessing()) {
+ LOG.trace("Not processing replaced container {}", state.getPath());
+ return true;
+ }
// We look at all children from before and compare it with after state.
+ boolean childChanged = false;
for (NormalizedNode<PathArgument, ?> beforeChild : beforeCont.getValue()) {
final PathArgument childId = beforeChild.getIdentifier();
- YangInstanceIdentifier childPath = path.node(childId);
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- Optional<NormalizedNode<PathArgument, ?>> afterChild = afterCont.getChild(childId);
- DOMImmutableDataChangeEvent childChange = resolveNodeContainerChildUpdated(childPath, childListeners,
- beforeChild, afterChild);
- // If change is empty (equals to NO_CHANGE)
- if (childChange != NO_CHANGE) {
- childChanges.add(childChange);
+ if (resolveNodeContainerChildUpdated(state.child(childId), beforeChild, afterCont.getChild(childId))) {
+ childChanged = true;
}
}
* created.
*/
if (!beforeCont.getChild(childId).isPresent()) {
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- YangInstanceIdentifier childPath = path.node(childId);
- childChanges.add(resolveSameEventRecursivelly(childPath , childListeners, afterChild,
- DOMImmutableDataChangeEvent.getCreateEventFactory()));
+ resolveSameEventRecursivelly(state.child(childId), afterChild, DOMImmutableDataChangeEvent.getCreateEventFactory());
+ childChanged = true;
}
}
- if (childChanges.isEmpty()) {
- return NO_CHANGE;
- }
- Builder eventBuilder = builder(DataChangeScope.BASE) //
- .setBefore(beforeCont) //
- .setAfter(afterCont)
- .addUpdated(path, beforeCont, afterCont);
- for (DOMImmutableDataChangeEvent childChange : childChanges) {
- eventBuilder.merge(childChange);
+ if (childChanged) {
+ DOMImmutableDataChangeEvent event = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE)
+ .addUpdated(state.getPath(), beforeCont, afterCont).build();
+ state.addEvent(event);
}
- DOMImmutableDataChangeEvent replaceEvent = eventBuilder.build();
- addPartialTask(listeners, replaceEvent);
- return replaceEvent;
+ state.collectEvents(beforeCont, afterCont, collectedEvents);
+ return childChanged;
}
- private DOMImmutableDataChangeEvent resolveNodeContainerChildUpdated(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<PathArgument, ?> before,
- final Optional<NormalizedNode<PathArgument, ?>> after) {
-
+ private boolean resolveNodeContainerChildUpdated(final ResolveDataChangeState state,
+ final NormalizedNode<PathArgument, ?> before, final Optional<NormalizedNode<PathArgument, ?>> after) {
if (after.isPresent()) {
// REPLACE or SUBTREE Modified
- return resolveReplacedEvent(path, listeners, before, after.get());
-
- } else {
- // AFTER state is not present - child was deleted.
- return resolveSameEventRecursivelly(path, listeners, before,
- DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ return resolveReplacedEvent(state, before, after.get());
}
+
+ // AFTER state is not present - child was deleted.
+ resolveSameEventRecursivelly(state, before, DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ return true;
}
/**
* Resolves create events deep down the interest listener tree.
*
- *
* @param path
* @param listeners
* @param afterState
* @return
*/
- private DOMImmutableDataChangeEvent resolveCreateEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final NormalizedNode<?, ?> afterState) {
+ private void resolveCreateEvent(final ResolveDataChangeState state, final NormalizedNode<?, ?> afterState) {
@SuppressWarnings({ "unchecked", "rawtypes" })
final NormalizedNode<PathArgument, ?> node = (NormalizedNode) afterState;
- return resolveSameEventRecursivelly(path, listeners, node, DOMImmutableDataChangeEvent.getCreateEventFactory());
+ resolveSameEventRecursivelly(state, node, DOMImmutableDataChangeEvent.getCreateEventFactory());
}
- private DOMImmutableDataChangeEvent resolveDeleteEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final NormalizedNode<?, ?> beforeState) {
-
+ private void resolveDeleteEvent(final ResolveDataChangeState state, final NormalizedNode<?, ?> beforeState) {
@SuppressWarnings({ "unchecked", "rawtypes" })
final NormalizedNode<PathArgument, ?> node = (NormalizedNode) beforeState;
- return resolveSameEventRecursivelly(path, listeners, node, DOMImmutableDataChangeEvent.getRemoveEventFactory());
+ resolveSameEventRecursivelly(state, node, DOMImmutableDataChangeEvent.getRemoveEventFactory());
}
- private DOMImmutableDataChangeEvent resolveSameEventRecursivelly(final YangInstanceIdentifier path,
- final Collection<Node> listeners, final NormalizedNode<PathArgument, ?> node,
- final SimpleEventFactory eventFactory) {
- final DOMImmutableDataChangeEvent event = eventFactory.create(path, node);
- DOMImmutableDataChangeEvent propagateEvent = event;
+ private void resolveSameEventRecursivelly(final ResolveDataChangeState state,
+ final NormalizedNode<PathArgument, ?> node, final SimpleEventFactory eventFactory) {
+ if (!state.needsProcessing()) {
+ LOG.trace("Skipping child {}", state.getPath());
+ return;
+ }
+
// We have listeners for this node or it's children, so we will try
// to do additional processing
if (node instanceof NormalizedNodeContainer<?, ?, ?>) {
- LOG.trace("Resolving subtree recursive event for {}, type {}", path, eventFactory);
-
- Builder eventBuilder = builder(DataChangeScope.BASE);
- eventBuilder.merge(event);
- eventBuilder.setBefore(event.getOriginalSubtree());
- eventBuilder.setAfter(event.getUpdatedSubtree());
+ LOG.trace("Resolving subtree recursive event for {}, type {}", state.getPath(), eventFactory);
// Node has children, so we will try to resolve it's children
// changes.
@SuppressWarnings("unchecked")
NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>> container = (NormalizedNodeContainer<?, PathArgument, NormalizedNode<PathArgument, ?>>) node;
for (NormalizedNode<PathArgument, ?> child : container.getValue()) {
- PathArgument childId = child.getIdentifier();
+ final PathArgument childId = child.getIdentifier();
+
LOG.trace("Resolving event for child {}", childId);
- Collection<Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
- eventBuilder.merge(resolveSameEventRecursivelly(path.node(childId), childListeners, child, eventFactory));
+ resolveSameEventRecursivelly(state.child(childId), child, eventFactory);
}
- propagateEvent = eventBuilder.build();
}
- if (!listeners.isEmpty()) {
- addPartialTask(listeners, propagateEvent);
- }
- return propagateEvent;
- }
- private DOMImmutableDataChangeEvent resolveSubtreeChangeEvent(final YangInstanceIdentifier path,
- final Collection<ListenerTree.Node> listeners, final DataTreeCandidateNode modification) {
+ final DOMImmutableDataChangeEvent event = eventFactory.create(state.getPath(), node);
+ LOG.trace("Adding event {} at path {}", event, state.getPath());
+ state.addEvent(event);
+ state.collectEvents(event.getOriginalSubtree(), event.getUpdatedSubtree(), collectedEvents);
+ }
- Preconditions.checkArgument(modification.getDataBefore().isPresent(), "Subtree change with before-data not present at path %s", path);
- Preconditions.checkArgument(modification.getDataAfter().isPresent(), "Subtree change with after-data not present at path %s", path);
+ private boolean resolveSubtreeChangeEvent(final ResolveDataChangeState state, final DataTreeCandidateNode modification) {
+ Preconditions.checkArgument(modification.getDataBefore().isPresent(), "Subtree change with before-data not present at path %s", state.getPath());
+ Preconditions.checkArgument(modification.getDataAfter().isPresent(), "Subtree change with after-data not present at path %s", state.getPath());
- Builder one = builder(DataChangeScope.ONE).
- setBefore(modification.getDataBefore().get()).
- setAfter(modification.getDataAfter().get());
- Builder subtree = builder(DataChangeScope.SUBTREE).
- setBefore(modification.getDataBefore().get()).
- setAfter(modification.getDataAfter().get());
- boolean oneModified = false;
+ DataChangeScope scope = null;
for (DataTreeCandidateNode childMod : modification.getChildNodes()) {
- PathArgument childId = childMod.getIdentifier();
- YangInstanceIdentifier childPath = path.node(childId);
- Collection<ListenerTree.Node> childListeners = getListenerChildrenWildcarded(listeners, childId);
-
+ final ResolveDataChangeState childState = state.child(childMod.getIdentifier());
switch (childMod.getModificationType()) {
case WRITE:
case MERGE:
case DELETE:
- one.merge(resolveAnyChangeEvent(childPath, childListeners, childMod));
- oneModified = true;
+ if (resolveAnyChangeEvent(childState, childMod)) {
+ scope = DataChangeScope.ONE;
+ }
break;
case SUBTREE_MODIFIED:
- subtree.merge(resolveSubtreeChangeEvent(childPath, childListeners, childMod));
+ if (resolveSubtreeChangeEvent(childState, childMod) && scope == null) {
+ scope = DataChangeScope.SUBTREE;
+ }
break;
case UNMODIFIED:
// no-op
break;
}
}
- final DOMImmutableDataChangeEvent oneChangeEvent;
- if(oneModified) {
- one.addUpdated(path, modification.getDataBefore().get(), modification.getDataAfter().get());
- oneChangeEvent = one.build();
- subtree.merge(oneChangeEvent);
- } else {
- oneChangeEvent = null;
- subtree.addUpdated(path, modification.getDataBefore().get(), modification.getDataAfter().get());
- }
- DOMImmutableDataChangeEvent subtreeEvent = subtree.build();
- if (!listeners.isEmpty()) {
- if(oneChangeEvent != null) {
- addPartialTask(listeners, oneChangeEvent);
- }
- addPartialTask(listeners, subtreeEvent);
- }
- return subtreeEvent;
- }
- private DOMImmutableDataChangeEvent addPartialTask(final Collection<ListenerTree.Node> listeners,
- final DOMImmutableDataChangeEvent event) {
- for (ListenerTree.Node listenerNode : listeners) {
- if (!listenerNode.getListeners().isEmpty()) {
- LOG.trace("Adding event {} for listeners {}",event,listenerNode);
- events.put(listenerNode, event);
- }
- }
- return event;
- }
+ final NormalizedNode<?, ?> before = modification.getDataBefore().get();
+ final NormalizedNode<?, ?> after = modification.getDataAfter().get();
- private static Collection<ListenerTree.Node> getListenerChildrenWildcarded(final Collection<ListenerTree.Node> parentNodes,
- final PathArgument child) {
- if (parentNodes.isEmpty()) {
- return Collections.emptyList();
- }
- com.google.common.collect.ImmutableList.Builder<ListenerTree.Node> result = ImmutableList.builder();
- if (child instanceof NodeWithValue || child instanceof NodeIdentifierWithPredicates) {
- NodeIdentifier wildcardedIdentifier = new NodeIdentifier(child.getNodeType());
- addChildrenNodesToBuilder(result, parentNodes, wildcardedIdentifier);
+ if (scope != null) {
+ DOMImmutableDataChangeEvent one = DOMImmutableDataChangeEvent.builder(scope).addUpdated(state.getPath(), before, after).build();
+ state.addEvent(one);
}
- addChildrenNodesToBuilder(result, parentNodes, child);
- return result.build();
- }
- private static void addChildrenNodesToBuilder(final ImmutableList.Builder<ListenerTree.Node> result,
- final Collection<ListenerTree.Node> parentNodes, final PathArgument childIdentifier) {
- for (ListenerTree.Node node : parentNodes) {
- Optional<ListenerTree.Node> child = node.getChild(childIdentifier);
- if (child.isPresent()) {
- result.add(child.get());
- }
- }
+ state.collectEvents(before, after, collectedEvents);
+ return scope != null;
}
@SuppressWarnings("rawtypes")
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.impl;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Multimap;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.dom.store.impl.DOMImmutableDataChangeEvent.Builder;
+import org.opendaylight.controller.md.sal.dom.store.impl.tree.ListenerTree.Node;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Recursion state used in {@link ResolveDataChangeEventsTask}. Instances of this
+ * class track which listeners are affected by a particular change node. It takes
+ * care of properly inheriting SUB/ONE listeners and also provides a means to
+ * understand when actual processing need not occur.
+ */
+final class ResolveDataChangeState {
+ private static final Logger LOG = LoggerFactory.getLogger(ResolveDataChangeState.class);
+ /**
+ * Inherited from all parents
+ */
+ private final Iterable<Builder> inheritedSub;
+ /**
+ * Inherited from immediate parent
+ */
+ private final Iterable<Builder> inheritedOne;
+ private final YangInstanceIdentifier nodeId;
+ private final Collection<Node> nodes;
+
+ private final Map<DataChangeListenerRegistration<?>, Builder> subBuilders = new HashMap<>();
+ private final Map<DataChangeListenerRegistration<?>, Builder> oneBuilders = new HashMap<>();
+ private final Map<DataChangeListenerRegistration<?>, Builder> baseBuilders = new HashMap<>();
+
+ private ResolveDataChangeState(final YangInstanceIdentifier nodeId,
+ final Iterable<Builder> inheritedSub, final Iterable<Builder> inheritedOne,
+ final Collection<Node> nodes) {
+ this.nodeId = Preconditions.checkNotNull(nodeId);
+ this.nodes = Preconditions.checkNotNull(nodes);
+ this.inheritedSub = Preconditions.checkNotNull(inheritedSub);
+ this.inheritedOne = Preconditions.checkNotNull(inheritedOne);
+
+ /*
+ * Collect the nodes which need to be propagated from us to the child.
+ */
+ for (Node n : nodes) {
+ for (DataChangeListenerRegistration<?> l : n.getListeners()) {
+ final Builder b = DOMImmutableDataChangeEvent.builder(DataChangeScope.BASE);
+ switch (l.getScope()) {
+ case BASE:
+ baseBuilders.put(l, b);
+ break;
+ case ONE:
+ oneBuilders.put(l, b);
+ break;
+ case SUBTREE:
+ subBuilders.put(l, b);
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Create an initial state handle at a particular root node.
+ *
+ * @param rootId root instance identifier
+ * @param root root node
+ * @return Initial state handle anchored at {@code rootId}
+ */
+ public static ResolveDataChangeState initial(final YangInstanceIdentifier rootId, final Node root) {
+ return new ResolveDataChangeState(rootId, Collections.<Builder>emptyList(),
+ Collections.<Builder>emptyList(), Collections.singletonList(root));
+ }
+
+ /**
+ * Create a state handle for iterating over a particular child.
+ *
+ * @param childId ID of the child
+ * @return State handle
+ */
+ public ResolveDataChangeState child(final PathArgument childId) {
+ return new ResolveDataChangeState(nodeId.node(childId),
+ Iterables.concat(inheritedSub, subBuilders.values()),
+ oneBuilders.values(), getListenerChildrenWildcarded(nodes, childId));
+ }
+
+ /**
+ * Get the current path
+ *
+ * @return Current path.
+ */
+ public YangInstanceIdentifier getPath() {
+ return nodeId;
+ }
+
+ /**
+ * Check if this child needs processing.
+ *
+ * @return True if processing needs to occur, false otherwise.
+ */
+ public boolean needsProcessing() {
+ // May have underlying listeners, so we need to process
+ if (!nodes.isEmpty()) {
+ return true;
+ }
+ // Have SUBTREE listeners
+ if (!Iterables.isEmpty(inheritedSub)) {
+ return true;
+ }
+ // Have ONE listeners
+ if (!Iterables.isEmpty(inheritedOne)) {
+ return true;
+ }
+
+ // FIXME: do we need anything else? If not, flip this to 'false'
+ return true;
+ }
+
+ /**
+ * Add an event to all current listeners.
+ *
+ * @param event the change event to deliver to all currently tracked builders
+ */
+ public void addEvent(final DOMImmutableDataChangeEvent event) {
+ // Subtree builders get always notified
+ for (Builder b : subBuilders.values()) {
+ b.merge(event);
+ }
+ for (Builder b : inheritedSub) {
+ b.merge(event);
+ }
+
+ if (event.getScope() == DataChangeScope.ONE || event.getScope() == DataChangeScope.BASE) {
+ for (Builder b : oneBuilders.values()) {
+ b.merge(event);
+ }
+ }
+
+ if (event.getScope() == DataChangeScope.BASE) {
+ for (Builder b : inheritedOne) {
+ b.merge(event);
+ }
+ for (Builder b : baseBuilders.values()) {
+ b.merge(event);
+ }
+ }
+ }
+
+ /**
+ * Gather all non-empty events into the provided map.
+ *
+ * @param before before-image
+ * @param after after-image
+ * @param map target map
+ */
+ public void collectEvents(final NormalizedNode<?, ?> before, final NormalizedNode<?, ?> after,
+ final Multimap<DataChangeListenerRegistration<?>, DOMImmutableDataChangeEvent> map) {
+ for (Entry<DataChangeListenerRegistration<?>, Builder> e : baseBuilders.entrySet()) {
+ final Builder b = e.getValue();
+ if (!b.isEmpty()) {
+ map.put(e.getKey(), b.setBefore(before).setAfter(after).build());
+ }
+ }
+ for (Entry<DataChangeListenerRegistration<?>, Builder> e : oneBuilders.entrySet()) {
+ final Builder b = e.getValue();
+ if (!b.isEmpty()) {
+ map.put(e.getKey(), b.setBefore(before).setAfter(after).build());
+ }
+ }
+ for (Entry<DataChangeListenerRegistration<?>, Builder> e : subBuilders.entrySet()) {
+ final Builder b = e.getValue();
+ if (!b.isEmpty()) {
+ map.put(e.getKey(), b.setBefore(before).setAfter(after).build());
+ }
+ }
+
+ LOG.trace("Collected events {}", map);
+ }
+
+ private static Collection<Node> getListenerChildrenWildcarded(final Collection<Node> parentNodes,
+ final PathArgument child) {
+ if (parentNodes.isEmpty()) {
+ return Collections.emptyList();
+ }
+
+ final List<Node> result = new ArrayList<>();
+ if (child instanceof NodeWithValue || child instanceof NodeIdentifierWithPredicates) {
+ NodeIdentifier wildcardedIdentifier = new NodeIdentifier(child.getNodeType());
+ addChildNodes(result, parentNodes, wildcardedIdentifier);
+ }
+ addChildNodes(result, parentNodes, child);
+ return result;
+ }
+
+ private static void addChildNodes(final List<Node> result, final Collection<Node> parentNodes, final PathArgument childIdentifier) {
+ for (Node node : parentNodes) {
+ Optional<Node> child = node.getChild(childIdentifier);
+ if (child.isPresent()) {
+ result.add(child.get());
+ }
+ }
+ }
+}
*/
package org.opendaylight.controller.md.sal.dom.store.impl;
+import com.google.common.util.concurrent.MoreExecutors;
+
import java.util.Collection;
import java.util.Map;
+
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeBuilder;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import com.google.common.util.concurrent.MoreExecutors;
-
public abstract class AbstractDataChangeListenerTest {
protected static final YangInstanceIdentifier TOP_LEVEL = YangInstanceIdentifier
}
}
+ /**
+ * Create a new test task. The task will operate on the backed database,
+ * and will use the proper background executor service.
+ *
+ * @return Test task initialized to clean up {@link #TOP_LEVEL} and its
+ * children.
+ */
public final DatastoreTestTask newTestTask() {
return new DatastoreTestTask(datastore, dclExecutorService).cleanup(DatastoreTestTask
.simpleDelete(TOP_LEVEL));
import org.opendaylight.controller.md.sal.dom.store.impl.DatastoreTestTask.WriteTransactionCustomizer;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+/**
+ * Base template for a test suite for testing DataChangeListener functionality.
+ */
public abstract class DefaultDataChangeListenerTestSuite extends AbstractDataChangeListenerTest {
protected static final String FOO_SIBLING = "foo-sibling";
+ /**
+ * Callback invoked when the test suite can modify task parameters.
+ *
+ * @param task Update task configuration as needed
+ */
abstract protected void customizeTask(DatastoreTestTask task);
@Test
assertNotNull(change);
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR));
+ /*
+ * Created data must not contain nested-list item, since that is two-level deep.
+ */
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR));
+ assertContains(change.getCreatedData(), path(FOO) );
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change = task.getChangeEvent();
assertNotNull(change);
-
- assertContains(change.getCreatedData(), path(FOO, BAZ));
+ /*
+ * Created data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getCreatedData(), path(FOO, BAZ));
assertContains(change.getUpdatedData(), path(FOO));
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO, BAR));
+ /*
+ * Removed data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAR));
}
assertNotNull(change);
assertFalse(change.getCreatedData().isEmpty());
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR), path(FOO, BAZ));
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
+ // Base event should contain only changed item, no details about child.
+ assertContains(change.getCreatedData(), path(FOO));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR), path(FOO, BAZ));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
assertEmpty(change.getUpdatedData());
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO),path(FOO, BAZ),path(FOO,BAR));
+ /*
+ * Scope base listener event should contain top-level-list item and nested list path
+ * and should not contain baz, bar which are two-level deep
+ */
+ assertContains(change.getRemovedPaths(), path(FOO));
+ assertNotContains(change.getRemovedPaths(),path(FOO, BAZ),path(FOO,BAR));
}
@Override
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.top.level.list.NestedList;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
assertNotNull(change);
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR));
+ assertContains(change.getCreatedData(), path(FOO), path(FOO).node(NestedList.QNAME));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change = task.getChangeEvent();
assertNotNull(change);
-
- assertContains(change.getCreatedData(), path(FOO, BAZ));
- assertContains(change.getUpdatedData(), path(FOO));
+ /*
+ * Created data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getCreatedData(), path(FOO, BAZ));
+ assertContains(change.getUpdatedData(), path(FOO),path(FOO).node(NestedList.QNAME));
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO, BAR));
+ /*
+ * Removed data must NOT contain nested-list item since scope is base, and change is two
+ * level deep.
+ */
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAR));
}
assertNotNull(change);
assertFalse(change.getCreatedData().isEmpty());
- assertContains(change.getCreatedData(), path(FOO), path(FOO, BAR), path(FOO, BAZ));
- assertNotContains(change.getCreatedData(), TOP_LEVEL);
+ // Base event should contain only changed item, and details about immediate child.
+ assertContains(change.getCreatedData(), path(FOO),path(FOO).node(NestedList.QNAME));
+ assertNotContains(change.getCreatedData(), TOP_LEVEL,path(FOO, BAR), path(FOO, BAZ));
assertEmpty(change.getUpdatedData());
assertEmpty(change.getRemovedPaths());
assertEmpty(change.getUpdatedData());
assertNotContains(change.getUpdatedData(), TOP_LEVEL);
- assertContains(change.getRemovedPaths(), path(FOO),path(FOO, BAZ),path(FOO,BAR));
+ assertContains(change.getRemovedPaths(), path(FOO),path(FOO).node(NestedList.QNAME));
+ assertNotContains(change.getRemovedPaths(), path(FOO, BAZ),path(FOO,BAR));
}
@Override
final CompositeNode schemasNode =
(CompositeNode) NetconfMessageTransformUtil.findNode(schemasNodeResult.getResult(), DATA_STATE_SCHEMAS_IDENTIFIER);
- return create(schemasNode);
+ return create(id, schemasNode);
}
/**
* Parse response of get(netconf-state/schemas) to find all schemas under netconf-state/schemas
*/
@VisibleForTesting
- protected static NetconfStateSchemas create(final CompositeNode schemasNode) {
+ protected static NetconfStateSchemas create(final RemoteDeviceId id, final CompositeNode schemasNode) {
final Set<RemoteYangSchema> availableYangSchemas = Sets.newHashSet();
for (final CompositeNode schemaNode : schemasNode.getCompositesByName(Schema.QNAME.withoutRevision())) {
- availableYangSchemas.add(RemoteYangSchema.createFromCompositeNode(schemaNode));
+ final Optional<RemoteYangSchema> fromCompositeNode = RemoteYangSchema.createFromCompositeNode(id, schemaNode);
+ if(fromCompositeNode.isPresent()) {
+ availableYangSchemas.add(fromCompositeNode.get());
+ }
}
return new NetconfStateSchemas(availableYangSchemas);
return qname;
}
- static RemoteYangSchema createFromCompositeNode(final CompositeNode schemaNode) {
+ static Optional<RemoteYangSchema> createFromCompositeNode(final RemoteDeviceId id, final CompositeNode schemaNode) {
Preconditions.checkArgument(schemaNode.getKey().equals(Schema.QNAME.withoutRevision()), "Wrong QName %s", schemaNode.getKey());
QName childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_FORMAT.withoutRevision();
final String formatAsString = getSingleChildNodeValue(schemaNode, childNode).get();
- Preconditions.checkArgument(formatAsString.equals(Yang.QNAME.getLocalName()),
- "Expecting format to be only %s, not %s", Yang.QNAME.getLocalName(), formatAsString);
+ if(formatAsString.equals(Yang.QNAME.getLocalName()) == false) {
+ logger.debug("{}: Ignoring schema due to unsupported format: {}", id, formatAsString);
+ return Optional.absent();
+ }
childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_LOCATION.withoutRevision();
final Set<String> locationsAsString = getAllChildNodeValues(schemaNode, childNode);
- Preconditions.checkArgument(locationsAsString.contains(Schema.Location.Enumeration.NETCONF.toString()),
- "Expecting location to be %s, not %s", Schema.Location.Enumeration.NETCONF.toString(), locationsAsString);
+ if(locationsAsString.contains(Schema.Location.Enumeration.NETCONF.toString()) == false) {
+ logger.debug("{}: Ignoring schema due to unsupported location: {}", id, locationsAsString);
+ return Optional.absent();
+ }
childNode = NetconfMessageTransformUtil.IETF_NETCONF_MONITORING_SCHEMA_NAMESPACE.withoutRevision();
final String namespaceAsString = getSingleChildNodeValue(schemaNode, childNode).get();
? QName.create(namespaceAsString, revisionAsString.get(), moduleNameAsString)
: QName.create(URI.create(namespaceAsString), null, moduleNameAsString).withoutRevision();
- return new RemoteYangSchema(moduleQName);
+ return Optional.of(new RemoteYangSchema(moduleQName));
}
private static Set<String> getAllChildNodeValues(final CompositeNode schemaNode, final QName childNodeQName) {
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
-
import com.google.common.collect.Sets;
+
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
}
// FIXME: do we really want to continue here?
- moduleBasedCaps.add(QName.create(namespace, revision, moduleName));
+ moduleBasedCaps.add(QName.cachedReference(QName.create(namespace, revision, moduleName)));
nonModuleCaps.remove(capability);
}
import java.util.Set;
import org.junit.Test;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
+import org.opendaylight.controller.sal.connect.util.RemoteDeviceId;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.impl.codec.xml.XmlDocumentUtils;
public void testCreate() throws Exception {
final Document schemasXml = XmlUtil.readXmlToDocument(getClass().getResourceAsStream("/netconf-state.schemas.payload.xml"));
final CompositeNode compositeNodeSchemas = (CompositeNode) XmlDocumentUtils.toDomNode(schemasXml);
- final NetconfStateSchemas schemas = NetconfStateSchemas.create(compositeNodeSchemas);
+ final NetconfStateSchemas schemas = NetconfStateSchemas.create(new RemoteDeviceId("device"), compositeNodeSchemas);
final Set<QName> availableYangSchemasQNames = schemas.getAvailableYangSchemasQNames();
assertEquals(73, availableYangSchemasQNames.size());
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
</dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>netconf-util</artifactId>
- </dependency>
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-spi</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-core-spi</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-common-impl</artifactId>
</dependency>
<dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-common-impl</artifactId>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>netconf-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-clustering-commons</artifactId>
+ </dependency>
+
<!-- Yang tools-->
<dependency>
<artifactId>scala-library</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.1</version>
+ </dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>junit</groupId>
<Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
<Export-package></Export-package>
<Private-Package></Private-Package>
- <Import-Package>!org.jboss.*;!com.jcraft.*;*</Import-Package>
+ <Import-Package>!org.iq80.*;!*snappy;!org.jboss.*;!com.jcraft.*;!org.fusesource.*;*</Import-Package>
<Embed-Dependency>
+ sal-clustering-commons;
+ sal-akka-raft;
+ *metrics*;
!sal*;
!*config-api*;
!*testkit*;
import org.opendaylight.controller.remote.rpc.messages.InvokeRpc;
import org.opendaylight.controller.remote.rpc.messages.RpcResponse;
import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
import org.opendaylight.controller.sal.core.api.RoutedRpcDefaultImplementation;
import org.opendaylight.controller.sal.core.api.RpcImplementation;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.controller.remote.rpc.registry.RpcRegistry;
import org.opendaylight.controller.remote.rpc.utils.ActorUtil;
import org.opendaylight.controller.remote.rpc.utils.RoutingLogic;
-import org.opendaylight.controller.remote.rpc.utils.XmlUtils;
+import org.opendaylight.controller.xml.codec.XmlUtils;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.yangtools.yang.common.RpcResult;
public RpcRegistry() {
bucketStore = getContext().actorOf(Props.create(BucketStore.class), "store");
+
+ log.info("Bucket store path = {}", bucketStore.path().toString());
}
public RpcRegistry(ActorRef bucketStore) {
import akka.cluster.Cluster;
import akka.event.Logging;
import akka.event.LoggingAdapter;
+import org.opendaylight.controller.utils.ConditionalProbe;
import java.util.HashMap;
import java.util.Map;
*/
private ActorRef gossiper;
+ private ConditionalProbe probe;
+
public BucketStore(){
gossiper = getContext().actorOf(Props.create(Gossiper.class), "gossiper");
}
@Override
public void onReceive(Object message) throws Exception {
- log.debug("Received message: node[{}], message[{}]", selfAddress, message);
+ log.debug("Received message: node[{}], message[{}]", selfAddress,
+ message);
- if (message instanceof UpdateBucket)
- receiveUpdateBucket(((UpdateBucket) message).getBucket());
+ if (probe != null) {
- else if (message instanceof GetAllBuckets)
- receiveGetAllBucket();
+ probe.tell(message, getSelf());
+ }
- else if (message instanceof GetLocalBucket)
+ if (message instanceof ConditionalProbe) {
+ log.info("Received probe {} {}", getSelf(), message);
+ probe = (ConditionalProbe) message;
+ } else if (message instanceof UpdateBucket) {
+ receiveUpdateBucket(((UpdateBucket) message).getBucket());
+ } else if (message instanceof GetAllBuckets) {
+ receiveGetAllBucket();
+ } else if (message instanceof GetLocalBucket) {
receiveGetLocalBucket();
-
- else if (message instanceof GetBucketsByMembers)
- receiveGetBucketsByMembers(((GetBucketsByMembers) message).getMembers());
-
- else if (message instanceof GetBucketVersions)
+ } else if (message instanceof GetBucketsByMembers) {
+ receiveGetBucketsByMembers(
+ ((GetBucketsByMembers) message).getMembers());
+ } else if (message instanceof GetBucketVersions) {
receiveGetBucketVersions();
-
- else if (message instanceof UpdateRemoteBuckets)
- receiveUpdateRemoteBuckets(((UpdateRemoteBuckets) message).getBuckets());
-
- else {
+ } else if (message instanceof UpdateRemoteBuckets) {
+ receiveUpdateRemoteBuckets(
+ ((UpdateRemoteBuckets) message).getBuckets());
+ } else {
log.debug("Unhandled message [{}]", message);
unhandled(message);
}
Address getSelfAddress() {
return selfAddress;
}
+
}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.utils;
+
+import akka.actor.ActorRef;
+import com.google.common.base.Predicate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ConditionalProbe {
+ private final ActorRef actorRef;
+ private final Predicate predicate;
+ Logger log = LoggerFactory.getLogger(ConditionalProbe.class);
+
+ public ConditionalProbe(ActorRef actorRef, Predicate predicate) {
+ this.actorRef = actorRef;
+ this.predicate = predicate;
+ }
+
+ public void tell(Object message, ActorRef sender){
+ if(predicate.apply(message)) {
+ log.info("sending message to probe {}", message);
+ actorRef.tell(message, sender);
+ }
+ }
+}
package org.opendaylight.controller.remote.rpc.registry;
+
import akka.actor.ActorPath;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.ChildActorPath;
import akka.actor.Props;
-import akka.japi.Pair;
import akka.testkit.JavaTestKit;
+import com.google.common.base.Predicate;
import com.typesafe.config.ConfigFactory;
+
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.remote.rpc.RouteIdentifierImpl;
+import org.opendaylight.controller.remote.rpc.registry.gossip.Messages;
import org.opendaylight.controller.sal.connector.api.RpcRouter;
+import org.opendaylight.controller.utils.ConditionalProbe;
import org.opendaylight.yangtools.yang.common.QName;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
+import javax.annotation.Nullable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
+import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter;
import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.AddOrUpdateRoutes;
import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.RemoveRoutes;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRouters;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.FindRoutersReply;
-import static org.opendaylight.controller.remote.rpc.registry.RpcRegistry.Messages.SetLocalRouter;
public class RpcRegistryTest {
- private static ActorSystem node1;
- private static ActorSystem node2;
- private static ActorSystem node3;
-
- private ActorRef registry1;
- private ActorRef registry2;
- private ActorRef registry3;
-
- @BeforeClass
- public static void setup() throws InterruptedException {
- Thread.sleep(1000); //give some time for previous test to close netty ports
- node1 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberA"));
- node2 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberB"));
- node3 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberC"));
- }
-
- @AfterClass
- public static void teardown(){
- JavaTestKit.shutdownActorSystem(node1);
- JavaTestKit.shutdownActorSystem(node2);
- JavaTestKit.shutdownActorSystem(node3);
- if (node1 != null)
- node1.shutdown();
- if (node2 != null)
- node2.shutdown();
- if (node3 != null)
- node3.shutdown();
-
- }
-
- @Before
- public void createRpcRegistry() throws InterruptedException {
- registry1 = node1.actorOf(Props.create(RpcRegistry.class));
- registry2 = node2.actorOf(Props.create(RpcRegistry.class));
- registry3 = node3.actorOf(Props.create(RpcRegistry.class));
- }
-
- @After
- public void stopRpcRegistry() throws InterruptedException {
- if (registry1 != null)
- node1.stop(registry1);
- if (registry2 != null)
- node2.stop(registry2);
- if (registry3 != null)
- node3.stop(registry3);
- }
+ private static ActorSystem node1;
+ private static ActorSystem node2;
+ private static ActorSystem node3;
+
+ private ActorRef registry1;
+ private ActorRef registry2;
+ private ActorRef registry3;
+
+ @BeforeClass
+ public static void setup() throws InterruptedException {
+ node1 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberA"));
+ node2 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberB"));
+ node3 = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("memberC"));
+ }
+
+ @AfterClass
+ public static void teardown() {
+ JavaTestKit.shutdownActorSystem(node1);
+ JavaTestKit.shutdownActorSystem(node2);
+ JavaTestKit.shutdownActorSystem(node3);
+ if (node1 != null)
+ node1.shutdown();
+ if (node2 != null)
+ node2.shutdown();
+ if (node3 != null)
+ node3.shutdown();
+
+ }
+
+ @Before
+ public void createRpcRegistry() throws InterruptedException {
+ registry1 = node1.actorOf(Props.create(RpcRegistry.class));
+ registry2 = node2.actorOf(Props.create(RpcRegistry.class));
+ registry3 = node3.actorOf(Props.create(RpcRegistry.class));
+ }
+
+ @After
+ public void stopRpcRegistry() throws InterruptedException {
+ if (registry1 != null)
+ node1.stop(registry1);
+ if (registry2 != null)
+ node2.stop(registry2);
+ if (registry3 != null)
+ node3.stop(registry3);
+ }
+
+ /**
+ * One node cluster.
+ * 1. Register rpc, ensure router can be found
+ * 2. Then remove rpc, ensure it is deleted
+ *
+ * @throws URISyntaxException
+ * @throws InterruptedException
+ */
+ @Test
+ public void testAddRemoveRpcOnSameNode() throws URISyntaxException, InterruptedException {
+ validateSystemStartup();
+
+ final JavaTestKit mockBroker = new JavaTestKit(node1);
+
+ final ActorPath bucketStorePath = new ChildActorPath(registry1.path(), "store");
+
+ //install probe
+ final JavaTestKit probe1 = createProbeForMessage(
+ node1, bucketStorePath, Messages.BucketStoreMessages.UpdateBucket.class);
+
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker.getRef()), mockBroker.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker.getRef());
+
+ //Bucket store should get an update bucket message. Updated bucket contains added rpc.
+ probe1.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateBucket.class);
+
+ //Now remove rpc
+ registry1.tell(getRemoveRouteMessage(), mockBroker.getRef());
+
+ //Bucket store should get an update bucket message. Rpc is removed in the updated bucket
+ probe1.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateBucket.class);
+
+
+ }
+
+
+ /**
+ * Three node cluster.
+ * 1. Register rpc on 1 node, ensure 2nd node gets updated
+ * 2. Remove rpc on 1 node, ensure 2nd node gets updated
+ *
+ * @throws URISyntaxException
+ * @throws InterruptedException
+ */
+ @Test
+ public void testRpcAddRemoveInCluster() throws URISyntaxException, InterruptedException {
- /**
- * One node cluster.
- * 1. Register rpc, ensure router can be found
- * 2. Then remove rpc, ensure its deleted
- *
- * @throws URISyntaxException
- * @throws InterruptedException
- */
- @Test
- public void testAddRemoveRpcOnSameNode() throws URISyntaxException, InterruptedException {
-
- final JavaTestKit mockBroker = new JavaTestKit(node1);
-
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker.getRef()), mockBroker.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker.getRef());
-
- Thread.sleep(1000);//
-
- //find the route on node 1's registry
- registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef());
- FindRoutersReply message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- List<Pair<ActorRef, Long>> pairs = message.getRouterWithUpdateTime();
-
- validateRouterReceived(pairs, mockBroker.getRef());
-
- //Now remove rpc
- registry1.tell(getRemoveRouteMessage(), mockBroker.getRef());
- Thread.sleep(1000);
- //find the route on node 1's registry
- registry1.tell(new FindRouters(createRouteId()), mockBroker.getRef());
- message = mockBroker.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- pairs = message.getRouterWithUpdateTime();
-
- Assert.assertTrue(pairs.isEmpty());
- }
+ validateSystemStartup();
+
+ final JavaTestKit mockBroker1 = new JavaTestKit(node1);
+
+ //install probe on node2's bucket store
+ final ActorPath bucketStorePath = new ChildActorPath(registry2.path(), "store");
+ final JavaTestKit probe2 = createProbeForMessage(
+ node2, bucketStorePath, Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- /**
- * Three node cluster.
- * 1. Register rpc on 1 node, ensure its router can be found on other 2.
- * 2. Remove rpc on 1 node, ensure its removed on other 2.
- *
- * @throws URISyntaxException
- * @throws InterruptedException
- */
- @Test
- public void testRpcAddRemoveInCluster() throws URISyntaxException, InterruptedException {
- validateSystemStartup();
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- final JavaTestKit mockBroker1 = new JavaTestKit(node1);
- final JavaTestKit mockBroker2 = new JavaTestKit(node2);
- final JavaTestKit mockBroker3 = new JavaTestKit(node3);
+ //Bucket store on node2 should get a message to update its local copy of remote buckets
+ probe2.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
+ //Now remove
+ registry1.tell(getRemoveRouteMessage(), mockBroker1.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ //Bucket store on node2 should get a message to update its local copy of remote buckets
+ probe2.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //find the route in node 2's registry
- List<Pair<ActorRef, Long>> pairs = findRouters(registry2, mockBroker2);
- validateRouterReceived(pairs, mockBroker1.getRef());
+ }
- //find the route in node 3's registry
- pairs = findRouters(registry3, mockBroker3);
- validateRouterReceived(pairs, mockBroker1.getRef());
+ /**
+ * Three node cluster.
+ * Register rpc on 2 nodes. Ensure 3rd gets updated.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testRpcAddedOnMultiNodes() throws Exception {
- //Now remove
- registry1.tell(getRemoveRouteMessage(), mockBroker1.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ validateSystemStartup();
- pairs = findRouters(registry2, mockBroker2);
- Assert.assertTrue(pairs.isEmpty());
+ final JavaTestKit mockBroker1 = new JavaTestKit(node1);
+ final JavaTestKit mockBroker2 = new JavaTestKit(node2);
+ final JavaTestKit mockBroker3 = new JavaTestKit(node3);
- pairs = findRouters(registry3, mockBroker3);
- Assert.assertTrue(pairs.isEmpty());
- }
+ registry3.tell(new SetLocalRouter(mockBroker3.getRef()), mockBroker3.getRef());
- /**
- * Three node cluster.
- * Register rpc on 2 nodes. Ensure 2 routers are found on 3rd.
- *
- * @throws Exception
- */
- @Test
- public void testAnRpcAddedOnMultiNodesShouldReturnMultiRouter() throws Exception {
+ //install probe on node 3
+ final ActorPath bucketStorePath = new ChildActorPath(registry3.path(), "store");
+ final JavaTestKit probe3 = createProbeForMessage(
+ node3, bucketStorePath, Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- validateSystemStartup();
- final JavaTestKit mockBroker1 = new JavaTestKit(node1);
- final JavaTestKit mockBroker2 = new JavaTestKit(node2);
- final JavaTestKit mockBroker3 = new JavaTestKit(node3);
+ //Add rpc on node 1
+ registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
+ registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- //Thread.sleep(5000);//let system come up
+ probe3.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
- //Add rpc on node 1
- registry1.tell(new SetLocalRouter(mockBroker1.getRef()), mockBroker1.getRef());
- registry1.tell(getAddRouteMessage(), mockBroker1.getRef());
- //Add same rpc on node 2
- registry2.tell(new SetLocalRouter(mockBroker2.getRef()), mockBroker2.getRef());
- registry2.tell(getAddRouteMessage(), mockBroker2.getRef());
+ //Add same rpc on node 2
+ registry2.tell(new SetLocalRouter(mockBroker2.getRef()), mockBroker2.getRef());
+ registry2.tell(getAddRouteMessage(), mockBroker2.getRef());
- registry3.tell(new SetLocalRouter(mockBroker3.getRef()), mockBroker3.getRef());
- Thread.sleep(1000);// give some time for bucket store data sync
+ probe3.expectMsgClass(
+ FiniteDuration.apply(10, TimeUnit.SECONDS),
+ Messages.BucketStoreMessages.UpdateRemoteBuckets.class);
+ }
- //find the route in node 3's registry
- registry3.tell(new FindRouters(createRouteId()), mockBroker3.getRef());
- FindRoutersReply message = mockBroker3.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- List<Pair<ActorRef, Long>> pairs = message.getRouterWithUpdateTime();
+ private JavaTestKit createProbeForMessage(ActorSystem node, ActorPath subjectPath, final Class clazz) {
+ final JavaTestKit probe = new JavaTestKit(node);
- validateMultiRouterReceived(pairs, mockBroker1.getRef(), mockBroker2.getRef());
+ ConditionalProbe conditionalProbe =
+ new ConditionalProbe(probe.getRef(), new Predicate() {
+ @Override
+ public boolean apply(@Nullable Object input) {
+ return clazz.equals(input.getClass());
+ }
+ });
- }
+ ActorSelection subject = node.actorSelection(subjectPath);
+ subject.tell(conditionalProbe, ActorRef.noSender());
- private List<Pair<ActorRef, Long>> findRouters(ActorRef registry, JavaTestKit receivingActor) throws URISyntaxException {
- registry.tell(new FindRouters(createRouteId()), receivingActor.getRef());
- FindRoutersReply message = receivingActor.expectMsgClass(JavaTestKit.duration("10 second"), FindRoutersReply.class);
- return message.getRouterWithUpdateTime();
- }
+ return probe;
- private void validateMultiRouterReceived(List<Pair<ActorRef, Long>> actual, ActorRef... expected) {
- Assert.assertTrue(actual != null);
- Assert.assertTrue(actual.size() == expected.length);
- }
+ }
- private void validateRouterReceived(List<Pair<ActorRef, Long>> actual, ActorRef expected){
- Assert.assertTrue(actual != null);
- Assert.assertTrue(actual.size() == 1);
+ private void validateSystemStartup() throws InterruptedException {
- for (Pair<ActorRef, Long> pair : actual){
- Assert.assertTrue(expected.path().uid() == pair.first().path().uid());
- }
- }
+ ActorPath gossiper1Path = new ChildActorPath(new ChildActorPath(registry1.path(), "store"), "gossiper");
+ ActorPath gossiper2Path = new ChildActorPath(new ChildActorPath(registry2.path(), "store"), "gossiper");
+ ActorPath gossiper3Path = new ChildActorPath(new ChildActorPath(registry3.path(), "store"), "gossiper");
- private void validateSystemStartup() throws InterruptedException {
+ ActorSelection gossiper1 = node1.actorSelection(gossiper1Path);
+ ActorSelection gossiper2 = node2.actorSelection(gossiper2Path);
+ ActorSelection gossiper3 = node3.actorSelection(gossiper3Path);
- Thread.sleep(5000);
- ActorPath gossiper1Path = new ChildActorPath(new ChildActorPath(registry1.path(), "store"), "gossiper");
- ActorPath gossiper2Path = new ChildActorPath(new ChildActorPath(registry2.path(), "store"), "gossiper");
- ActorPath gossiper3Path = new ChildActorPath(new ChildActorPath(registry3.path(), "store"), "gossiper");
- ActorSelection gossiper1 = node1.actorSelection(gossiper1Path);
- ActorSelection gossiper2 = node2.actorSelection(gossiper2Path);
- ActorSelection gossiper3 = node3.actorSelection(gossiper3Path);
+ if (!resolveReference(gossiper1, gossiper2, gossiper3))
+ Assert.fail("Could not find gossipers");
+ }
+ private Boolean resolveReference(ActorSelection... gossipers) {
- if (!resolveReference(gossiper1, gossiper2, gossiper3))
- Assert.fail("Could not find gossipers");
- }
+ Boolean resolved = true;
+ for (int i = 0; i < 5; i++) {
- private Boolean resolveReference(ActorSelection... gossipers) throws InterruptedException {
+ resolved = true;
+ System.out.println(System.currentTimeMillis() + " Resolving gossipers; trial #" + i);
- Boolean resolved = true;
+ for (ActorSelection gossiper : gossipers) {
+ ActorRef ref = null;
- for (int i=0; i< 5; i++) {
- Thread.sleep(1000);
- for (ActorSelection gossiper : gossipers) {
- Future<ActorRef> future = gossiper.resolveOne(new FiniteDuration(5000, TimeUnit.MILLISECONDS));
+ try {
+ Future<ActorRef> future = gossiper.resolveOne(new FiniteDuration(15000, TimeUnit.MILLISECONDS));
+ ref = Await.result(future, new FiniteDuration(10000, TimeUnit.MILLISECONDS));
+ } catch (Exception e) {
+ System.out.println("Could not find gossiper in attempt#" + i + ". Got exception " + e.getMessage());
+ }
- ActorRef ref = null;
- try {
- ref = Await.result(future, new FiniteDuration(10000, TimeUnit.MILLISECONDS));
- } catch (Exception e) {
- e.printStackTrace();
- }
+ if (ref == null)
+ resolved = false;
+ }
- if (ref == null)
- resolved = false;
- }
+ if (resolved) break;
- if (resolved) break;
- }
- return resolved;
}
+ return resolved;
+ }
- private AddOrUpdateRoutes getAddRouteMessage() throws URISyntaxException {
- return new AddOrUpdateRoutes(createRouteIds());
- }
+ private AddOrUpdateRoutes getAddRouteMessage() throws URISyntaxException {
+ return new AddOrUpdateRoutes(createRouteIds());
+ }
- private RemoveRoutes getRemoveRouteMessage() throws URISyntaxException {
- return new RemoveRoutes(createRouteIds());
- }
+ private RemoveRoutes getRemoveRouteMessage() throws URISyntaxException {
+ return new RemoveRoutes(createRouteIds());
+ }
- private List<RpcRouter.RouteIdentifier<?,?,?>> createRouteIds() throws URISyntaxException {
- QName type = new QName(new URI("/mockrpc"), "mockrpc");
- List<RpcRouter.RouteIdentifier<?,?,?>> routeIds = new ArrayList<>();
- routeIds.add(new RouteIdentifierImpl(null, type, null));
- return routeIds;
- }
+ private List<RpcRouter.RouteIdentifier<?, ?, ?>> createRouteIds() throws URISyntaxException {
+ QName type = new QName(new URI("/mockrpc"), "mockrpc");
+ List<RpcRouter.RouteIdentifier<?, ?, ?>> routeIds = new ArrayList<>();
+ routeIds.add(new RouteIdentifierImpl(null, type, null));
+ return routeIds;
+ }
- private RpcRouter.RouteIdentifier<?,?,?> createRouteId() throws URISyntaxException {
- QName type = new QName(new URI("/mockrpc"), "mockrpc");
- return new RouteIdentifierImpl(null, type, null);
- }
-}
\ No newline at end of file
+}
odl-cluster{
akka {
- loglevel = "INFO"
+ loglevel = "DEBUG"
#log-config-on-start = on
actor {
loggers = ["akka.event.slf4j.Slf4jLogger"]
actor {
provider = "akka.cluster.ClusterActorRefProvider"
+ debug {
+ #lifecycle = on
+ }
}
remote {
log-received-messages = off
--- /dev/null
+<configuration scan="true">
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
+ </pattern>
+ </encoder>
+ </appender>
+
+ <root level="debug">
+ <appender-ref ref="STDOUT" />
+ </root>
+</configuration>
protected static final String API_VERSION = "1.0.0";
protected static final String SWAGGER_VERSION = "1.2";
protected static final String RESTCONF_CONTEXT_ROOT = "restconf";
+
+ static final String MODULE_NAME_SUFFIX = "_module";
protected final DateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
private final ModelGenerator jsonConverter = new ModelGenerator();
List<Parameter> pathParams = new ArrayList<Parameter>();
String resourcePath = getDataStorePath("/config/", context);
+ addRootPostLink(m, (DataNodeContainer) node, pathParams, resourcePath, apis);
addApis(node, apis, resourcePath, pathParams, schemaContext, true);
pathParams = new ArrayList<Parameter>();
return null;
}
+ private void addRootPostLink(final Module m, final DataNodeContainer node, final List<Parameter> pathParams,
+ final String resourcePath, final List<Api> apis) {
+ if (containsListOrContainer(m.getChildNodes())) {
+ final Api apiForRootPostUri = new Api();
+ apiForRootPostUri.setPath(resourcePath);
+ apiForRootPostUri.setOperations(operationPost(m.getName()+MODULE_NAME_SUFFIX, m.getDescription(), m, pathParams, true));
+ apis.add(apiForRootPostUri);
+ }
+ }
+
protected ApiDeclaration createApiDeclaration(String basePath) {
ApiDeclaration doc = new ApiDeclaration();
doc.setApiVersion(API_VERSION);
String resourcePath = parentPath + createPath(node, pathParams, schemaContext) + "/";
_logger.debug("Adding path: [{}]", resourcePath);
api.setPath(resourcePath);
- api.setOperations(operations(node, pathParams, addConfigApi));
- apis.add(api);
+
+ Iterable<DataSchemaNode> childSchemaNodes = Collections.<DataSchemaNode> emptySet();
if ((node instanceof ListSchemaNode) || (node instanceof ContainerSchemaNode)) {
- DataNodeContainer schemaNode = (DataNodeContainer) node;
-
- for (DataSchemaNode childNode : schemaNode.getChildNodes()) {
- // We don't support going to leaf nodes today. Only lists and
- // containers.
- if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
- // keep config and operation attributes separate.
- if (childNode.isConfiguration() == addConfigApi) {
- addApis(childNode, apis, resourcePath, pathParams, schemaContext, addConfigApi);
- }
+ DataNodeContainer dataNodeContainer = (DataNodeContainer) node;
+ childSchemaNodes = dataNodeContainer.getChildNodes();
+ }
+ api.setOperations(operation(node, pathParams, addConfigApi, childSchemaNodes));
+ apis.add(api);
+
+ for (DataSchemaNode childNode : childSchemaNodes) {
+ if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
+ // keep config and operation attributes separate.
+ if (childNode.isConfiguration() == addConfigApi) {
+ addApis(childNode, apis, resourcePath, pathParams, schemaContext, addConfigApi);
}
}
}
}
+ private boolean containsListOrContainer(final Iterable<DataSchemaNode> nodes) {
+ for (DataSchemaNode child : nodes) {
+ if (child instanceof ListSchemaNode || child instanceof ContainerSchemaNode) {
+ return true;
+ }
+ }
+ return false;
+ }
+
/**
* @param node
* @param pathParams
* @return
*/
- private List<Operation> operations(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig) {
+ private List<Operation> operation(DataSchemaNode node, List<Parameter> pathParams, boolean isConfig, Iterable<DataSchemaNode> childSchemaNodes) {
List<Operation> operations = new ArrayList<>();
OperationBuilder.Get getBuilder = new OperationBuilder.Get(node, isConfig);
operations.add(getBuilder.pathParams(pathParams).build());
if (isConfig) {
- OperationBuilder.Post postBuilder = new OperationBuilder.Post(node);
- operations.add(postBuilder.pathParams(pathParams).build());
-
- OperationBuilder.Put putBuilder = new OperationBuilder.Put(node);
+ OperationBuilder.Put putBuilder = new OperationBuilder.Put(node.getQName().getLocalName(),
+ node.getDescription());
operations.add(putBuilder.pathParams(pathParams).build());
OperationBuilder.Delete deleteBuilder = new OperationBuilder.Delete(node);
operations.add(deleteBuilder.pathParams(pathParams).build());
+
+ if (containsListOrContainer(childSchemaNodes)) {
+ operations.addAll(operationPost(node.getQName().getLocalName(), node.getDescription(), (DataNodeContainer) node,
+ pathParams, isConfig));
+ }
+ }
+ return operations;
+ }
+
+ /**
+ * @param name local name of the node for which the POST operation is built
+ * @param description description used as the operation notes
+ * @param dataNodeContainer container whose child lists/containers become body parameters
+ * @param pathParams path parameters inherited from the resource URI
+ * @param isConfig whether this is a config (writable) node; POST is only generated for config
+ * @return list containing the POST operation, or an empty list if not config
+ */
+ private List<Operation> operationPost(final String name, final String description, final DataNodeContainer dataNodeContainer, List<Parameter> pathParams, boolean isConfig) {
+ List<Operation> operations = new ArrayList<>();
+ if (isConfig) {
+ OperationBuilder.Post postBuilder = new OperationBuilder.Post(name, description, dataNodeContainer);
+ operations.add(postBuilder.pathParams(pathParams).build());
}
return operations;
}
*/
package org.opendaylight.controller.sal.rest.doc.impl;
+import static org.opendaylight.controller.sal.rest.doc.impl.BaseYangSwaggerGenerator.MODULE_NAME_SUFFIX;
+import static org.opendaylight.controller.sal.rest.doc.model.builder.OperationBuilder.Post.METHOD_NAME;
import static org.opendaylight.controller.sal.rest.doc.util.RestDocgenUtil.resolveNodesName;
+import com.google.common.base.Preconditions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import org.opendaylight.yangtools.yang.model.api.ChoiceNode;
import org.opendaylight.yangtools.yang.model.api.ConstraintDefinition;
import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
import org.opendaylight.yangtools.yang.model.api.IdentitySchemaNode;
import org.opendaylight.yangtools.yang.model.api.LeafListSchemaNode;
import org.opendaylight.yangtools.yang.model.api.Module;
import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang.model.api.SchemaNode;
import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
public JSONObject convertToJsonSchema(Module module, SchemaContext schemaContext) throws IOException, JSONException {
JSONObject models = new JSONObject();
topLevelModule = module;
- processContainers(module, models, schemaContext);
+ processModules(module, models);
+ processContainersAndLists(module, models, schemaContext);
processRPCs(module, models, schemaContext);
processIdentities(module, models);
return models;
}
- private void processContainers(Module module, JSONObject models, SchemaContext schemaContext) throws IOException,
- JSONException {
+ private void processModules(Module module, JSONObject models) throws JSONException {
+ createConcreteModelForPost(models, module.getName()+MODULE_NAME_SUFFIX, createPropertiesForPost(module));
+ }
+
+ private void processContainersAndLists(Module module, JSONObject models, SchemaContext schemaContext)
+ throws IOException, JSONException {
String moduleName = module.getName();
for (DataSchemaNode childNode : module.getChildNodes()) {
- JSONObject configModuleJSON = null;
- JSONObject operationalModuleJSON = null;
-
- String childNodeName = childNode.getQName().getLocalName();
- /*
- * For every container in the module
- */
- if (childNode instanceof ContainerSchemaNode) {
- configModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models, true,
- schemaContext);
- operationalModuleJSON = processContainer((ContainerSchemaNode) childNode, moduleName, true, models,
- false, schemaContext);
- }
-
- if (configModuleJSON != null) {
- _logger.debug("Adding model for [{}]", OperationBuilder.CONFIG + childNodeName);
- configModuleJSON.put("id", OperationBuilder.CONFIG + childNodeName);
- models.put(OperationBuilder.CONFIG + childNodeName, configModuleJSON);
- }
- if (operationalModuleJSON != null) {
- _logger.debug("Adding model for [{}]", OperationBuilder.OPERATIONAL + childNodeName);
- operationalModuleJSON.put("id", OperationBuilder.OPERATIONAL + childNodeName);
- models.put(OperationBuilder.OPERATIONAL + childNodeName, operationalModuleJSON);
- }
+ // For every container and list in the module
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, true, schemaContext);
+ processDataNodeContainer((DataNodeContainer) childNode, moduleName, models, false, schemaContext);
}
}
ContainerSchemaNode input = rpc.getInput();
if (input != null) {
- JSONObject inputJSON = processContainer(input, moduleName, true, models, schemaContext);
+ JSONObject inputJSON = processDataNodeContainer(input, moduleName, models, schemaContext);
String filename = "(" + rpc.getQName().getLocalName() + ")input";
inputJSON.put("id", filename);
// writeToFile(filename, inputJSON.toString(2), moduleName);
ContainerSchemaNode output = rpc.getOutput();
if (output != null) {
- JSONObject outputJSON = processContainer(output, moduleName, true, models, schemaContext);
+ JSONObject outputJSON = processDataNodeContainer(output, moduleName, models, schemaContext);
String filename = "(" + rpc.getQName().getLocalName() + ")output";
outputJSON.put("id", filename);
models.put(filename, outputJSON);
}
/**
- * Processes the container node and populates the moduleJSON
+ * Processes the container and list nodes and populates the moduleJSON
*
* @param container
* @param moduleName
* @throws JSONException
* @throws IOException
*/
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt,
- JSONObject models, SchemaContext schemaContext) throws JSONException, IOException {
- return processContainer(container, moduleName, addSchemaStmt, models, (Boolean) null, schemaContext);
+ private JSONObject processDataNodeContainer(DataNodeContainer dataNode, String moduleName, JSONObject models,
+ SchemaContext schemaContext) throws JSONException, IOException {
+ return processDataNodeContainer(dataNode, moduleName, models, (Boolean) null, schemaContext);
}
- private JSONObject processContainer(ContainerSchemaNode container, String moduleName, boolean addSchemaStmt,
- JSONObject models, Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
- JSONObject moduleJSON = getSchemaTemplate();
- if (addSchemaStmt) {
- moduleJSON = getSchemaTemplate();
- } else {
- moduleJSON = new JSONObject();
+ private JSONObject processDataNodeContainer(DataNodeContainer dataNode, String moduleName, JSONObject models,
+ Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
+ if (dataNode instanceof ListSchemaNode || dataNode instanceof ContainerSchemaNode) {
+ Preconditions.checkArgument(dataNode instanceof SchemaNode, "Data node should be also schema node");
+ Iterable<DataSchemaNode> containerChildren = dataNode.getChildNodes();
+ JSONObject properties = processChildren(containerChildren, ((SchemaNode) dataNode).getQName(), moduleName,
+ models, isConfig, schemaContext);
+
+ String nodeName = (BooleanUtils.isNotFalse(isConfig) ? OperationBuilder.CONFIG
+ : OperationBuilder.OPERATIONAL) + ((SchemaNode) dataNode).getQName().getLocalName();
+
+ JSONObject childSchema = getSchemaTemplate();
+ childSchema.put(TYPE_KEY, OBJECT_TYPE);
+ childSchema.put(PROPERTIES_KEY, properties);
+ childSchema.put("id", nodeName);
+ models.put(nodeName, childSchema);
+
+ if (BooleanUtils.isNotFalse(isConfig)) {
+ createConcreteModelForPost(models, ((SchemaNode) dataNode).getQName().getLocalName(),
+ createPropertiesForPost(dataNode));
+ }
+
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY, nodeName);
+ JSONObject dataNodeProperties = new JSONObject();
+ dataNodeProperties.put(TYPE_KEY, dataNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
+ dataNodeProperties.put(ITEMS_KEY, items);
+
+ return dataNodeProperties;
}
- moduleJSON.put(TYPE_KEY, OBJECT_TYPE);
+ return null;
+ }
- String containerDescription = container.getDescription();
- moduleJSON.put(DESCRIPTION_KEY, containerDescription);
+ private void createConcreteModelForPost(final JSONObject models, final String localName, final JSONObject properties)
+ throws JSONException {
+ String nodePostName = OperationBuilder.CONFIG + localName + METHOD_NAME;
+ JSONObject postSchema = getSchemaTemplate();
+ postSchema.put(TYPE_KEY, OBJECT_TYPE);
+ postSchema.put("id", nodePostName);
+ postSchema.put(PROPERTIES_KEY, properties);
+ models.put(nodePostName, postSchema);
+ }
- JSONObject properties = processChildren(container.getChildNodes(), container.getQName(), moduleName, models,
- isConfig, schemaContext);
- moduleJSON.put(PROPERTIES_KEY, properties);
- return moduleJSON;
+ private JSONObject createPropertiesForPost(final DataNodeContainer dataNodeContainer) throws JSONException {
+ JSONObject properties = new JSONObject();
+ for (DataSchemaNode childNode : dataNodeContainer.getChildNodes()) {
+ if (childNode instanceof ListSchemaNode || childNode instanceof ContainerSchemaNode) {
+ JSONObject items = new JSONObject();
+ items.put(REF_KEY, "(config)" + childNode.getQName().getLocalName());
+ JSONObject property = new JSONObject();
+ property.put(TYPE_KEY, childNode instanceof ListSchemaNode ? ARRAY_TYPE : OBJECT_TYPE);
+ property.put(ITEMS_KEY, items);
+ properties.put(childNode.getQName().getLocalName(), property);
+ }
+ }
+ return properties;
}
private JSONObject processChildren(Iterable<DataSchemaNode> nodes, QName parentQName, String moduleName,
if (node instanceof LeafSchemaNode) {
property = processLeafNode((LeafSchemaNode) node);
} else if (node instanceof ListSchemaNode) {
- property = processListSchemaNode((ListSchemaNode) node, moduleName, models, isConfig, schemaContext);
+ property = processDataNodeContainer((ListSchemaNode) node, moduleName, models, isConfig,
+ schemaContext);
} else if (node instanceof LeafListSchemaNode) {
property = processLeafListNode((LeafListSchemaNode) node);
property = processAnyXMLNode((AnyXmlSchemaNode) node);
} else if (node instanceof ContainerSchemaNode) {
- property = processContainer((ContainerSchemaNode) node, moduleName, false, models, isConfig,
+ property = processDataNodeContainer((ContainerSchemaNode) node, moduleName, models, isConfig,
schemaContext);
} else {
}
}
- /**
- * Parses a ListSchema node.
- *
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence, we
- * have to write some properties to a new file, while continuing to process the rest.
- *
- * @param listNode
- * @param moduleName
- * @param isConfig
- * @return
- * @throws JSONException
- * @throws IOException
- */
- private JSONObject processListSchemaNode(ListSchemaNode listNode, String moduleName, JSONObject models,
- Boolean isConfig, SchemaContext schemaContext) throws JSONException, IOException {
-
- String fileName = (BooleanUtils.isNotFalse(isConfig) ? OperationBuilder.CONFIG : OperationBuilder.OPERATIONAL)
- + listNode.getQName().getLocalName();
-
- JSONObject childSchemaProperties = processChildren(listNode.getChildNodes(), listNode.getQName(), moduleName,
- models, schemaContext);
- JSONObject childSchema = getSchemaTemplate();
- childSchema.put(TYPE_KEY, OBJECT_TYPE);
- childSchema.put(PROPERTIES_KEY, childSchemaProperties);
-
- /*
- * Due to a limitation of the RAML--->JAX-RS tool, sub-properties must be in a separate JSON schema file. Hence,
- * we have to write some properties to a new file, while continuing to process the rest.
- */
- // writeToFile(fileName, childSchema.toString(2), moduleName);
- childSchema.put("id", fileName);
- models.put(fileName, childSchema);
-
- JSONObject listNodeProperties = new JSONObject();
- listNodeProperties.put(TYPE_KEY, ARRAY_TYPE);
-
- JSONObject items = new JSONObject();
- items.put(REF_KEY, fileName);
- listNodeProperties.put(ITEMS_KEY, items);
-
- return listNodeProperties;
-
- }
-
/**
*
* @param leafNode
import java.util.ArrayList;
import java.util.List;
-
import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
+import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.ListSchemaNode;
/**
*
*/
public static class Put {
protected Operation spec;
- protected DataSchemaNode schemaNode;
+ protected String nodeName;
private final String METHOD_NAME = "PUT";
- public Put(DataSchemaNode node) {
- this.schemaNode = node;
+ public Put(String nodeName, final String description) {
+ this.nodeName = nodeName;
spec = new Operation();
- spec.setType(CONFIG + node.getQName().getLocalName());
- spec.setNotes(node.getDescription());
+ spec.setType(CONFIG + nodeName);
+ spec.setNotes(description);
}
public Put pathParams(List<Parameter> params) {
List<Parameter> parameters = new ArrayList<>(params);
Parameter payload = new Parameter();
payload.setParamType("body");
- payload.setType(CONFIG + schemaNode.getQName().getLocalName());
+ payload.setType(CONFIG + nodeName);
parameters.add(payload);
spec.setParameters(parameters);
return this;
public Operation build() {
spec.setMethod(METHOD_NAME);
- spec.setNickname(METHOD_NAME + "-" + schemaNode.getQName().getLocalName());
+ spec.setNickname(METHOD_NAME + "-" + nodeName);
return spec;
}
}
*/
public static final class Post extends Put {
- private final String METHOD_NAME = "POST";
+ public static final String METHOD_NAME = "POST";
+ private final DataNodeContainer dataNodeContainer;
- public Post(DataSchemaNode node) {
- super(node);
+ public Post(final String nodeName, final String description, final DataNodeContainer dataNodeContainer) {
+ super(nodeName, description);
+ this.dataNodeContainer = dataNodeContainer;
+ spec.setType(CONFIG + nodeName + METHOD_NAME);
}
@Override
public Operation build() {
spec.setMethod(METHOD_NAME);
- spec.setNickname(METHOD_NAME + "-" + schemaNode.getQName().getLocalName());
+ spec.setNickname(METHOD_NAME + "-" + nodeName);
return spec;
}
+
+ @Override
+ public Put pathParams(List<Parameter> params) {
+ List<Parameter> parameters = new ArrayList<>(params);
+ for (DataSchemaNode node : dataNodeContainer.getChildNodes()) {
+ if (node instanceof ListSchemaNode || node instanceof ContainerSchemaNode) {
+ Parameter payload = new Parameter();
+ payload.setParamType("body");
+ payload.setType(CONFIG + node.getQName().getLocalName());
+ payload.setName("**"+CONFIG + node.getQName().getLocalName());
+ parameters.add(payload);
+ }
+ }
+ spec.setParameters(parameters);
+ return this;
+
+ }
+
+ public Post summary(final String summary) {
+ spec.setSummary(summary);
+ return this;
+ }
}
/**
import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.opendaylight.controller.sal.rest.doc.swagger.Api;
import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
import org.opendaylight.controller.sal.rest.doc.swagger.Operation;
+import org.opendaylight.controller.sal.rest.doc.swagger.Parameter;
import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
import org.opendaylight.yangtools.yang.model.api.Module;
}
/**
- * Method: getApiDeclaration(String module, String revision, UriInfo
- * uriInfo)
+ * Method: getApiDeclaration(String module, String revision, UriInfo uriInfo)
*/
@Test
public void testGetModuleDoc() throws Exception {
for (Entry<File, Module> m : helper.getModules().entrySet()) {
if (m.getKey().getAbsolutePath().endsWith("toaster_short.yang")) {
- ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(),
- "http://localhost:8080/restconf", "",schemaContext);
+ ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), "http://localhost:8080/restconf", "",
+ schemaContext);
validateToaster(doc);
validateTosterDocContainsModulePrefixes(doc);
- Assert.assertNotNull(doc);
+ validateSwaggerModules(doc);
+ validateSwaggerApisForPost(doc);
+ }
+ }
+ }
+
+ /**
+ * Validate whether the ApiDeclaration contains Apis with concrete paths and whether these Apis contain the
+ * specified POST operations.
+ */
+ private void validateSwaggerApisForPost(final ApiDeclaration doc) {
+ // two POST URI with concrete schema name in summary
+ Api lstApi = findApi("/config/toaster2:lst/", doc);
+ assertNotNull("Api /config/toaster2:lst/ wasn't found", lstApi);
+ assertTrue("POST for cont1 in lst is missing",
+ findOperation(lstApi.getOperations(), "POST", "(config)lstPOST", "(config)lst1", "(config)cont1"));
+
+ Api cont1Api = findApi("/config/toaster2:lst/cont1/", doc);
+ assertNotNull("Api /config/toaster2:lst/cont1/ wasn't found", cont1Api);
+ assertTrue("POST for cont11 in cont1 is missing",
+ findOperation(cont1Api.getOperations(), "POST", "(config)cont1POST", "(config)cont11", "(config)lst11"));
+
+ // no POST URI
+ Api cont11Api = findApi("/config/toaster2:lst/cont1/cont11/", doc);
+ assertNotNull("Api /config/toaster2:lst/cont1/cont11/ wasn't found", cont11Api);
+ assertTrue("POST operation shouldn't be present.", findOperations(cont11Api.getOperations(), "POST").isEmpty());
+
+ }
+
+ /**
+ * Tries to find an operation with method {@code operationName} and type {@code type} whose parameters include all
+ * of {@code searchedParameters}.
+ */
+ private boolean findOperation(List<Operation> operations, String operationName, String type,
+ String... searchedParameters) {
+ Set<Operation> filteredOperations = findOperations(operations, operationName);
+ for (Operation operation : filteredOperations) {
+ if (operation.getType().equals(type)) {
+ List<Parameter> parameters = operation.getParameters();
+ return containAllParameters(parameters, searchedParameters);
}
}
+ return false;
+ }
+
+ private Set<Operation> findOperations(final List<Operation> operations, final String operationName) {
+ final Set<Operation> filteredOperations = new HashSet<>();
+ for (Operation operation : operations) {
+ if (operation.getMethod().equals(operationName)) {
+ filteredOperations.add(operation);
+ }
+ }
+ return filteredOperations;
+ }
+
+ private boolean containAllParameters(final List<Parameter> searchedIns, String[] searchedWhats) {
+ for (String searchedWhat : searchedWhats) {
+ boolean parameterFound = false;
+ for (Parameter searchedIn : searchedIns) {
+ if (searchedIn.getType().equals(searchedWhat)) {
+ parameterFound = true;
+ }
+ }
+ if (!parameterFound) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Tries to find {@code Api} with path {@code path}
+ */
+ private Api findApi(final String path, final ApiDeclaration doc) {
+ for (Api api : doc.getApis()) {
+ if (api.getPath().equals(path)) {
+ return api;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Validates whether doc {@code doc} contains concrete specified models.
+ */
+ private void validateSwaggerModules(ApiDeclaration doc) {
+ JSONObject models = doc.getModels();
+ assertNotNull(models);
+ try {
+ JSONObject configLst = models.getJSONObject("(config)lst");
+ assertNotNull(configLst);
+
+ containsReferences(configLst, "lst1");
+ containsReferences(configLst, "cont1");
+
+ JSONObject configLst1 = models.getJSONObject("(config)lst1");
+ assertNotNull(configLst1);
+
+ JSONObject configCont1 = models.getJSONObject("(config)cont1");
+ assertNotNull(configCont1);
+
+ containsReferences(configCont1, "cont11");
+ containsReferences(configCont1, "lst11");
+
+ JSONObject configCont11 = models.getJSONObject("(config)cont11");
+ assertNotNull(configCont11);
+
+ JSONObject configLst11 = models.getJSONObject("(config)lst11");
+ assertNotNull(configLst11);
+ } catch (JSONException e) {
+ fail("JSONException wasn't expected");
+ }
+
+ }
+
+ /**
+ * Checks whether {@code mainObject} contains, under properties/{@code childObject}/items, a {@code $ref} key with the expected value.
+ */
+ private void containsReferences(final JSONObject mainObject, final String childObject) throws JSONException {
+ JSONObject properties = mainObject.getJSONObject("properties");
+ assertNotNull(properties);
+
+ JSONObject nodeInProperties = properties.getJSONObject(childObject);
+ assertNotNull(nodeInProperties);
+
+ JSONObject itemsInNodeInProperties = nodeInProperties.getJSONObject("items");
+ assertNotNull(itemsInNodeInProperties);
+
+ String itemRef = itemsInNodeInProperties.getString("$ref");
+ assertEquals("(config)" + childObject, itemRef);
}
@Test
for (Entry<File, Module> m : helper.getModules().entrySet()) {
if (m.getKey().getAbsolutePath().endsWith("toaster.yang")) {
- ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(),
- "http://localhost:8080/restconf", "",schemaContext);
+ ApiDeclaration doc = generator.getSwaggerDocSpec(m.getValue(), "http://localhost:8080/restconf", "",
+ schemaContext);
Assert.assertNotNull(doc);
- //testing bugs.opendaylight.org bug 1290. UnionType model type.
+ // testing bugs.opendaylight.org bug 1290. UnionType model type.
String jsonString = doc.getModels().toString();
- assertTrue(
- jsonString.contains( "testUnion\":{\"type\":\"integer or string\",\"required\":false}" ) );
+ assertTrue(jsonString.contains("testUnion\":{\"type\":\"integer or string\",\"required\":false}"));
}
}
}
* @throws Exception
*/
private void validateToaster(ApiDeclaration doc) throws Exception {
- Set<String> expectedUrls = new TreeSet<>(Arrays.asList(new String[] {
- "/config/toaster2:toaster/", "/operational/toaster2:toaster/",
- "/operations/toaster2:cancel-toast", "/operations/toaster2:make-toast",
- "/operations/toaster2:restock-toaster",
+ Set<String> expectedUrls = new TreeSet<>(Arrays.asList(new String[] { "/config/toaster2:toaster/",
+ "/operational/toaster2:toaster/", "/operations/toaster2:cancel-toast",
+ "/operations/toaster2:make-toast", "/operations/toaster2:restock-toaster",
"/config/toaster2:toaster/toasterSlot/{slotId}/toaster-augmented:slotInfo/" }));
Set<String> actualUrls = new TreeSet<>();
fail("Missing expected urls: " + expectedUrls);
}
- Set<String> expectedConfigMethods = new TreeSet<>(Arrays.asList(new String[] { "GET",
- "PUT", "DELETE" }));
+ Set<String> expectedConfigMethods = new TreeSet<>(Arrays.asList(new String[] { "GET", "PUT", "DELETE" }));
Set<String> actualConfigMethods = new TreeSet<>();
for (Operation oper : configApi.getOperations()) {
actualConfigMethods.add(oper.getMethod());
// TODO: we should really do some more validation of the
// documentation...
/**
- * Missing validation: Explicit validation of URLs, and their methods
- * Input / output models.
+ * Missing validation: Explicit validation of URLs, and their methods Input / output models.
*/
}
try {
JSONObject configToaster = topLevelJson.getJSONObject("(config)toaster");
assertNotNull("(config)toaster JSON object missing", configToaster);
- //without module prefix
+ // without module prefix
containsProperties(configToaster, "toasterSlot");
JSONObject toasterSlot = topLevelJson.getJSONObject("(config)toasterSlot");
assertNotNull("(config)toasterSlot JSON object missing", toasterSlot);
- //with module prefix
+ // with module prefix
containsProperties(toasterSlot, "toaster-augmented:slotInfo");
} catch (JSONException e) {
- fail("Json exception while reading JSON object. Original message "+e.getMessage());
+ fail("Json exception while reading JSON object. Original message " + e.getMessage());
}
}
- private void containsProperties(final JSONObject jsonObject,final String...properties) throws JSONException {
+ private void containsProperties(final JSONObject jsonObject, final String... properties) throws JSONException {
for (String property : properties) {
JSONObject propertiesObject = jsonObject.getJSONObject("properties");
assertNotNull("Properties object missing in ", propertiesObject);
JSONObject concretePropertyObject = propertiesObject.getJSONObject(property);
- assertNotNull(property + " is missing",concretePropertyObject);
+ assertNotNull(property + " is missing", concretePropertyObject);
}
}
}
"This variable indicates the current state of
the toaster.";
}
- }
+ }
rpc make-toast {
description
"The darkness factor. Basically, the number of ms to multiple the doneness value by.";
}
} // container toaster
+
+ list lst {
+ container cont1 {
+ container cont11 {
+ leaf lf111 {
+ type uint32;
+ }
+ leaf lf112 {
+ type string;
+ }
+ }
+ list lst11 {
+ leaf lf111 {
+ type string;
+ }
+ }
+ }
+ list lst1 {
+ key "key1 key2";
+ leaf key1 {
+ type int32;
+ }
+ leaf key2 {
+ type int8;
+ }
+ leaf lf11 {
+ type int16;
+ }
+ }
+ leaf lf1 {
+ type string;
+ }
+ }
rpc make-toast {
description
public void applyOperation(final ReadWriteTransaction transaction) {
final Node node = toTopologyNode(toTopologyNodeId(notification.getId()), notification.getNodeRef());
final InstanceIdentifier<Node> path = getNodePath(toTopologyNodeId(notification.getId()));
- transaction.put(LogicalDatastoreType.OPERATIONAL, path, node);
+ transaction.merge(LogicalDatastoreType.OPERATIONAL, path, node, true);
}
});
}
TerminationPoint point = toTerminationPoint(toTerminationPointId(notification.getId()),
notification.getNodeConnectorRef());
final InstanceIdentifier<TerminationPoint> path = tpPath(nodeId, point.getKey().getTpId());
- transaction.put(LogicalDatastoreType.OPERATIONAL, path, point);
+ transaction.merge(LogicalDatastoreType.OPERATIONAL, path, point, true);
if ((fcncu.getState() != null && fcncu.getState().isLinkDown())
|| (fcncu.getConfiguration() != null && fcncu.getConfiguration().isPORTDOWN())) {
removeAffectedLinks(point.getTpId());
package org.opendaylight.controller.netconf.client;
import io.netty.channel.Channel;
+
import java.util.Collection;
+
import org.opendaylight.controller.netconf.nettyutil.AbstractNetconfSession;
import org.opendaylight.controller.netconf.nettyutil.handler.NetconfEXICodec;
import org.opendaylight.controller.netconf.nettyutil.handler.NetconfEXIToMessageDecoder;
private static final Logger logger = LoggerFactory.getLogger(NetconfClientSession.class);
private final Collection<String> capabilities;
- public NetconfClientSession(NetconfClientSessionListener sessionListener, Channel channel, long sessionId,
- Collection<String> capabilities) {
+ /**
+ * Construct a new session.
+ *
+ * @param sessionListener
+ * @param channel
+ * @param sessionId
+ * @param capabilities set of advertised capabilities. Expected to be immutable.
+ */
+ public NetconfClientSession(final NetconfClientSessionListener sessionListener, final Channel channel, final long sessionId,
+ final Collection<String> capabilities) {
super(sessionListener, channel, sessionId);
this.capabilities = capabilities;
logger.debug("Client Session {} created", toString());
}
@Override
- protected void addExiHandlers(NetconfEXICodec exiCodec) {
+ protected void addExiHandlers(final NetconfEXICodec exiCodec) {
// TODO used only in negotiator, client supports only auto start-exi
replaceMessageDecoder(new NetconfEXIToMessageDecoder(exiCodec));
replaceMessageEncoder(new NetconfMessageToEXIEncoder(exiCodec));
package org.opendaylight.controller.netconf.client;
+import com.google.common.collect.ImmutableList;
+
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
private static final String EXI_1_0_CAPABILITY_MARKER = "exi:1.0";
- protected NetconfClientSessionNegotiator(NetconfClientSessionPreferences sessionPreferences,
- Promise<NetconfClientSession> promise,
- Channel channel,
- Timer timer,
- NetconfClientSessionListener sessionListener,
- long connectionTimeoutMillis) {
+ protected NetconfClientSessionNegotiator(final NetconfClientSessionPreferences sessionPreferences,
+ final Promise<NetconfClientSession> promise,
+ final Channel channel,
+ final Timer timer,
+ final NetconfClientSessionListener sessionListener,
+ final long connectionTimeoutMillis) {
super(sessionPreferences, promise, channel, timer, sessionListener, connectionTimeoutMillis);
}
@Override
- protected void handleMessage(NetconfHelloMessage netconfMessage) throws NetconfDocumentedException {
+ protected void handleMessage(final NetconfHelloMessage netconfMessage) throws NetconfDocumentedException {
final NetconfClientSession session = getSessionForHelloMessage(netconfMessage);
replaceHelloMessageInboundHandler(session);
});
}
- private boolean shouldUseExi(NetconfHelloMessage helloMsg) {
+ private boolean shouldUseExi(final NetconfHelloMessage helloMsg) {
return containsExi10Capability(helloMsg.getDocument())
&& containsExi10Capability(sessionPreferences.getHelloMessage().getDocument());
}
return false;
}
- private long extractSessionId(Document doc) {
+ private long extractSessionId(final Document doc) {
final Node sessionIdNode = (Node) XmlUtil.evaluateXPath(sessionIdXPath, doc, XPathConstants.NODE);
String textContent = sessionIdNode.getTextContent();
if (textContent == null || textContent.equals("")) {
}
@Override
- protected NetconfClientSession getSession(NetconfClientSessionListener sessionListener, Channel channel,
- NetconfHelloMessage message) throws NetconfDocumentedException {
+ protected NetconfClientSession getSession(final NetconfClientSessionListener sessionListener, final Channel channel,
+ final NetconfHelloMessage message) throws NetconfDocumentedException {
long sessionId = extractSessionId(message.getDocument());
- Collection<String> capabilities = NetconfMessageUtil.extractCapabilitiesFromHello(message.getDocument());
+
+ // Copy here is important: it disconnects the strings from the document
+ Collection<String> capabilities = ImmutableList.copyOf(NetconfMessageUtil.extractCapabilitiesFromHello(message.getDocument()));
+
+ // FIXME: scalability: we could instantiate a cache to share the same collections
return new NetconfClientSession(sessionListener, channel, sessionId, capabilities);
}
private static final String EXI_CONFIRMED_HANDLER = "exiConfirmedHandler";
private final NetconfClientSession session;
- private NetconfStartExiMessage startExiMessage;
+ private final NetconfStartExiMessage startExiMessage;
- ExiConfirmationInboundHandler(NetconfClientSession session, final NetconfStartExiMessage startExiMessage) {
+ ExiConfirmationInboundHandler(final NetconfClientSession session, final NetconfStartExiMessage startExiMessage) {
this.session = session;
this.startExiMessage = startExiMessage;
}
@Override
- public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
+ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
ctx.pipeline().remove(ExiConfirmationInboundHandler.EXI_CONFIRMED_HANDLER);
NetconfMessage netconfMessage = (NetconfMessage) msg;
private OutputStream stdIn;
- private Queue<ByteBuf> postponed = new LinkedList<>();
+ private final Queue<ByteBuf> postponed = new LinkedList<>();
private ChannelHandlerContext ctx;
private ChannelPromise disconnectPromise;
private final Object lock = new Object();
- public SshClientAdapter(SshClient sshClient, Invoker invoker) {
+ public SshClientAdapter(final SshClient sshClient, final Invoker invoker) {
this.sshClient = sshClient;
this.invoker = invoker;
}
- // TODO: refactor
+ // TODO ganymed spawns a Thread that receives the data from remote inside TransportManager
+ // Get rid of this thread and reuse the Ganymed internal thread (not sure if it's possible without modifications in Ganymed)
public void run() {
try {
- SshSession session = sshClient.openSession();
+ final SshSession session = sshClient.openSession();
invoker.invoke(session);
- InputStream stdOut = session.getStdout();
- session.getStderr();
+ final InputStream stdOut = session.getStdout();
synchronized (lock) {
-
stdIn = session.getStdin();
- ByteBuf message;
- while ((message = postponed.poll()) != null) {
- writeImpl(message);
+ while (postponed.peek() != null) {
+ writeImpl(postponed.poll());
}
}
while (!stopRequested.get()) {
- byte[] readBuff = new byte[BUFFER_SIZE];
- int c = stdOut.read(readBuff);
+ final byte[] readBuff = new byte[BUFFER_SIZE];
+ final int c = stdOut.read(readBuff);
if (c == -1) {
continue;
}
- byte[] tranBuff = new byte[c];
- System.arraycopy(readBuff, 0, tranBuff, 0, c);
- ByteBuf byteBuf = Unpooled.buffer(c);
- byteBuf.writeBytes(tranBuff);
- ctx.fireChannelRead(byteBuf);
+ ctx.fireChannelRead(Unpooled.copiedBuffer(readBuff, 0, c));
}
- } catch (Exception e) {
+ } catch (final Exception e) {
logger.error("Unexpected exception", e);
} finally {
sshClient.close();
}
// TODO: needs rework to match netconf framer API.
- public void write(ByteBuf message) throws IOException {
+ public void write(final ByteBuf message) throws IOException {
synchronized (lock) {
if (stdIn == null) {
postponed.add(message);
}
}
- private void writeImpl(ByteBuf message) throws IOException {
+ private void writeImpl(final ByteBuf message) throws IOException {
message.getBytes(0, stdIn, message.readableBytes());
message.release();
stdIn.flush();
}
- public void stop(ChannelPromise promise) {
+ public void stop(final ChannelPromise promise) {
synchronized (lock) {
stopRequested.set(true);
disconnectPromise = promise;
}
}
- public Thread start(ChannelHandlerContext ctx, ChannelFuture channelFuture) {
+ public Thread start(final ChannelHandlerContext ctx, final ChannelFuture channelFuture) {
checkArgument(channelFuture.isSuccess());
checkNotNull(ctx.channel().remoteAddress());
synchronized (this) {
checkState(this.ctx == null);
this.ctx = ctx;
}
- String threadName = toString();
- Thread thread = new Thread(this, threadName);
+ final String threadName = toString();
+ final Thread thread = new Thread(this, threadName);
thread.start();
return thread;
}
package org.opendaylight.controller.netconf.nettyutil.handler.ssh.client;
import ch.ethz.ssh2.Session;
-import ch.ethz.ssh2.StreamGobbler;
-
import ch.ethz.ssh2.channel.Channel;
import java.io.Closeable;
import java.io.IOException;
class SshSession implements Closeable {
private final Session session;
- public SshSession(Session session) {
+ public SshSession(final Session session) {
this.session = session;
}
-
- public void startSubSystem(String name) throws IOException {
+ public void startSubSystem(final String name) throws IOException {
session.startSubSystem(name);
}
public InputStream getStdout() {
- return new StreamGobbler(session.getStdout());
+ return session.getStdout();
}
+ // FIXME according to http://www.ganymed.ethz.ch/ssh2/FAQ.html#blocking you should read data from both stdout and stderr to prevent window filling up (since stdout and stderr share a window)
+ // FIXME stdErr is not used anywhere
public InputStream getStderr() {
return session.getStderr();
}
ChannelFuture clientChannelFuture = initializeNettyConnection(localAddress, bossGroup, sshClientHandler);
// get channel
final Channel channel = clientChannelFuture.awaitUninterruptibly().channel();
+
+ // write additional header before polling thread is started
+ // polling thread could process and forward data before additional header is written
+ // This will result into unexpected state: hello message without additional header and the next message with additional header
+ channel.writeAndFlush(Unpooled.copiedBuffer(additionalHeader.getBytes()));
+
new ClientInputStreamPoolingThread(session, ss.getStdout(), channel, new AutoCloseable() {
@Override
public void close() throws Exception {
}
}
}, sshClientHandler.getChannelHandlerContext()).start();
-
- // write additional header
- channel.writeAndFlush(Unpooled.copiedBuffer(additionalHeader.getBytes()));
} else {
logger.debug("{} Wrong subsystem requested:'{}', closing ssh session", serverSession, subsystem);
String reason = "Only netconf subsystem is supported, requested:" + subsystem;
import java.io.InputStreamReader;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.annotation.Arg;
import net.sourceforge.argparse4j.inf.ArgumentParser;
@Arg(dest = "starting-port")
public int startingPort;
+ @Arg(dest = "generate-config-connection-timeout")
+ public int generateConfigsTimeout;
+
@Arg(dest = "generate-configs-dir")
public File generateConfigsDir;
@Arg(dest = "ssh")
public boolean ssh;
+ @Arg(dest = "exi")
+ public boolean exi;
+
static ArgumentParser getParser() {
final ArgumentParser parser = ArgumentParsers.newArgumentParser("netconf testool");
parser.addArgument("--devices-count")
.help("First port for simulated device. Each other device will have previous+1 port number")
.dest("starting-port");
+ parser.addArgument("--generate-config-connection-timeout")
+ .type(Integer.class)
+ .setDefault((int)TimeUnit.MINUTES.toMillis(5))
+ .help("Timeout to be generated in initial config files")
+ .dest("generate-config-connection-timeout");
+
parser.addArgument("--generate-configs-batch-size")
.type(Integer.class)
.setDefault(100)
.help("Whether to use ssh for transport or just pure tcp")
.dest("ssh");
+ parser.addArgument("--exi")
+ .type(Boolean.class)
+ .setDefault(false)
+ .help("Whether to use exi to transport xml content")
+ .dest("exi");
+
return parser;
}
}
public static void main(final String[] args) {
+ ch.ethz.ssh2.log.Logger.enabled = true;
+
final Params params = parseArgs(args, Params.getParser());
params.validate();
try {
final List<Integer> openDevices = netconfDeviceSimulator.start(params);
if(params.generateConfigsDir != null) {
- new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize);
+ new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout);
}
} catch (final Exception e) {
LOG.error("Unhandled exception", e);
this.openDevices = openDevices;
}
- public void generate(final boolean useSsh, final int batchSize) {
+ public void generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout) {
if(directory.exists() == false) {
checkState(directory.mkdirs(), "Unable to create folder %s" + directory);
}
configBlueprint = configBlueprint.replace(NETCONF_USE_SSH, "%s");
final String before = configBlueprint.substring(0, configBlueprint.indexOf("<module>"));
- final String middleBlueprint = configBlueprint.substring(configBlueprint.indexOf("<module>"), configBlueprint.indexOf("</module>") + "</module>".length());
+ final String middleBlueprint = configBlueprint.substring(configBlueprint.indexOf("<module>"), configBlueprint.indexOf("</module>"));
final String after = configBlueprint.substring(configBlueprint.indexOf("</module>") + "</module>".length());
int connectorCount = 0;
}
final String name = String.valueOf(openDevice) + SIM_DEVICE_SUFFIX;
- final String configContent = String.format(middleBlueprint, name, String.valueOf(openDevice), String.valueOf(!useSsh));
+ String configContent = String.format(middleBlueprint, name, String.valueOf(openDevice), String.valueOf(!useSsh));
+ configContent = String.format("%s%s%d%s\n%s\n", configContent, "<connection-timeout-millis>", generateConfigsTimeout, "</connection-timeout-millis>", "</module>");
+
b.append(configContent);
connectorCount++;
if(connectorCount == batchSize) {
import io.netty.util.HashedWheelTimer;
import java.io.Closeable;
import java.io.IOException;
-import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.net.Inet4Address;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
import org.opendaylight.controller.netconf.impl.NetconfServerDispatcher;
import org.opendaylight.controller.netconf.impl.NetconfServerSessionNegotiatorFactory;
this.hashedWheelTimer = hashedWheelTimer;
}
- private NetconfServerDispatcher createDispatcher(final Map<ModuleBuilder, String> moduleBuilders) {
+ private NetconfServerDispatcher createDispatcher(final Map<ModuleBuilder, String> moduleBuilders, final boolean exi) {
final Set<Capability> capabilities = Sets.newHashSet(Collections2.transform(moduleBuilders.keySet(), new Function<ModuleBuilder, Capability>() {
@Override
final DefaultCommitNotificationProducer commitNotifier = new DefaultCommitNotificationProducer(ManagementFactory.getPlatformMBeanServer());
+ final Set<String> serverCapabilities = exi
+ ? NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES
+ : Sets.newHashSet(XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_0, XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_1);
+
final NetconfServerSessionNegotiatorFactory serverNegotiatorFactory = new NetconfServerSessionNegotiatorFactory(
- hashedWheelTimer, simulatedOperationProvider, idProvider, CONNECTION_TIMEOUT_MILLIS, commitNotifier, new LoggingMonitoringService());
+ hashedWheelTimer, simulatedOperationProvider, idProvider, CONNECTION_TIMEOUT_MILLIS, commitNotifier, new LoggingMonitoringService(), serverCapabilities);
final NetconfServerDispatcher.ServerChannelInitializer serverChannelInitializer = new NetconfServerDispatcher.ServerChannelInitializer(
serverNegotiatorFactory);
public List<Integer> start(final Main.Params params) {
final Map<ModuleBuilder, String> moduleBuilders = parseSchemasToModuleBuilders(params);
- final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders);
+ final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders, params.exi);
int currentPort = params.startingPort;
<!-- Karaf Distribution -->
<module>opendaylight/dummy-console</module>
<module>opendaylight/karaf-branding</module>
+ <module>opendaylight/distribution/opendaylight-karaf-empty</module>
<module>opendaylight/distribution/opendaylight-karaf</module>
+ <module>opendaylight/distribution/opendaylight-karaf-resources</module>
<module>features</module>
</modules>
<scm>