Merge "Netconf-cli compilable and included in project"
author    Tony Tkacik <ttkacik@cisco.com>
          Fri, 28 Nov 2014 10:00:35 +0000 (10:00 +0000)
committer Gerrit Code Review <gerrit@opendaylight.org>
          Fri, 28 Nov 2014 10:00:35 +0000 (10:00 +0000)
115 files changed:
features/mdsal/pom.xml
opendaylight/config/config-persister-file-xml-adapter/src/test/java/org/opendaylight/controller/config/persist/storage/file/xml/FileStorageAdapterTest.java
opendaylight/config/logback-config-loader/src/main/java/org/opendaylight/controller/logback/config/loader/Activator.java
opendaylight/config/logback-config-loader/src/main/java/org/opendaylight/controller/logback/config/loader/impl/LogbackConfigUtil.java
opendaylight/config/logback-config-loader/src/main/java/org/opendaylight/controller/logback/config/loader/impl/LogbackConfigurationLoader.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/LogbackConfigurationLoaderTest.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/TestAppender.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/logwork/Debugger.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/logwork/Errorer.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/logwork/Informer.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/logwork/Tracer.java
opendaylight/config/logback-config-loader/src/test/java/org/opendaylight/controller/logback/config/loader/test/logwork/Warner.java
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/stream/NormalizedNodeInputStreamReader.java
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/stream/NormalizedNodeOutputStreamWriter.java
opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/stream/NormalizedNodeStreamReader.java
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/utils/stream/NormalizedNodeStreamReaderWriterTest.java
opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/cluster/datastore/node/utils/stream/SampleNormalizedNodeSerializable.java
opendaylight/md-sal/sal-rest-docgen/pom.xml
opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/StaticDocGenerator.java [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/README.txt [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/fonts.css [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/index.html [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/opendaylight.css [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger-ui.js [new file with mode: 0644]
opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger.js [new file with mode: 0644]
opendaylight/md-sal/samples/clustering-test-app/provider/pom.xml
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/StatPermCollector.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/StatisticsManager.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatAbstractListenCommit.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatAbstractNotifyCommit.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatListenCommitGroup.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatListenCommitMeter.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatListenCommitQueue.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatNodeRegistrationImpl.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatPermCollectorImpl.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatRpcMsgManagerImpl.java
opendaylight/md-sal/statistics-manager/src/main/java/org/opendaylight/controller/md/statistics/manager/impl/StatisticsManagerImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/CapabilityProviderImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/DefaultCommitNotificationProducer.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSession.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionListener.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionListenerFactory.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiator.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiatorFactory.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/SubtreeFilter.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/CapabilityProvider.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultCloseSession.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultCommit.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultGetSchema.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultStartExi.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultStopExi.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivator.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/osgi/NetconfMonitoringServiceImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationRouterImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryListenerImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceSnapshotImpl.java
opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/util/DeserializerExceptionHandler.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/AdditionalHeaderParserTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/ConcurrentClientsTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/NetconfDispatcherImplTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/NetconfMonitoringServiceImplTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/SubtreeFilterTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultCloseSessionTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultCommitTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultGetSchemaTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/mapping/operations/DefaultStopExiTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivatorTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryTrackerTest.java
opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/util/DeserializerExceptionHandlerTest.java
opendaylight/netconf/netconf-it/src/test/java/org/opendaylight/controller/netconf/it/AbstractNetconfConfigTest.java
opendaylight/netconf/netconf-it/src/test/java/org/opendaylight/controller/netconf/it/NetconfITMonitoringTest.java
opendaylight/netconf/netconf-it/src/test/java/org/opendaylight/controller/netconf/it/NetconfITSecureTest.java
opendaylight/netconf/netconf-it/src/test/java/org/opendaylight/controller/netconf/it/SSLUtil.java
opendaylight/netconf/netconf-mapping-api/src/main/java/org/opendaylight/controller/netconf/mapping/api/Capability.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/Get.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/osgi/NetconfMonitoringActivator.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/osgi/NetconfMonitoringServiceTracker.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/xml/JaxBSerializer.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/xml/model/MonitoringSchema.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/xml/model/MonitoringSession.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/xml/model/NetconfState.java
opendaylight/netconf/netconf-monitoring/src/main/java/org/opendaylight/controller/netconf/monitoring/xml/model/package-info.java
opendaylight/netconf/netconf-monitoring/src/test/java/org/opendaylight/controller/netconf/monitoring/osgi/NetconfMonitoringActivatorTest.java
opendaylight/netconf/netconf-monitoring/src/test/java/org/opendaylight/controller/netconf/monitoring/osgi/NetconfMonitoringOperationServiceTest.java
opendaylight/netconf/netconf-monitoring/src/test/java/org/opendaylight/controller/netconf/monitoring/osgi/NetconfMonitoringServiceTrackerTest.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/AbstractNetconfSession.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/AbstractNetconfSessionNegotiator.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ChunkedFramingMechanismEncoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/EOMFramingMechanismEncoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/FramingMechanismHandlerFactory.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfChunkAggregator.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfEXIToMessageDecoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfHelloMessageToXMLEncoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfMessageToXMLEncoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfXMLToHelloMessageDecoder.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/exi/EXIParameters.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/AsyncSshHandler.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/AsyncSshHandlerReader.java
opendaylight/netconf/netconf-netty-util/src/main/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/AsyncSshHandlerWriter.java
opendaylight/netconf/netconf-netty-util/src/test/java/org/opendaylight/controller/netconf/nettyutil/handler/NetconfEXIHandlersTest.java
opendaylight/netconf/netconf-netty-util/src/test/java/org/opendaylight/controller/netconf/nettyutil/handler/exi/EXIParametersTest.java
opendaylight/netconf/netconf-netty-util/src/test/java/org/opendaylight/controller/netconf/nettyutil/handler/exi/NetconfStartExiMessageTest.java
opendaylight/netconf/netconf-netty-util/src/test/java/org/opendaylight/controller/netconf/nettyutil/handler/ssh/client/AsyncSshHandlerTest.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/RemoteNetconfCommand.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/SshProxyClientHandler.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/SshProxyServer.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/osgi/AuthProviderTracker.java
opendaylight/netconf/netconf-ssh/src/main/java/org/opendaylight/controller/netconf/ssh/osgi/NetconfSSHActivator.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/EchoClient.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/EchoClientHandler.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/EchoServer.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/EchoServerHandler.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/ProxyServerHandler.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/netty/SSHTest.java
opendaylight/netconf/netconf-ssh/src/test/java/org/opendaylight/controller/netconf/ssh/authentication/SSHServerTest.java

index 036d18fb859adb534507efe6c789694ffd3c2cc4..6f43768a9dbb4a966ce35e37bcf3679cb308ef1e 100644 (file)
           </dependenciesToScan>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>unpack-static-documentation</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>generate-resources</phase>
+            <configuration>
+              <outputDirectory>${project.build.directory}/generated-resources/swagger-api-documentation</outputDirectory>
+              <includeArtifactIds>sal-rest-docgen</includeArtifactIds>
+              <includes>**/explorer/css/**/*, **/explorer/images/**/*, **/explorer/lib/**/*, **/explorer/static/**/*,</includes>
+              <excludeTransitive>true</excludeTransitive>
+              <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>yang-maven-plugin</artifactId>
+        <version>${yangtools.version}</version>
+        <dependencies>
+          <dependency>
+            <groupId>org.opendaylight.yangtools</groupId>
+            <artifactId>maven-sal-api-gen-plugin</artifactId>
+            <version>${yangtools.version}</version>
+            <type>jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.opendaylight.yangtools</groupId>
+            <artifactId>yang-binding</artifactId>
+            <version>${yangtools.version}</version>
+            <type>jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>sal-rest-docgen</artifactId>
+            <version>${mdsal.version}</version>
+            <type>jar</type>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <goals>
+              <goal>generate-sources</goal>
+            </goals>
+            <configuration>
+              <yangFilesRootDir>src</yangFilesRootDir>
+              <codeGenerators>
+                <generator>
+                  <codeGeneratorClass>org.opendaylight.controller.sal.rest.doc.impl.StaticDocGenerator</codeGeneratorClass>
+                  <outputBaseDir>${project.build.directory}/generated-resources/swagger-api-documentation/explorer/static</outputBaseDir>
+                </generator>
+              </codeGenerators>
+              <inspectDependencies>true</inspectDependencies>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
   <scm>
index 3ea9a48a5d759bb9c2cecf830fdc4cd40e3c9bdf..16ea1b843102c46bc637d81a61653e6bf1e6710a 100644 (file)
@@ -38,6 +38,7 @@ public class FileStorageAdapterTest {
     @Before
     public void setUp() throws Exception {
         file = Files.createTempFile("testFilePersist", ".txt").toFile();
+        file.deleteOnExit();
         if (!file.exists()) {
             return;
         }
@@ -185,6 +186,7 @@ public class FileStorageAdapterTest {
     @Test
     public void testNoLastConfig() throws Exception {
         File file = Files.createTempFile("testFilePersist", ".txt").toFile();
+        file.deleteOnExit();
         if (!file.exists()) {
             return;
         }
index 99866d576727eb7149290aa5e74cc01071a00d9b..fa670098581e9108088ed7b81ca3b9e156a88b67 100644 (file)
@@ -9,7 +9,6 @@ package org.opendaylight.controller.logback.config.loader;
 
 import java.io.File;
 import java.util.List;
-
 import org.opendaylight.controller.logback.config.loader.impl.LogbackConfigUtil;
 import org.opendaylight.controller.logback.config.loader.impl.LogbackConfigurationLoader;
 import org.osgi.framework.BundleActivator;
@@ -27,7 +26,7 @@ public class Activator implements BundleActivator {
      * logback configurations
      */
     private static final String LOGBACK_CONFIG_D = "logback.config.d";
-    private static Logger LOG = LoggerFactory.getLogger(Activator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Activator.class);
 
     @Override
     public void start(BundleContext context) {
index 2aa6b1a3484e4d8c53fb1132378403b54b2ea92b..accb6e1d038d21fb30c93b9a52b40adf78885549 100644 (file)
@@ -7,18 +7,16 @@
  */
 package org.opendaylight.controller.logback.config.loader.impl;
 
+import ch.qos.logback.classic.LoggerContext;
+import ch.qos.logback.classic.joran.JoranConfigurator;
+import ch.qos.logback.core.joran.spi.JoranException;
+import ch.qos.logback.core.util.StatusPrinter;
 import java.io.File;
 import java.net.URL;
-
 import org.slf4j.ILoggerFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import ch.qos.logback.classic.LoggerContext;
-import ch.qos.logback.classic.joran.JoranConfigurator;
-import ch.qos.logback.core.joran.spi.JoranException;
-import ch.qos.logback.core.util.StatusPrinter;
-
 /**
  * Logback configuration loader.
  * Strategy:
index adbb9f4cd63db212df666a68101f61459f76733f..44a84b7a9ed7a19577abe271cb14869a1994fd06 100644 (file)
@@ -10,7 +10,6 @@ package org.opendaylight.controller.logback.config.loader.test;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -33,7 +32,7 @@ public class LogbackConfigurationLoaderTest {
 
     /** logback config root */
     private static final String LOGBACK_D = "/logback.d";
-    private static Logger LOG = LoggerFactory
+    private static final Logger LOG = LoggerFactory
             .getLogger(LogbackConfigurationLoaderTest.class);
 
     /**
index b273d2777ceb79312ce3c76044ffd409cda867d5..a950c818fe992eebffa9380c27db1d6a0725382c 100644 (file)
@@ -7,9 +7,6 @@
  */
 package org.opendaylight.controller.logback.config.loader.test;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import ch.qos.logback.classic.spi.LoggingEvent;
 import ch.qos.logback.core.Appender;
 import ch.qos.logback.core.Context;
@@ -17,6 +14,8 @@ import ch.qos.logback.core.LogbackException;
 import ch.qos.logback.core.filter.Filter;
 import ch.qos.logback.core.spi.FilterReply;
 import ch.qos.logback.core.status.Status;
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * dummy appender for collecting log messages
index a8052f71c94828c5cda097848bb9b33d7973f566..ec10ae9fa699602839566045d507a966c8a56b72 100644 (file)
@@ -15,7 +15,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Debugger {
 
-    private static Logger LOG = LoggerFactory.getLogger(Debugger.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Debugger.class);
 
     /**
      * all logging
index 0bcd830ad10a83283e2245b6b6b8065d6b91e715..e33b7f9d15097bc91876900ef74c4bb9cbff097c 100644 (file)
@@ -15,7 +15,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Errorer {
 
-    private static Logger LOG = LoggerFactory.getLogger(Errorer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Errorer.class);
 
     /**
      * all logging
index 44f09315cff09f4696d8e7e2e3164cb8170a90f6..06c518349c9ba4cfb90a5b5970da2710066144f9 100644 (file)
@@ -15,7 +15,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Informer {
 
-    private static Logger LOG = LoggerFactory.getLogger(Informer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Informer.class);
 
     /**
      * all logging
index 70df607d8dbca275ec24fe94d84f3e2b0a7b0924..c842c1a26037e3b53d50ea39b6e71823a73c38e4 100644 (file)
@@ -15,7 +15,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Tracer {
 
-    private static Logger LOG = LoggerFactory.getLogger(Tracer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Tracer.class);
 
     /**
      * all logging
index 8093180a8b94a321ca8156d09885a971fde37947..aae0c34512ea084939f7480b3a9e1997d8d15b21 100644 (file)
@@ -15,7 +15,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Warner {
 
-    private static Logger LOG = LoggerFactory.getLogger(Warner.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Warner.class);
 
     /**
      * all logging
index 797258b4438cc61825524136ecb0b6aa4ff2aa5e..9201a94de326111856ad7d4d5246264cb9edfe46 100644 (file)
 package org.opendaylight.controller.cluster.datastore.node.utils.stream;
 
 import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 import org.opendaylight.controller.cluster.datastore.node.utils.QNameFactory;
 import org.opendaylight.yangtools.yang.common.QName;
 import org.opendaylight.yangtools.yang.data.api.Node;
 import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.AugmentationNode;
-import org.opendaylight.yangtools.yang.data.api.schema.ChoiceNode;
-import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.LeafNode;
 import org.opendaylight.yangtools.yang.data.api.schema.LeafSetEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.OrderedMapNode;
-import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListNode;
 import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.CollectionNodeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeAttrBuilder;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeBuilder;
 import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.ListNodeBuilder;
 import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeAttrBuilder;
+import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeContainerBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -59,141 +55,135 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
 
     private static final String REVISION_ARG = "?revision=";
 
-    private final DataInputStream reader;
+    private final DataInput input;
 
     private final Map<Integer, String> codedStringMap = new HashMap<>();
 
     private QName lastLeafSetQName;
 
+    private NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier,
+                                      Object, LeafNode<Object>> leafBuilder;
+
+    private NormalizedNodeAttrBuilder<NodeWithValue, Object,
+                                      LeafSetEntryNode<Object>> leafSetEntryBuilder;
+
     public NormalizedNodeInputStreamReader(InputStream stream) throws IOException {
         Preconditions.checkNotNull(stream);
-        reader = new DataInputStream(stream);
+        input = new DataInputStream(stream);
+    }
+
+    public NormalizedNodeInputStreamReader(DataInput input) throws IOException {
+        this.input = Preconditions.checkNotNull(input);
     }
 
     @Override
     public NormalizedNode<?, ?> readNormalizedNode() throws IOException {
-        NormalizedNode<?, ?> node = null;
-
         // each node should start with a byte
-        byte nodeType = reader.readByte();
+        byte nodeType = input.readByte();
 
         if(nodeType == NodeTypes.END_NODE) {
             LOG.debug("End node reached. return");
             return null;
         }
-        else if(nodeType == NodeTypes.AUGMENTATION_NODE) {
-            LOG.debug("Reading augmentation node. will create augmentation identifier");
 
-            YangInstanceIdentifier.AugmentationIdentifier identifier =
-                new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
-            DataContainerNodeBuilder<YangInstanceIdentifier.AugmentationIdentifier, AugmentationNode> augmentationBuilder =
-                Builders.augmentationBuilder().withNodeIdentifier(identifier);
-            augmentationBuilder = addDataContainerChildren(augmentationBuilder);
-            node = augmentationBuilder.build();
+        switch(nodeType) {
+            case NodeTypes.AUGMENTATION_NODE :
+                YangInstanceIdentifier.AugmentationIdentifier augIdentifier =
+                    new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
 
-        } else {
-            if(nodeType == NodeTypes.LEAF_SET_ENTRY_NODE) {
-                LOG.debug("Reading leaf set entry node. Will create NodeWithValue instance identifier");
+                LOG.debug("Reading augmentation node {} ", augIdentifier);
 
-                // Read the object value
+                return addDataContainerChildren(Builders.augmentationBuilder().
+                        withNodeIdentifier(augIdentifier)).build();
+
+            case NodeTypes.LEAF_SET_ENTRY_NODE :
                 Object value = readObject();
+                NodeWithValue leafIdentifier = new NodeWithValue(lastLeafSetQName, value);
+
+                LOG.debug("Reading leaf set entry node {}, value {}", leafIdentifier, value);
+
+                return leafSetEntryBuilder().withNodeIdentifier(leafIdentifier).withValue(value).build();
 
-                YangInstanceIdentifier.NodeWithValue nodeWithValue = new YangInstanceIdentifier.NodeWithValue(
-                        lastLeafSetQName, value);
-                node =  Builders.leafSetEntryBuilder().withNodeIdentifier(nodeWithValue).
-                        withValue(value).build();
+            case NodeTypes.MAP_ENTRY_NODE :
+                NodeIdentifierWithPredicates entryIdentifier = new NodeIdentifierWithPredicates(
+                        readQName(), readKeyValueMap());
 
-            } else if(nodeType == NodeTypes.MAP_ENTRY_NODE) {
-                LOG.debug("Reading map entry node. Will create node identifier with predicates.");
+                LOG.debug("Reading map entry node {} ", entryIdentifier);
 
-                QName qName = readQName();
-                YangInstanceIdentifier.NodeIdentifierWithPredicates nodeIdentifier =
-                    new YangInstanceIdentifier.NodeIdentifierWithPredicates(qName, readKeyValueMap());
-                DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifierWithPredicates, MapEntryNode> mapEntryBuilder
-                    = Builders.mapEntryBuilder().withNodeIdentifier(nodeIdentifier);
+                return addDataContainerChildren(Builders.mapEntryBuilder().
+                        withNodeIdentifier(entryIdentifier)).build();
 
-                mapEntryBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifierWithPredicates,
-                    MapEntryNode>)addDataContainerChildren(mapEntryBuilder);
-                node = mapEntryBuilder.build();
+            default :
+                return readNodeIdentifierDependentNode(nodeType, new NodeIdentifier(readQName()));
+        }
+    }
 
-            } else {
-                LOG.debug("Creating standard node identifier. ");
+    private NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier,
+                                      Object, LeafNode<Object>> leafBuilder() {
+        if(leafBuilder == null) {
+            leafBuilder = Builders.leafBuilder();
+        }
 
-                QName qName = readQName();
-                YangInstanceIdentifier.NodeIdentifier identifier = new YangInstanceIdentifier.NodeIdentifier(qName);
-                node = readNodeIdentifierDependentNode(nodeType, identifier);
+        return leafBuilder;
+    }
 
-            }
+    private NormalizedNodeAttrBuilder<NodeWithValue, Object,
+                                      LeafSetEntryNode<Object>> leafSetEntryBuilder() {
+        if(leafSetEntryBuilder == null) {
+            leafSetEntryBuilder = Builders.leafSetEntryBuilder();
         }
-        return node;
+
+        return leafSetEntryBuilder;
     }
 
-    private NormalizedNode<?, ?> readNodeIdentifierDependentNode(byte nodeType, YangInstanceIdentifier.NodeIdentifier identifier)
+    private NormalizedNode<?, ?> readNodeIdentifierDependentNode(byte nodeType, NodeIdentifier identifier)
         throws IOException {
 
         switch(nodeType) {
             case NodeTypes.LEAF_NODE :
-                LOG.debug("Read leaf node");
+                LOG.debug("Read leaf node {}", identifier);
                 // Read the object value
-                NormalizedNodeAttrBuilder leafBuilder = Builders.leafBuilder();
-                return leafBuilder.withNodeIdentifier(identifier).withValue(readObject()).build();
+                return leafBuilder().withNodeIdentifier(identifier).withValue(readObject()).build();
 
             case NodeTypes.ANY_XML_NODE :
                 LOG.debug("Read xml node");
                 Node<?> value = (Node<?>) readObject();
-                return Builders.anyXmlBuilder().withValue(value).build();
+                return Builders.anyXmlBuilder().withValue((Node<?>) readObject()).build();
 
             case NodeTypes.MAP_NODE :
-                LOG.debug("Read map node");
-                CollectionNodeBuilder<MapEntryNode, MapNode> mapBuilder = Builders.mapBuilder().withNodeIdentifier(identifier);
-                mapBuilder = addMapNodeChildren(mapBuilder);
-                return mapBuilder.build();
+                LOG.debug("Read map node {}", identifier);
+                return addDataContainerChildren(Builders.mapBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.CHOICE_NODE :
-                LOG.debug("Read choice node");
-                DataContainerNodeBuilder<YangInstanceIdentifier.NodeIdentifier, ChoiceNode> choiceBuilder =
-                    Builders.choiceBuilder().withNodeIdentifier(identifier);
-                choiceBuilder = addDataContainerChildren(choiceBuilder);
-                return choiceBuilder.build();
+                LOG.debug("Read choice node {}", identifier);
+                return addDataContainerChildren(Builders.choiceBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.ORDERED_MAP_NODE :
-                LOG.debug("Reading ordered map node");
-                CollectionNodeBuilder<MapEntryNode, OrderedMapNode> orderedMapBuilder =
-                    Builders.orderedMapBuilder().withNodeIdentifier(identifier);
-                orderedMapBuilder = addMapNodeChildren(orderedMapBuilder);
-                return orderedMapBuilder.build();
+                LOG.debug("Reading ordered map node {}", identifier);
+                return addDataContainerChildren(Builders.orderedMapBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.UNKEYED_LIST :
-                LOG.debug("Read unkeyed list node");
-                CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> unkeyedListBuilder =
-                    Builders.unkeyedListBuilder().withNodeIdentifier(identifier);
-                unkeyedListBuilder = addUnkeyedListChildren(unkeyedListBuilder);
-                return unkeyedListBuilder.build();
+                LOG.debug("Read unkeyed list node {}", identifier);
+                return addDataContainerChildren(Builders.unkeyedListBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.UNKEYED_LIST_ITEM :
-                LOG.debug("Read unkeyed list item node");
-                DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, UnkeyedListEntryNode> unkeyedListEntryBuilder
-                    = Builders.unkeyedListEntryBuilder().withNodeIdentifier(identifier);
-
-                unkeyedListEntryBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, UnkeyedListEntryNode>)
-                    addDataContainerChildren(unkeyedListEntryBuilder);
-                return unkeyedListEntryBuilder.build();
+                LOG.debug("Read unkeyed list item node {}", identifier);
+                return addDataContainerChildren(Builders.unkeyedListEntryBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.CONTAINER_NODE :
-                LOG.debug("Read container node");
-                DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, ContainerNode> containerBuilder =
-                    Builders.containerBuilder().withNodeIdentifier(identifier);
-
-                containerBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, ContainerNode>)
-                    addDataContainerChildren(containerBuilder);
-                return containerBuilder.build();
+                LOG.debug("Read container node {}", identifier);
+                return addDataContainerChildren(Builders.containerBuilder().
+                        withNodeIdentifier(identifier)).build();
 
             case NodeTypes.LEAF_SET :
-                LOG.debug("Read leaf set node");
-                ListNodeBuilder<Object, LeafSetEntryNode<Object>> leafSetBuilder =
-                    Builders.leafSetBuilder().withNodeIdentifier(identifier);
-                leafSetBuilder = addLeafSetChildren(identifier.getNodeType(), leafSetBuilder);
-                return leafSetBuilder.build();
+                LOG.debug("Read leaf set node {}", identifier);
+                return addLeafSetChildren(identifier.getNodeType(),
+                        Builders.leafSetBuilder().withNodeIdentifier(identifier)).build();
 
             default :
                 return null;
@@ -205,12 +195,13 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
         String localName = readCodedString();
         String namespace = readCodedString();
         String revision = readCodedString();
-        String qName;
+
         // Not using stringbuilder as compiler optimizes string concatenation of +
-        if(revision != null){
-            qName = "(" + namespace+ REVISION_ARG + revision + ")" +localName;
+        String qName;
+        if(!Strings.isNullOrEmpty(revision)) {
+            qName = "(" + namespace + REVISION_ARG + revision + ")" +localName;
         } else {
-            qName = "(" + namespace + ")" +localName;
+            qName = "(" + namespace + ")" + localName;
         }
 
         return QNameFactory.create(qName);
@@ -218,33 +209,33 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
 
 
     private String readCodedString() throws IOException {
-        boolean readFromMap = reader.readBoolean();
-        if(readFromMap) {
-            return codedStringMap.get(reader.readInt());
-        } else {
-            String value = reader.readUTF();
-            if(value != null) {
-                codedStringMap.put(Integer.valueOf(codedStringMap.size()), value);
-            }
+        byte valueType = input.readByte();
+        if(valueType == NormalizedNodeOutputStreamWriter.IS_CODE_VALUE) {
+            return codedStringMap.get(input.readInt());
+        } else if(valueType == NormalizedNodeOutputStreamWriter.IS_STRING_VALUE) {
+            String value = input.readUTF();
+            codedStringMap.put(Integer.valueOf(codedStringMap.size()), value);
             return value;
         }
+
+        return null;
     }
 
     private Set<QName> readQNameSet() throws IOException{
         // Read the children count
-        int count = reader.readInt();
+        int count = input.readInt();
         Set<QName> children = new HashSet<>(count);
-        for(int i = 0; i<count; i++) {
+        for(int i = 0; i < count; i++) {
             children.add(readQName());
         }
         return children;
     }
 
     private Map<QName, Object> readKeyValueMap() throws IOException {
-        int count = reader.readInt();
+        int count = input.readInt();
         Map<QName, Object> keyValueMap = new HashMap<>(count);
 
-        for(int i = 0; i<count; i++) {
+        for(int i = 0; i < count; i++) {
             keyValueMap.put(readQName(), readObject());
         }
 
@@ -252,65 +243,69 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
     }
 
     private Object readObject() throws IOException {
-        byte objectType = reader.readByte();
+        byte objectType = input.readByte();
         switch(objectType) {
             case ValueTypes.BITS_TYPE:
                 return readObjSet();
 
             case ValueTypes.BOOL_TYPE :
-                return reader.readBoolean();
+                return input.readBoolean();
 
             case ValueTypes.BYTE_TYPE :
-                return reader.readByte();
+                return input.readByte();
 
             case ValueTypes.INT_TYPE :
-                return reader.readInt();
+                return input.readInt();
 
             case ValueTypes.LONG_TYPE :
-                return reader.readLong();
+                return input.readLong();
 
             case ValueTypes.QNAME_TYPE :
                 return readQName();
 
             case ValueTypes.SHORT_TYPE :
-                return reader.readShort();
+                return input.readShort();
 
             case ValueTypes.STRING_TYPE :
-                return reader.readUTF();
+                return input.readUTF();
 
             case ValueTypes.BIG_DECIMAL_TYPE :
-                return new BigDecimal(reader.readUTF());
+                return new BigDecimal(input.readUTF());
 
             case ValueTypes.BIG_INTEGER_TYPE :
-                return new BigInteger(reader.readUTF());
+                return new BigInteger(input.readUTF());
 
             case ValueTypes.YANG_IDENTIFIER_TYPE :
-                int size = reader.readInt();
-
-                List<YangInstanceIdentifier.PathArgument> pathArguments = new ArrayList<>(size);
-
-                for(int i=0; i<size; i++) {
-                    pathArguments.add(readPathArgument());
-                }
-                return YangInstanceIdentifier.create(pathArguments);
+            return readYangInstanceIdentifier();
 
             default :
                 return null;
         }
     }
 
+    public YangInstanceIdentifier readYangInstanceIdentifier() throws IOException {
+        int size = input.readInt();
+
+        List<PathArgument> pathArguments = new ArrayList<>(size);
+
+        for(int i = 0; i < size; i++) {
+            pathArguments.add(readPathArgument());
+        }
+        return YangInstanceIdentifier.create(pathArguments);
+    }
+
     private Set<String> readObjSet() throws IOException {
-        int count = reader.readInt();
+        int count = input.readInt();
         Set<String> children = new HashSet<>(count);
-        for(int i = 0; i<count; i++) {
+        for(int i = 0; i < count; i++) {
             children.add(readCodedString());
         }
         return children;
     }
 
-    private YangInstanceIdentifier.PathArgument readPathArgument() throws IOException {
+    private PathArgument readPathArgument() throws IOException {
         // read Type
-        int type = reader.readByte();
+        int type = input.readByte();
 
         switch(type) {
 
@@ -318,22 +313,22 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
                 return new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
 
             case PathArgumentTypes.NODE_IDENTIFIER :
-            return new YangInstanceIdentifier.NodeIdentifier(readQName());
+            return new NodeIdentifier(readQName());
 
             case PathArgumentTypes.NODE_IDENTIFIER_WITH_PREDICATES :
-            return new YangInstanceIdentifier.NodeIdentifierWithPredicates(readQName(), readKeyValueMap());
+            return new NodeIdentifierWithPredicates(readQName(), readKeyValueMap());
 
             case PathArgumentTypes.NODE_IDENTIFIER_WITH_VALUE :
-            return new YangInstanceIdentifier.NodeWithValue(readQName(), readObject());
+            return new NodeWithValue(readQName(), readObject());
 
             default :
                 return null;
         }
     }
 
+    @SuppressWarnings("unchecked")
     private ListNodeBuilder<Object, LeafSetEntryNode<Object>> addLeafSetChildren(QName nodeType,
-            ListNodeBuilder<Object, LeafSetEntryNode<Object>> builder)
-        throws IOException {
+            ListNodeBuilder<Object, LeafSetEntryNode<Object>> builder) throws IOException {
 
         LOG.debug("Reading children of leaf set");
 
@@ -348,53 +343,17 @@ public class NormalizedNodeInputStreamReader implements NormalizedNodeStreamRead
         return builder;
     }
 
-    private CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> addUnkeyedListChildren(
-        CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> builder)
-        throws IOException{
-
-        LOG.debug("Reading children of unkeyed list");
-        UnkeyedListEntryNode child = (UnkeyedListEntryNode)readNormalizedNode();
-
-        while(child != null) {
-            builder.withChild(child);
-            child = (UnkeyedListEntryNode)readNormalizedNode();
-        }
-        return builder;
-    }
-
-    private DataContainerNodeBuilder addDataContainerChildren(DataContainerNodeBuilder builder)
-        throws IOException {
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    private NormalizedNodeContainerBuilder addDataContainerChildren(
+            NormalizedNodeContainerBuilder builder) throws IOException {
         LOG.debug("Reading data container (leaf nodes) nodes");
 
-        DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?> child =
-            (DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?>) readNormalizedNode();
+        NormalizedNode<?, ?> child = readNormalizedNode();
 
         while(child != null) {
-            builder.withChild(child);
-            child =
-                (DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?>) readNormalizedNode();
+            builder.addChild(child);
+            child = readNormalizedNode();
         }
         return builder;
     }
-
-
-    private CollectionNodeBuilder addMapNodeChildren(CollectionNodeBuilder builder)
-        throws IOException {
-        LOG.debug("Reading map node children");
-        MapEntryNode child = (MapEntryNode)readNormalizedNode();
-
-        while(child != null){
-            builder.withChild(child);
-            child = (MapEntryNode)readNormalizedNode();
-        }
-
-        return builder;
-    }
-
-
-    @Override
-    public void close() throws IOException {
-        reader.close();
-    }
-
 }
index 08567fd79ee9e4980dc6a359ae6fab422040b03d..46768d5112425effba48a8a8513f288f20aaf71a 100644 (file)
@@ -17,7 +17,7 @@ import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
 import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
+import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -40,17 +40,25 @@ import java.util.Set;
  *
  */
 
-public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWriter{
+public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWriter {
 
     private static final Logger LOG = LoggerFactory.getLogger(NormalizedNodeOutputStreamWriter.class);
 
-    private final DataOutputStream writer;
+    static final byte IS_CODE_VALUE = 1;
+    static final byte IS_STRING_VALUE = 2;
+    static final byte IS_NULL_VALUE = 3;
+
+    private final DataOutput output;
 
     private final Map<String, Integer> stringCodeMap = new HashMap<>();
 
     public NormalizedNodeOutputStreamWriter(OutputStream stream) throws IOException {
         Preconditions.checkNotNull(stream);
-        writer = new DataOutputStream(stream);
+        output = new DataOutputStream(stream);
+    }
+
+    public NormalizedNodeOutputStreamWriter(DataOutput output) throws IOException {
+        this.output = Preconditions.checkNotNull(output);
     }
 
     @Override
@@ -74,7 +82,7 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
     public void leafSetEntryNode(Object value) throws IOException, IllegalArgumentException {
         LOG.debug("Writing a new leaf set entry node");
 
-        writer.writeByte(NodeTypes.LEAF_SET_ENTRY_NODE);
+        output.writeByte(NodeTypes.LEAF_SET_ENTRY_NODE);
         writeObject(value);
     }
 
@@ -142,7 +150,7 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
         Preconditions.checkNotNull(identifier, "Node identifier should not be null");
         LOG.debug("Starting a new augmentation node");
 
-        writer.writeByte(NodeTypes.AUGMENTATION_NODE);
+        output.writeByte(NodeTypes.AUGMENTATION_NODE);
         writeQNameSet(identifier.getPossibleChildNames());
     }
 
@@ -160,24 +168,22 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
     public void endNode() throws IOException, IllegalStateException {
         LOG.debug("Ending the node");
 
-        writer.writeByte(NodeTypes.END_NODE);
+        output.writeByte(NodeTypes.END_NODE);
     }
 
     @Override
     public void close() throws IOException {
-        writer.close();
     }
 
     @Override
     public void flush() throws IOException {
-        writer.flush();
     }
 
     private void startNode(final QName qName, byte nodeType) throws IOException {
 
         Preconditions.checkNotNull(qName, "QName of node identifier should not be null.");
         // First write the type of node
-        writer.writeByte(nodeType);
+        output.writeByte(nodeType);
         // Write Start Tag
         writeQName(qName);
     }
@@ -191,22 +197,23 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
 
     private void writeCodedString(String key) throws IOException {
         Integer value = stringCodeMap.get(key);
-
         if(value != null) {
-            writer.writeBoolean(true);
-            writer.writeInt(value);
+            output.writeByte(IS_CODE_VALUE);
+            output.writeInt(value);
         } else {
             if(key != null) {
+                output.writeByte(IS_STRING_VALUE);
                 stringCodeMap.put(key, Integer.valueOf(stringCodeMap.size()));
+                output.writeUTF(key);
+            } else {
+                output.writeByte(IS_NULL_VALUE);
             }
-            writer.writeBoolean(false);
-            writer.writeUTF(key);
         }
     }
 
     private void writeObjSet(Set<?> set) throws IOException {
         if(!set.isEmpty()){
-            writer.writeInt(set.size());
+            output.writeInt(set.size());
             for(Object o : set){
                 if(o instanceof String){
                     writeCodedString(o.toString());
@@ -216,14 +223,14 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
                 }
             }
         } else {
-            writer.writeInt(0);
+            output.writeInt(0);
         }
     }
 
-    private void writeYangInstanceIdentifier(YangInstanceIdentifier identifier) throws IOException {
+    public void writeYangInstanceIdentifier(YangInstanceIdentifier identifier) throws IOException {
         Iterable<YangInstanceIdentifier.PathArgument> pathArguments = identifier.getPathArguments();
         int size = Iterables.size(pathArguments);
-        writer.writeInt(size);
+        output.writeInt(size);
 
         for(YangInstanceIdentifier.PathArgument pathArgument : pathArguments) {
             writePathArgument(pathArgument);
@@ -234,7 +241,7 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
 
         byte type = PathArgumentTypes.getSerializablePathArgumentType(pathArgument);
 
-        writer.writeByte(type);
+        output.writeByte(type);
 
         switch(type) {
             case PathArgumentTypes.NODE_IDENTIFIER :
@@ -278,7 +285,7 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
 
     private void writeKeyValueMap(Map<QName, Object> keyValueMap) throws IOException {
         if(keyValueMap != null && !keyValueMap.isEmpty()) {
-            writer.writeInt(keyValueMap.size());
+            output.writeInt(keyValueMap.size());
             Set<QName> qNameSet = keyValueMap.keySet();
 
             for(QName qName : qNameSet) {
@@ -286,47 +293,48 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
                 writeObject(keyValueMap.get(qName));
             }
         } else {
-            writer.writeInt(0);
+            output.writeInt(0);
         }
     }
 
     private void writeQNameSet(Set<QName> children) throws IOException {
         // Write each child's qname separately, if list is empty send count as 0
         if(children != null && !children.isEmpty()) {
-            writer.writeInt(children.size());
+            output.writeInt(children.size());
             for(QName qName : children) {
                 writeQName(qName);
             }
         } else {
             LOG.debug("augmentation node does not have any child");
-            writer.writeInt(0);
+            output.writeInt(0);
         }
     }
 
+    @SuppressWarnings("rawtypes")
     private void writeObject(Object value) throws IOException {
 
         byte type = ValueTypes.getSerializableType(value);
         // Write object type first
-        writer.writeByte(type);
+        output.writeByte(type);
 
         switch(type) {
             case ValueTypes.BOOL_TYPE:
-                writer.writeBoolean((Boolean) value);
+                output.writeBoolean((Boolean) value);
                 break;
             case ValueTypes.QNAME_TYPE:
                 writeQName((QName) value);
                 break;
             case ValueTypes.INT_TYPE:
-                writer.writeInt((Integer) value);
+                output.writeInt((Integer) value);
                 break;
             case ValueTypes.BYTE_TYPE:
-                writer.writeByte((Byte) value);
+                output.writeByte((Byte) value);
                 break;
             case ValueTypes.LONG_TYPE:
-                writer.writeLong((Long) value);
+                output.writeLong((Long) value);
                 break;
             case ValueTypes.SHORT_TYPE:
-                writer.writeShort((Short) value);
+                output.writeShort((Short) value);
                 break;
             case ValueTypes.BITS_TYPE:
                 writeObjSet((Set<?>) value);
@@ -335,7 +343,7 @@ public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWri
                 writeYangInstanceIdentifier((YangInstanceIdentifier) value);
                 break;
             default:
-                writer.writeUTF(value.toString());
+                output.writeUTF(value.toString());
                 break;
         }
     }
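
For reference, a minimal standalone sketch of the byte-marker string-coding scheme that the writer above and the reader shown earlier now share. The marker values (IS_CODE_VALUE = 1, IS_STRING_VALUE = 2, IS_NULL_VALUE = 3) are taken from the diff; the class name and demo strings are invented for illustration. The first occurrence of a string goes over the wire as UTF text and is assigned the next integer code, later occurrences are sent as that code, and a null key is a single marker byte.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative only; not part of the patch.
    public class CodedStringDemo {
        static final byte IS_CODE_VALUE = 1;   // an int index of a previously sent string follows
        static final byte IS_STRING_VALUE = 2; // a UTF string follows and gets the next index
        static final byte IS_NULL_VALUE = 3;   // nothing follows

        private final Map<String, Integer> writeCodes = new HashMap<>();
        private final List<String> readCodes = new ArrayList<>();

        void writeCodedString(DataOutput out, String key) throws IOException {
            Integer code = writeCodes.get(key);
            if (code != null) {
                out.writeByte(IS_CODE_VALUE);
                out.writeInt(code);
            } else if (key != null) {
                out.writeByte(IS_STRING_VALUE);
                writeCodes.put(key, writeCodes.size());
                out.writeUTF(key);
            } else {
                out.writeByte(IS_NULL_VALUE);
            }
        }

        String readCodedString(DataInput in) throws IOException {
            byte marker = in.readByte();
            if (marker == IS_CODE_VALUE) {
                return readCodes.get(in.readInt());
            } else if (marker == IS_STRING_VALUE) {
                String value = in.readUTF();
                readCodes.add(value); // codes are assigned in arrival order, mirroring the writer
                return value;
            }
            return null; // IS_NULL_VALUE
        }

        public static void main(String[] args) throws IOException {
            CodedStringDemo demo = new CodedStringDemo();
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            demo.writeCodedString(out, "toaster");
            demo.writeCodedString(out, "toaster"); // marker byte + 4-byte code instead of the full string
            demo.writeCodedString(out, null);

            DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            System.out.println(demo.readCodedString(in)); // toaster
            System.out.println(demo.readCodedString(in)); // toaster (resolved from the code)
            System.out.println(demo.readCodedString(in)); // null
        }
    }
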
index c619afd7ee9f5c409f76893d5df0ed94b605263e..d2d5bf7c1dc4243dcffac95716f25879bf772ff7 100644 (file)
 
 package org.opendaylight.controller.cluster.datastore.node.utils.stream;
 
-
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-
 import java.io.IOException;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
 
-
-public interface NormalizedNodeStreamReader extends AutoCloseable {
+/**
+ * Interface for a class that can read serialized NormalizedNode instances from a stream.
+ */
+public interface NormalizedNodeStreamReader {
 
     NormalizedNode<?, ?> readNormalizedNode() throws IOException;
 }
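
As a usage sketch of the stream reader and writer from this change: assuming yangtools' NormalizedNodeWriter.forStreamWriter(...) drives the stream writer (the test below follows the same pattern), a serialize/deserialize round trip could look like the following. The helper class name is made up for illustration.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
    import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;

    // Illustrative only; error handling omitted.
    public final class StreamRoundTrip {
        private StreamRoundTrip() {
        }

        // Serializes a NormalizedNode to a byte array and reads it back.
        public static NormalizedNode<?, ?> roundTrip(NormalizedNode<?, ?> original) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            NormalizedNodeOutputStreamWriter streamWriter = new NormalizedNodeOutputStreamWriter(bos);
            NormalizedNodeWriter.forStreamWriter(streamWriter).write(original);

            NormalizedNodeInputStreamReader reader =
                    new NormalizedNodeInputStreamReader(new ByteArrayInputStream(bos.toByteArray()));
            return reader.readNormalizedNode();
        }
    }
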
index ae548dba23aa781420efbde6c8ce1522f365a1c1..8854fc73b5ebffb656f07ea83b4a7443ba69d81e 100644 (file)
@@ -7,29 +7,45 @@
  *  and is available at http://www.eclipse.org/legal/epl-v10.html
  *
  */
-
 package org.opendaylight.controller.cluster.datastore.node.utils.stream;
 
-
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import org.apache.commons.lang.SerializationUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.opendaylight.controller.cluster.datastore.util.TestModel;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
 import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
 import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
+import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 
 public class NormalizedNodeStreamReaderWriterTest {
 
-    final NormalizedNode<?, ?> input = TestModel.createTestContainer();
-
     @Test
     public void testNormalizedNodeStreamReaderWriter() throws IOException {
 
+        testNormalizedNodeStreamReaderWriter(TestModel.createTestContainer());
+
+        QName toaster = QName.create("http://netconfcentral.org/ns/toaster","2009-11-20","toaster");
+        QName darknessFactor = QName.create("http://netconfcentral.org/ns/toaster","2009-11-20","darknessFactor");
+        ContainerNode toasterNode = Builders.containerBuilder().
+                withNodeIdentifier(new NodeIdentifier(toaster)).
+                withChild(ImmutableNodes.leafNode(darknessFactor, "1000")).build();
+
+        testNormalizedNodeStreamReaderWriter(Builders.containerBuilder().
+                withNodeIdentifier(new NodeIdentifier(SchemaContext.NAME)).
+                withChild(toasterNode).build());
+    }
+
+    private void testNormalizedNodeStreamReaderWriter(NormalizedNode<?, ?> input) throws IOException {
+
         byte[] byteData = null;
 
         try(ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@@ -41,22 +57,20 @@ public class NormalizedNodeStreamReaderWriterTest {
 
         }
 
-        try(NormalizedNodeInputStreamReader reader = new NormalizedNodeInputStreamReader(
-                new ByteArrayInputStream(byteData))) {
-
-            NormalizedNode<?,?> node = reader.readNormalizedNode();
-            Assert.assertEquals(input, node);
+        NormalizedNodeInputStreamReader reader = new NormalizedNodeInputStreamReader(
+                new ByteArrayInputStream(byteData));
 
-        }
+        NormalizedNode<?,?> node = reader.readNormalizedNode();
+        Assert.assertEquals(input, node);
     }
 
     @Test
     public void testWithSerializable() {
-        SampleNormalizedNodeSerializable serializable = new SampleNormalizedNodeSerializable(input);
+        NormalizedNode<?, ?> input = TestModel.createTestContainer();
+        SampleNormalizedNodeSerializable serializable = new SampleNormalizedNodeSerializable(input);
         SampleNormalizedNodeSerializable clone = (SampleNormalizedNodeSerializable)SerializationUtils.clone(serializable);
 
         Assert.assertEquals(input, clone.getInput());
 
     }
-
 }
index 10a2ad90a5bfebed8d15926dc8bd166623e0ff9c..6539015756f7407b5caa33f059680d1ef6093718 100644 (file)
@@ -8,16 +8,16 @@
 
 package org.opendaylight.controller.cluster.datastore.node.utils.stream;
 
-
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
-
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.net.URISyntaxException;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
 
 public class SampleNormalizedNodeSerializable implements Serializable {
     private static final long serialVersionUID = 1L;
@@ -33,12 +33,12 @@ public class SampleNormalizedNodeSerializable implements Serializable {
     }
 
     private void readObject(final ObjectInputStream stream) throws IOException, ClassNotFoundException, URISyntaxException {
-        NormalizedNodeStreamReader reader = new NormalizedNodeInputStreamReader(stream);
+        NormalizedNodeStreamReader reader = new NormalizedNodeInputStreamReader((DataInput)stream);
         this.input = reader.readNormalizedNode();
     }
 
     private void writeObject(final ObjectOutputStream stream) throws IOException {
-        NormalizedNodeStreamWriter writer = new NormalizedNodeOutputStreamWriter(stream);
+        NormalizedNodeStreamWriter writer = new NormalizedNodeOutputStreamWriter((DataOutput)stream);
         NormalizedNodeWriter normalizedNodeWriter = NormalizedNodeWriter.forStreamWriter(writer);
 
         normalizedNodeWriter.write(this.input);
index 3d3c52d7d1237375e5483d2eccadde7d29736f07..53f1b2e69274465a01dc6d34000eafe446b3bdb0 100644 (file)
       <artifactId>yang-parser-impl</artifactId>
     </dependency>
 
+    <dependency>
+      <groupId>org.opendaylight.yangtools</groupId>
+      <artifactId>yang-maven-plugin-spi</artifactId>
+    </dependency>
+
     <dependency>
       <groupId>org.osgi</groupId>
       <artifactId>org.osgi.core</artifactId>
 
           <instructions>
             <Bundle-Name>MD SAL Rest Api Doc Generator</Bundle-Name>
-            <Import-Package>*,
-              com.sun.jersey.spi.container.servlet, org.eclipse.jetty.servlets</Import-Package>
+            <Import-Package>!org.apache.maven.plugin.logging,
+              !org.apache.maven.project,
+              !org.opendaylight.yangtools.yang2sources.spi,
+              *,
+              com.sun.jersey.spi.container.servlet, org.eclipse.jetty.servlets
+            </Import-Package>
             <Bundle-Activator>org.opendaylight.controller.sal.rest.doc.DocProvider</Bundle-Activator>
             <Web-ContextPath>/apidoc</Web-ContextPath>
           </instructions>
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/StaticDocGenerator.java b/opendaylight/md-sal/sal-rest-docgen/src/main/java/org/opendaylight/controller/sal/rest/doc/impl/StaticDocGenerator.java
new file mode 100644 (file)
index 0000000..b658102
--- /dev/null
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others.  All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.rest.doc.impl;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import javax.ws.rs.core.UriInfo;
+import org.apache.maven.plugin.logging.Log;
+import org.apache.maven.project.MavenProject;
+import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
+import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
+import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.opendaylight.yangtools.yang2sources.spi.CodeGenerator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation.
+ */
+public class StaticDocGenerator extends ApiDocGenerator implements CodeGenerator {
+
+    private static final String DEFAULT_OUTPUT_BASE_DIR_PATH = "target" + File.separator + "generated-resources"
+        + File.separator + "swagger-api-documentation";
+
+    private static Logger _logger = LoggerFactory.getLogger(StaticDocGenerator.class);
+
+    private MavenProject mavenProject;
+    private File projectBaseDir;
+    private Map<String, String> additionalConfig;
+    private File resourceBaseDir;
+    private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+
+    /**
+     * Generates static Swagger documentation for the supplied YANG modules.
+     *
+     * @param context the schema context containing all resolved YANG modules
+     * @param outputDir base directory for the generated files; when null,
+     *        target/generated-resources/swagger-api-documentation is used
+     * @param yangModules the YANG modules to document
+     * @return the generated documentation files
+     * @throws IOException if a documentation file cannot be written
+     */
+    @Override
+    public Collection<File> generateSources(SchemaContext context, File outputDir, Set<Module> yangModules) throws IOException {
+        List<File> result = new ArrayList<>();
+
+        // Create Base Directory
+        final File outputBaseDir;
+        if (outputDir == null) {
+            outputBaseDir = new File(DEFAULT_OUTPUT_BASE_DIR_PATH);
+        } else {
+            outputBaseDir = outputDir;
+        }
+        outputBaseDir.mkdirs();
+
+        // Create Resources directory
+        File resourcesDir = new File(outputBaseDir, "resources");
+        resourcesDir.mkdirs();
+
+        // Create JS file
+        File resourcesJsFile = new File(outputBaseDir, "resources.js");
+        resourcesJsFile.createNewFile();
+        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(resourcesJsFile));
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+
+        // Write resource listing to JS file
+        ResourceList resourceList = super.getResourceListing(null, context, "");
+        String resourceListJson = mapper.writeValueAsString(resourceList);
+        resourceListJson = resourceListJson.replace("\'", "\\\'").replace("\\n", "\\\\n");
+        bufferedWriter.write("function getSpec() {\n\treturn \'" + resourceListJson + "\';\n}\n\n");
+
+        // Write resources/APIs to JS file and to disk
+        bufferedWriter.write("function jsonFor(resource) {\n\tswitch(resource) {\n");
+        for (Resource resource : resourceList.getApis()) {
+            int revisionIndex = resource.getPath().indexOf('(');
+            String name = resource.getPath().substring(0, revisionIndex);
+            String revision = resource.getPath().substring(revisionIndex + 1, resource.getPath().length() - 1);
+            ApiDeclaration apiDeclaration = super.getApiDeclaration(name, revision, null, context, "");
+            String json = mapper.writeValueAsString(apiDeclaration);
+            // Manually insert models because org.json.JSONObject cannot be serialized by ObjectMapper
+            json = json.replace("\"models\":{}", "\"models\":" + apiDeclaration.getModels().toString().replace("\\\"", "\""));
+            // Escape single quotes and new lines
+            json = json.replace("\'", "\\\'").replace("\\n", "\\\\n");
+            bufferedWriter.write("\t\tcase \"" + name + "(" + revision + ")\": return \'" + json + "\';\n");
+
+            File resourceFile = new File(resourcesDir, name + "(" + revision + ").json");
+            BufferedWriter resourceFileWriter = new BufferedWriter(new FileWriter(resourceFile));
+            resourceFileWriter.write(json);
+            resourceFileWriter.close();
+            result.add(resourceFile);
+        }
+        bufferedWriter.write("\t}\n\treturn \"\";\n}");
+        bufferedWriter.close();
+
+        result.add(resourcesJsFile);
+        return result;
+    }
+
+    @Override
+    protected String generatePath(UriInfo uriInfo, String name, String revision) {
+        if (uriInfo == null) {
+            return name + "(" + revision + ")";
+        }
+        return super.generatePath(uriInfo, name, revision);
+    }
+
+    @Override
+    protected String createBasePathFromUriInfo(UriInfo uriInfo) {
+        if (uriInfo == null) {
+            return RESTCONF_CONTEXT_ROOT;
+        }
+        return super.createBasePathFromUriInfo(uriInfo);
+    }
+
+    @Override
+    public void setLog(Log log) {
+    }
+
+    @Override
+    public void setAdditionalConfig(Map<String, String> additionalConfig) {
+        this.additionalConfig = additionalConfig;
+    }
+
+    @Override
+    public void setResourceBaseDir(File resourceBaseDir) {
+        this.resourceBaseDir = resourceBaseDir;
+    }
+
+    @Override
+    public void setMavenProject(MavenProject mavenProject) {
+        this.mavenProject = mavenProject;
+        this.projectBaseDir = mavenProject.getBasedir();
+    }
+}
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/README.txt b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/README.txt
new file mode 100644 (file)
index 0000000..7e72e83
--- /dev/null
@@ -0,0 +1,79 @@
+This component offers Swagger documentation of the RestConf APIs.
+
+This Swagger documentation can be accessed in two ways:
+I. Running server
+Open a browser and go to http://<host>:8181/apidoc/explorer/index.html
+
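+For example, with a default installation the Swagger resource listing used
+by that page can also be fetched directly (host and port are illustrative):
+
+    curl http://localhost:8181/apidoc/apis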
+II. Static documentation generation
+By adding a reference to the StaticDocGenerator class in any pom.xml,
+static documentation will be generated.  This static documentation will
+document all the RestConf APIs for the YANG files in that artifact and
+all the YANG files in that artifact's dependencies.
+
+In order to generate static documentation for all resources,
+this should be placed in a downstream project.
+
+Below is what you would add to the <plugins> section under <build>.
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.8</version>
+        <executions>
+          <execution>
+            <id>unpack-static-documentation</id>
+            <goals>
+              <goal>unpack-dependencies</goal>
+            </goals>
+            <phase>generate-resources</phase>
+            <configuration>
+              <outputDirectory>${project.build.directory}/generated-resources/swagger-api-documentation</outputDirectory>
+              <includeArtifactIds>sal-rest-docgen</includeArtifactIds>
+              <includes>**/explorer/css/**/*, **/explorer/images/**/*, **/explorer/lib/**/*, **/explorer/static/**/*,</includes>
+              <excludeTransitive>true</excludeTransitive>
+              <ignorePermissions>false</ignorePermissions>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.opendaylight.yangtools</groupId>
+        <artifactId>yang-maven-plugin</artifactId>
+        <version>${yangtools.version}</version>
+        <dependencies>
+          <dependency>
+            <groupId>org.opendaylight.yangtools</groupId>
+            <artifactId>maven-sal-api-gen-plugin</artifactId>
+            <version>${yangtools.version}</version>
+            <type>jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.opendaylight.yangtools</groupId>
+            <artifactId>yang-binding</artifactId>
+            <version>${yangtools.version}</version>
+            <type>jar</type>
+          </dependency>
+          <dependency>
+            <groupId>org.opendaylight.controller</groupId>
+            <artifactId>sal-rest-docgen</artifactId>
+            <version>${mdsal.version}</version>
+            <type>jar</type>
+          </dependency>
+        </dependencies>
+        <executions>
+          <execution>
+            <goals>
+              <goal>generate-sources</goal>
+            </goals>
+            <configuration>
+              <yangFilesRootDir>src</yangFilesRootDir>
+              <codeGenerators>
+                <generator>
+                  <codeGeneratorClass>org.opendaylight.controller.sal.rest.doc.impl.StaticDocGenerator</codeGeneratorClass>
+                  <outputBaseDir>${project.build.directory}/generated-resources/swagger-api-documentation/explorer/static</outputBaseDir>
+                </generator>
+              </codeGenerators>
+              <inspectDependencies>true</inspectDependencies>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
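+
+The generator writes resources.js (loaded by the static
+explorer/static/index.html page) into the configured outputBaseDir, plus one
+JSON file per module under its resources subdirectory.  Illustrative shape of
+the generated resources.js (module name, revision and JSON payloads below are
+placeholders):
+
+function getSpec() {
+    return '<JSON resource listing>';
+}
+
+function jsonFor(resource) {
+    switch(resource) {
+        case "example-module(2014-01-01)": return '<JSON API declaration>';
+    }
+    return "";
+}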
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/fonts.css b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/fonts.css
new file mode 100644 (file)
index 0000000..4825d09
--- /dev/null
@@ -0,0 +1,16 @@
+/* latin */
+@font-face {
+    font-family: 'Droid Sans';
+    font-style: normal;
+    font-weight: 400;
+    src: local('Droid Sans'), local('DroidSans'), url(http://fonts.gstatic.com/s/droidsans/v6/s-BiyweUPV0v-yRb-cjciAsYbbCjybiHxArTLjt7FRU.woff2) format('woff2');
+    unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2212, U+2215, U+E0FF, U+EFFD, U+F000;
+}
+/* latin */
+@font-face {
+    font-family: 'Droid Sans';
+    font-style: normal;
+    font-weight: 700;
+    src: local('Droid Sans Bold'), local('DroidSans-Bold'), url(http://fonts.gstatic.com/s/droidsans/v6/EFpQQyG9GqCrobXxL-KRMX9tREKe1lLHLCwOC24WjMs.woff2) format('woff2');
+    unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2212, U+2215, U+E0FF, U+EFFD, U+F000;
+}
\ No newline at end of file
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/index.html b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/index.html
new file mode 100644 (file)
index 0000000..6a7f6aa
--- /dev/null
@@ -0,0 +1,109 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>RestConf Documentation</title>
+    <link href='fonts.css'
+          rel='stylesheet' type='text/css' />   <!--original location: //fonts.googleapis.com/css?family=Droid+Sans:400,700 -->
+    <link href='../css/highlight.default.css' media='screen' rel='stylesheet'
+          type='text/css' />
+    <link href='../css/screen.css' media='screen' rel='stylesheet'
+          type='text/css' />
+    <link rel="stylesheet" type="text/css" href="opendaylight.css">
+    <link rel="stylesheet" type="text/css"
+          href="../css/ui-lightness/jquery-ui-1.10.4.custom.min.css">
+    <script type="text/javascript" src="../lib/shred.bundle.js"></script>
+    <script src='../lib/jquery-1.8.0.min.js' type='text/javascript'></script>
+    <script src='../lib/jquery-ui-1.11.0.min.js' type="text/javascript"></script>
+    <script src='../lib/jquery.slideto.min.js' type='text/javascript'></script>
+    <script src='../lib/jquery.wiggle.min.js' type='text/javascript'></script>
+    <script src='../lib/jquery.ba-bbq.min.js' type='text/javascript'></script>
+    <script src='../lib/handlebars-1.0.0.js' type='text/javascript'></script>
+    <script src='../lib/underscore-min.js' type='text/javascript'></script>
+    <script src='../lib/backbone-min.js' type='text/javascript'></script>
+    <script src='swagger.js' type='text/javascript'></script>
+    <script src='swagger-ui.js' type='text/javascript'></script>
+    <script src='../lib/odl/list_mounts.js' type='text/javascript'></script>
+    <script src='../lib/highlight.7.3.pack.js' type='text/javascript'></script>
+    <script src='../lib/odl/swagger.js' type='text/javascript'></script>
+    <script src='resources.js' type='text/javascript'></script>
+
+    <script type="text/javascript">
+
+        //reloads the swagger UI documentation for the specified mount.
+        var loadMount = function(mountIndex, mountPath) {
+            $("#message").empty();
+            $("#message").append( "<p>Loading...</p>" );
+            loadSwagger("/apidoc/apis/mounts/" + mountIndex,
+                    "swagger-ui-container");
+            $("#message").empty();
+            $("#message").append( "<h2><b>Showing mount points for " + mountPath + "</b></h2>");
+        }
+
+        //clears the swagger UI and adds text prompting the user to select a mount point.
+        var selectAMount = function(string) {
+            $("#swagger-ui-container").empty();
+            $("#message").empty();
+            $("#message").append("<p>Select a mount point.</p>");
+        }
+
+        //loads the root swagger documentation (which comes from RestConf)
+        var loadRootSwagger = function() {
+            $("#message").empty();
+            loadSwagger("/apidoc/apis", "swagger-ui-container");
+        }
+
+        //main method to initialize the mount list / swagger docs / tabs on page load
+        $(function() {
+            $("#tabs").tabs();
+
+            loadMountList($("#mountlist"));
+
+            loadRootSwagger();
+        });
+    </script>
+</head>
+
+<body>
+<div>
+    <!-- style="background-color: #FCA000;" -->
+    <div class="swagger-ui-wrap ui-tabs">
+        <table>
+            <tr>
+                <td><img src="../images/logo_small.png" /></td>
+                <td><h1 width="100%">OpenDaylight RestConf API
+                    Documentation</h1></td>
+            </tr>
+        </table>
+    </div>
+</div>
+
+<div class="navbar-inner">
+    <div class="brand"></div>
+</div>
+
+<!--  defines the div shells which represent the jquery tabs -->
+<div id="tabs" class="swagger-ui-wrap">
+    <ul>
+        <li><a href="#Controller" onclick="loadRootSwagger()">Controller
+            Resources</a></li>
+    </ul>
+
+    <div id="Controller">
+        <div>
+            <h3>Below is the list of APIs supported by the Controller.</h3>
+        </div>
+    </div>
+</div>
+
+<div class="swagger-ui-wrap"><hr/></div>
+
+<!-- messages -->
+<div id="message" class="swagger-ui-wrap"></div>
+
+<!-- the swagger is always loaded in this div -->
+<div id="swagger-ui-container" class="swagger-ui-wrap"></div>
+
+<div id="message-bar" class="swagger-ui-wrap">&nbsp;</div>
+</body>
+
+</html>
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/opendaylight.css b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/opendaylight.css
new file mode 100644 (file)
index 0000000..7579388
--- /dev/null
@@ -0,0 +1,92 @@
+.fuelux [class^="icon-"],
+.fuelux [class*=" icon-"] {
+    display: inline-block;
+    width: 14px;
+    height: 14px;
+    margin-top: 1px;
+    *margin-right: .3em;
+    line-height: 14px;
+    vertical-align: text-top;
+    background-position: 14px 14px;
+    background-repeat: no-repeat;
+}
+
+.fuelux .icon-white,
+.fuelux .nav-pills > .active > a > [class^="icon-"],
+.fuelux .nav-pills > .active > a > [class*=" icon-"],
+.fuelux .nav-list > .active > a > [class^="icon-"],
+.fuelux .nav-list > .active > a > [class*=" icon-"],
+.fuelux .navbar-inverse .nav > .active > a > [class^="icon-"],
+.fuelux .navbar-inverse .nav > .active > a > [class*=" icon-"],
+.fuelux .dropdown-menu > li > a:hover > [class^="icon-"],
+.fuelux .dropdown-menu > li > a:focus > [class^="icon-"],
+.fuelux .dropdown-menu > li > a:hover > [class*=" icon-"],
+.fuelux .dropdown-menu > li > a:focus > [class*=" icon-"],
+.fuelux .dropdown-menu > .active > a > [class^="icon-"],
+.fuelux .dropdown-menu > .active > a > [class*=" icon-"],
+.fuelux .dropdown-submenu:hover > a > [class^="icon-"],
+.fuelux .dropdown-submenu:focus > a > [class^="icon-"],
+.fuelux .dropdown-submenu:hover > a > [class*=" icon-"],
+.fuelux .dropdown-submenu:focus > a > [class*=" icon-"] {
+}
+
+.fuelux .icon-search {
+    background:url('../img/search.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+.fuelux .icon-remove {
+    background:url('../img/searchremove.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+
+.fuelux .icon-chevron-up {
+    background:url('../img/sort_up.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+
+.fuelux .icon-chevron-down {
+    background:url('../img/sort_down.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+
+.fuelux .icon-chevron-left {
+    background:url('../img/nextpageleft.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+
+.fuelux .icon-chevron-right {
+    background:url('../img/nextpageright.png') 0 0 no-repeat;
+    height: 16px;
+    line-height: 16px;
+    width: 16px;
+}
+
+.tip {
+    background-color: white;
+    border: 1px solid #CCCCCC;
+    box-shadow: 2px 2px 8px #555555;
+    color: #111111;
+    font-family: Verdana,Geneva,Arial,Helvetica,sans-serif;
+    font-size: 14px;
+    opacity: 0.9;
+    padding: 7px;
+    width: 260px;
+    border-radius: 8px;
+}
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger-ui.js b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger-ui.js
new file mode 100644 (file)
index 0000000..aabab7c
--- /dev/null
@@ -0,0 +1,2114 @@
+$(function() {
+
+  // Helper function for vertically aligning DOM elements
+  // http://www.seodenver.com/simple-vertical-align-plugin-for-jquery/
+  $.fn.vAlign = function() {
+    return this.each(function(i){
+      var ah = $(this).height();
+      var ph = $(this).parent().height();
+      var mh = (ph - ah) / 2;
+      $(this).css('margin-top', mh);
+    });
+  };
+
+  $.fn.stretchFormtasticInputWidthToParent = function() {
+    return this.each(function(i){
+      var p_width = $(this).closest("form").innerWidth();
+      var p_padding = parseInt($(this).closest("form").css('padding-left') ,10) + parseInt($(this).closest("form").css('padding-right'), 10);
+      var this_padding = parseInt($(this).css('padding-left'), 10) + parseInt($(this).css('padding-right'), 10);
+      $(this).css('width', p_width - p_padding - this_padding);
+    });
+  };
+
+  $('form.formtastic li.string input, form.formtastic textarea').stretchFormtasticInputWidthToParent();
+
+  // Vertically center these paragraphs
+  // Parent may need a min-height for this to work..
+  $('ul.downplayed li div.content p').vAlign();
+
+  // When a sandbox form is submitted..
+  $("form.sandbox").submit(function(){
+
+    var error_free = true;
+
+    // Cycle through the forms required inputs
+    $(this).find("input.required").each(function() {
+
+      // Remove any existing error styles from the input
+      $(this).removeClass('error');
+
+      // Tack the error style on if the input is empty..
+      if ($(this).val() == '') {
+        $(this).addClass('error');
+        $(this).wiggle();
+        error_free = false;
+      }
+
+    });
+
+    return error_free;
+  });
+
+});
+
+function clippyCopiedCallback(a) {
+  $('#api_key_copied').fadeIn().delay(1000).fadeOut();
+
+  // var b = $("#clippy_tooltip_" + a);
+  // b.length != 0 && (b.attr("title", "copied!").trigger("tipsy.reload"), setTimeout(function() {
+  //   b.attr("title", "copy to clipboard")
+  // },
+  // 500))
+}
+
+// Logging function that accounts for browsers that don't have window.console
+function log() {
+  if (window.console) console.log.apply(console,arguments);
+}
+// Handle browsers that do console incorrectly (IE9 and below, see http://stackoverflow.com/a/5539378/7913)
+if (Function.prototype.bind && console && typeof console.log == "object") {
+  [
+    "log","info","warn","error","assert","dir","clear","profile","profileEnd"
+  ].forEach(function (method) {
+      console[method] = this.bind(console[method], console);
+    }, Function.prototype.call);
+}
+
+var Docs = {
+
+  shebang: function() {
+
+    // If shebang has an operation nickname in it..
+    // e.g. /docs/#!/words/get_search
+    var fragments = $.param.fragment().split('/');
+    fragments.shift(); // get rid of the bang
+
+    switch (fragments.length) {
+      case 1:
+        // Expand all operations for the resource and scroll to it
+//                             log('shebang resource:' + fragments[0]);
+        var dom_id = 'resource_' + fragments[0];
+
+        Docs.expandEndpointListForResource(fragments[0]);
+        $("#"+dom_id).slideto({highlight: false});
+        break;
+      case 2:
+        // Refer to the endpoint DOM element, e.g. #words_get_search
+//                             log('shebang endpoint: ' + fragments.join('_'));
+
+        // Expand Resource
+        Docs.expandEndpointListForResource(fragments[0]);
+        $("#"+dom_id).slideto({highlight: false});
+
+        // Expand operation
+        var li_dom_id = fragments.join('_');
+        var li_content_dom_id = li_dom_id + "_content";
+
+//                log("li_dom_id " + li_dom_id);
+//                log("li_content_dom_id " + li_content_dom_id);
+
+        Docs.expandOperation($('#'+li_content_dom_id));
+        $('#'+li_dom_id).slideto({highlight: false});
+        break;
+    }
+
+  },
+
+  toggleEndpointListForResource: function(resource) {
+    var elem = $('li#resource_' + Docs.escapeResourceName(resource) + ' ul.endpoints');
+    if (elem.is(':visible')) {
+      Docs.collapseEndpointListForResource(resource);
+    } else {
+      Docs.expandEndpointListForResource(resource);
+    }
+  },
+
+  // Expand resource
+  expandEndpointListForResource: function(resource) {
+    var resource = Docs.escapeResourceName(resource);
+    if (resource == '') {
+      $('.resource ul.endpoints').slideDown();
+      return;
+    }
+
+    $('li#resource_' + resource).addClass('active');
+
+    var elem = $('li#resource_' + resource + ' ul.endpoints');
+    elem.slideDown();
+  },
+
+  // Collapse resource and mark as explicitly closed
+  collapseEndpointListForResource: function(resource) {
+    var resource = Docs.escapeResourceName(resource);
+    $('li#resource_' + resource).removeClass('active');
+
+    var elem = $('li#resource_' + resource + ' ul.endpoints');
+    elem.slideUp();
+  },
+
+  expandOperationsForResource: function(resource) {
+    // Make sure the resource container is open..
+    Docs.expandEndpointListForResource(resource);
+
+    if (resource == '') {
+      $('.resource ul.endpoints li.operation div.content').slideDown();
+      return;
+    }
+
+    $('li#resource_' + Docs.escapeResourceName(resource) + ' li.operation div.content').each(function() {
+      Docs.expandOperation($(this));
+    });
+  },
+
+  collapseOperationsForResource: function(resource) {
+    // Make sure the resource container is open..
+    Docs.expandEndpointListForResource(resource);
+
+    $('li#resource_' + Docs.escapeResourceName(resource) + ' li.operation div.content').each(function() {
+      Docs.collapseOperation($(this));
+    });
+  },
+
+  escapeResourceName: function(resource) {
+    return resource.replace(/[!"#$%&'()*+,.\/:;<=>?@\[\\\]\^`{|}~]/g, "\\$&");
+  },
+
+  expandOperation: function(elem) {
+    elem.slideDown();
+  },
+
+  collapseOperation: function(elem) {
+    elem.slideUp();
+  }
+
+};
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n  ";
+      stack1 = helpers.each.call(depth0, depth0.produces, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n    <option value=\"";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\">";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "</option>\n   ";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+
+      return "\n  <option value=\"application/json\">application/json</option>\n";
+    }
+
+    buffer += "<label for=\"contentType\"></label>\n<select name=\"contentType\">\n";
+    stack1 = helpers['if'].call(depth0, depth0.produces, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</select>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['main'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1, stack2;
+      buffer += "\n    <div class=\"info_title\">"
+        + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.title)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+        + "</div>\n    <div class=\"info_description\">";
+      stack2 = ((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.description)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1);
+      if(stack2 || stack2 === 0) { buffer += stack2; }
+      buffer += "</div>\n    ";
+      stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.termsOfServiceUrl), {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+      if(stack2 || stack2 === 0) { buffer += stack2; }
+      buffer += "\n    ";
+      stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.contact), {hash:{},inverse:self.noop,fn:self.program(4, program4, data),data:data});
+      if(stack2 || stack2 === 0) { buffer += stack2; }
+      buffer += "\n    ";
+      stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.license), {hash:{},inverse:self.noop,fn:self.program(6, program6, data),data:data});
+      if(stack2 || stack2 === 0) { buffer += stack2; }
+      buffer += "\n  ";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "<div class=\"info_tos\"><a href=\""
+        + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.termsOfServiceUrl)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+        + "\">Terms of service</a></div>";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "<div class='info_contact'><a href=\"mailto:"
+        + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.contact)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+        + "\">Contact the developer</a></div>";
+      return buffer;
+    }
+
+    function program6(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "<div class='info_license'><a href='"
+        + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.licenseUrl)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+        + "'>"
+        + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.license)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+        + "</a></div>";
+      return buffer;
+    }
+
+    function program8(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        , <span style=\"font-variant: small-caps\">api version</span>: ";
+      if (stack1 = helpers.apiVersion) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.apiVersion; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "\n        ";
+      return buffer;
+    }
+
+    buffer += "<div class='info' id='api_info'>\n  ";
+    stack1 = helpers['if'].call(depth0, depth0.info, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</div>\n<div class='container' id='resources_container'>\n    <ul id='resources'>\n    </ul>\n\n    <div class=\"footer\">\n        <br>\n        <br>\n        <h4 style=\"color: #999\">[ <span style=\"font-variant: small-caps\">base url</span>: ";
+    if (stack1 = helpers.basePath) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.basePath; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "\n        ";
+    stack1 = helpers['if'].call(depth0, depth0.apiVersion, {hash:{},inverse:self.noop,fn:self.program(8, program8, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "]</h4>\n    </div>\n</div>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['operation'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        <h4>Implementation Notes</h4>\n        <p>";
+      if (stack1 = helpers.notes) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.notes; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "</p>\n        ";
+      return buffer;
+    }
+
+    function program3(depth0,data) {
+
+
+      return "\n          <h4>Response Class</h4>\n          <p><span class=\"model-signature\" /></p>\n          <br/>\n          <div class=\"response-content-type\" />\n        ";
+    }
+
+    function program5(depth0,data) {
+
+
+      return "\n          <h4>Parameters</h4>\n          <table class='fullwidth'>\n          <thead>\n            <tr>\n            <th style=\"width: 100px; max-width: 100px\">Parameter</th>\n            <th style=\"width: 310px; max-width: 310px\">Value</th>\n            <th style=\"width: 200px; max-width: 200px\">Description</th>\n            <th style=\"width: 100px; max-width: 100px\">Parameter Type</th>\n            <th style=\"width: 220px; max-width: 230px\">Data Type</th>\n            </tr>\n          </thead>\n          <tbody class=\"operation-params\">\n\n          </tbody>\n          </table>\n          ";
+    }
+
+    function program7(depth0,data) {
+
+
+      return "\n          <div style='margin:0;padding:0;display:inline'></div>\n          <h4>Error Status Codes</h4>\n          <table class='fullwidth'>\n            <thead>\n            <tr>\n              <th>HTTP Status Code</th>\n              <th>Reason</th>\n            </tr>\n            </thead>\n            <tbody class=\"operation-status\">\n            \n            </tbody>\n          </table>\n          ";
+    }
+
+    function program9(depth0,data) {
+
+
+      return "\n          ";
+    }
+
+    function program11(depth0,data) {
+
+
+      return "\n          <div class='sandbox_header'>\n            <a href='#' class='response_hider' style='display:none'>Hide Response</a>\n            <img alt='Throbber' class='response_throbber' src='../images/throbber.gif' style='display:none' />\n          </div>\n          ";
+    }
+
+    buffer += "\n  <ul class='operations' >\n    <li class='";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + " operation' id='";
+    if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "'>\n      <div class='heading'>\n        <h3>\n          <span class='http_method'>\n          <a href='#!/";
+    if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "/";
+    if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "' class=\"toggleOperation\">";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</a>\n          </span>\n          <span class='path'>\n          <a href='#!/";
+    if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "/";
+    if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "' class=\"toggleOperation\">";
+    if (stack1 = helpers.path) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.path; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</a>\n          </span>\n        </h3>\n        <ul class='options'>\n          <li>\n          <a href='#!/";
+    if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "/";
+    if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "' class=\"toggleOperation\">";
+    if (stack1 = helpers.summary) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.summary; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</a>\n          </li>\n        </ul>\n      </div>\n      <div class='content' id='";
+    if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_";
+    if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_content' style='display:none'>\n        ";
+    stack1 = helpers['if'].call(depth0, depth0.notes, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n        ";
+    stack1 = helpers['if'].call(depth0, depth0.type, {hash:{},inverse:self.noop,fn:self.program(3, program3, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n        <form accept-charset='UTF-8' class='sandbox'>\n          <div style='margin:0;padding:0;display:inline'></div>\n          ";
+    stack1 = helpers['if'].call(depth0, depth0.parameters, {hash:{},inverse:self.noop,fn:self.program(5, program5, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n          ";
+    stack1 = helpers['if'].call(depth0, depth0.responseMessages, {hash:{},inverse:self.noop,fn:self.program(7, program7, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n          ";
+    stack1 = helpers['if'].call(depth0, depth0.isReadOnly, {hash:{},inverse:self.program(11, program11, data),fn:self.program(9, program9, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n        </form>\n        <div class='response' style='display:none'>\n          <h4>Request URL</h4>\n          <div class='block request_url'></div>\n          <h4>Response Body</h4>\n          <div class='block response_body'></div>\n          <h4>Response Code</h4>\n          <div class='block response_code'></div>\n          <h4>Response Headers</h4>\n          <div class='block response_headers'></div>\n        </div>\n      </div>\n    </li>\n  </ul>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['param'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(4, program4, data),fn:self.program(2, program2, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    <input type=\"file\" name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'/>\n                       <div class=\"parameter-content-type\" />\n              ";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(7, program7, data),fn:self.program(5, program5, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n            ";
+      return buffer;
+    }
+    function program5(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <textarea class='body-textarea' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "</textarea>\n                       ";
+      return buffer;
+    }
+
+    function program7(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <textarea class='body-textarea' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'></textarea>\n                             <br />\n                                <div class=\"parameter-content-type\" />\n                      ";
+      return buffer;
+    }
+
+    function program9(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(12, program12, data),fn:self.program(10, program10, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program10(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    <input class='parameter' minlength='0' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "' placeholder='' type='text' value='";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'/>\n               ";
+      return buffer;
+    }
+
+    function program12(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    <input class='parameter' minlength='0' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "' placeholder='' type='text' value=''/>\n           ";
+      return buffer;
+    }
+
+    buffer += "<td class='code'>";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>\n\n       ";
+    stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(9, program9, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n\n</td>\n<td>";
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td>";
+    if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td>\n   <span class=\"model-signature\"></span>\n</td>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['param_list'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, stack2, self=this, functionType="function", escapeExpression=this.escapeExpression;
+
+    function program1(depth0,data) {
+
+
+      return " multiple='multiple'";
+    }
+
+    function program3(depth0,data) {
+
+
+      return "\n    ";
+    }
+
+    function program5(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n      ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(8, program8, data),fn:self.program(6, program6, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program6(depth0,data) {
+
+
+      return "\n      ";
+    }
+
+    function program8(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        ";
+      stack1 = helpers['if'].call(depth0, depth0.allowMultiple, {hash:{},inverse:self.program(11, program11, data),fn:self.program(9, program9, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n      ";
+      return buffer;
+    }
+    function program9(depth0,data) {
+
+
+      return "\n         ";
+    }
+
+    function program11(depth0,data) {
+
+
+      return "\n          <option selected=\"\" value=''></option>\n         ";
+    }
+
+    function program13(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n      ";
+      stack1 = helpers['if'].call(depth0, depth0.isDefault, {hash:{},inverse:self.program(16, program16, data),fn:self.program(14, program14, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program14(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        <option selected=\"\" value='";
+      if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + " (default)</option>\n      ";
+      return buffer;
+    }
+
+    function program16(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        <option value='";
+      if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "</option>\n      ";
+      return buffer;
+    }
+
+    buffer += "<td class='code'>";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>\n  <select ";
+    stack1 = helpers['if'].call(depth0, depth0.allowMultiple, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += " class='parameter' name='";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "'>\n    ";
+    stack1 = helpers['if'].call(depth0, depth0.required, {hash:{},inverse:self.program(5, program5, data),fn:self.program(3, program3, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n    ";
+    stack2 = helpers.each.call(depth0, ((stack1 = depth0.allowableValues),stack1 == null || stack1 === false ? stack1 : stack1.descriptiveValues), {hash:{},inverse:self.noop,fn:self.program(13, program13, data),data:data});
+    if(stack2 || stack2 === 0) { buffer += stack2; }
+    buffer += "\n  </select>\n</td>\n<td>";
+    if (stack2 = helpers.description) { stack2 = stack2.call(depth0, {hash:{},data:data}); }
+    else { stack2 = depth0.description; stack2 = typeof stack2 === functionType ? stack2.apply(depth0) : stack2; }
+    if(stack2 || stack2 === 0) { buffer += stack2; }
+    buffer += "</td>\n<td>";
+    if (stack2 = helpers.paramType) { stack2 = stack2.call(depth0, {hash:{},data:data}); }
+    else { stack2 = depth0.paramType; stack2 = typeof stack2 === functionType ? stack2.apply(depth0) : stack2; }
+    if(stack2 || stack2 === 0) { buffer += stack2; }
+    buffer += "</td>\n<td><span class=\"model-signature\"></span></td>";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['param_readonly'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        <textarea class='body-textarea' readonly='readonly' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "</textarea>\n    ";
+      return buffer;
+    }
+
+    function program3(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(6, program6, data),fn:self.program(4, program4, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program4(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "\n        ";
+      return buffer;
+    }
+
+    function program6(depth0,data) {
+
+
+      return "\n            (empty)\n        ";
+    }
+
+    buffer += "<td class='code'>";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>\n    ";
+    stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(3, program3, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</td>\n<td>";
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td>";
+    if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['param_readonly_required'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        <textarea class='body-textarea'  readonly='readonly' placeholder='(required)' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "</textarea>\n    ";
+      return buffer;
+    }
+
+    function program3(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n        ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(6, program6, data),fn:self.program(4, program4, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program4(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "\n        ";
+      return buffer;
+    }
+
+    function program6(depth0,data) {
+
+
+      return "\n            (empty)\n        ";
+    }
+
+    buffer += "<td class='code required'>";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>\n    ";
+    stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(3, program3, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</td>\n<td>";
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td>";
+    if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['param_required'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(4, program4, data),fn:self.program(2, program2, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    <input type=\"file\" name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'/>\n               ";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(7, program7, data),fn:self.program(5, program5, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n            ";
+      return buffer;
+    }
+    function program5(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <textarea class='body-textarea' placeholder='(required)' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'>";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "</textarea>\n                       ";
+      return buffer;
+    }
+
+    function program7(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <textarea class='body-textarea' placeholder='(required)' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'></textarea>\n                             <br />\n                                <div class=\"parameter-content-type\" />\n                      ";
+      return buffer;
+    }
+
+    function program9(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n            ";
+      stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(12, program12, data),fn:self.program(10, program10, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n    ";
+      return buffer;
+    }
+    function program10(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    <input class='parameter' class='required' type='file' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'/>\n               ";
+      return buffer;
+    }
+
+    function program12(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                    ";
+      stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(15, program15, data),fn:self.program(13, program13, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n            ";
+      return buffer;
+    }
+    function program13(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <input class='parameter required' minlength='1' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "' placeholder='(required)' type='text' value='";
+      if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "'/>\n                       ";
+      return buffer;
+    }
+
+    function program15(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n                            <input class='parameter required' minlength='1' name='";
+      if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+      else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+      buffer += escapeExpression(stack1)
+        + "' placeholder='(required)' type='text' value=''/>\n                 ";
+      return buffer;
+    }
+
+    buffer += "<td class='code required'>";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>\n ";
+    stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(9, program9, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</td>\n<td>\n <strong>";
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</strong>\n</td>\n<td>";
+    if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['parameter_content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n  ";
+      stack1 = helpers.each.call(depth0, depth0.consumes, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n  <option value=\"";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\">";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "</option>\n  ";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+
+      return "\n  <option value=\"application/json\">application/json</option>\n";
+    }
+
+    stack1 = helpers['if'].call(depth0, depth0.consumes, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</select>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['resource'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, options, functionType="function", escapeExpression=this.escapeExpression, self=this, blockHelperMissing=helpers.blockHelperMissing;
+
+    function program1(depth0,data) {
+
+
+      return " : ";
+    }
+
+    buffer += "<div class='heading'>\n  <h2>\n    <a href='#!/";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "' onclick=\"Docs.toggleEndpointListForResource('";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "');\">";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</a> ";
+    options = {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data};
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, options); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if (!helpers.description) { stack1 = blockHelperMissing.call(depth0, stack1, options); }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n  </h2>\n  <ul class='options'>\n    <li>\n      <a href='#!/";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "' id='endpointListTogger_";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "'\n         onclick=\"Docs.toggleEndpointListForResource('";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "');\">Show/Hide</a>\n    </li>\n    <li>\n      <a href='#' onclick=\"Docs.collapseOperationsForResource('";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "'); return false;\">\n        List Operations\n      </a>\n    </li>\n    <li>\n      <a href='#' onclick=\"Docs.expandOperationsForResource('";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "'); return false;\">\n        Expand Operations\n      </a>\n    </li>\n    <li>\n      <a href='";
+    stack1 = "resources/" + depth0.path + ".json";
+    buffer += escapeExpression(stack1)
+      + "'>Raw</a>\n    </li>\n  </ul>\n</div>\n<ul class='endpoints' id='";
+    if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "_endpoint_list' style='display:none'>\n\n</ul>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['response_content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", self=this;
+
+    function program1(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n  ";
+      stack1 = helpers.each.call(depth0, depth0.produces, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\n";
+      return buffer;
+    }
+    function program2(depth0,data) {
+
+      var buffer = "", stack1;
+      buffer += "\n  <option value=\"";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "\">";
+      stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+      if(stack1 || stack1 === 0) { buffer += stack1; }
+      buffer += "</option>\n  ";
+      return buffer;
+    }
+
+    function program4(depth0,data) {
+
+
+      return "\n  <option value=\"application/json\">application/json</option>\n";
+    }
+
+    stack1 = helpers['if'].call(depth0, depth0.produces, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n</select>\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['signature'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression;
+
+
+    buffer += "<div>\n<ul class=\"signature-nav\">\n    <li><a class=\"description-link\" href=\"#\">Model</a></li>\n    <li><a class=\"snippet-link\" href=\"#\">Model Schema</a></li>\n</ul>\n<div>\n\n<div class=\"signature-container\">\n    <div class=\"description\">\n        ";
+    if (stack1 = helpers.signature) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.signature; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "\n    </div>\n\n    <div class=\"snippet\">\n        <pre><code>";
+    if (stack1 = helpers.sampleJSON) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.sampleJSON; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</code></pre>\n        <small class=\"notice\"></small>\n    </div>\n</div>\n\n";
+    return buffer;
+  });
+})();
+
+(function() {
+  var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+  templates['status_code'] = template(function (Handlebars,depth0,helpers,partials,data) {
+    this.compilerInfo = [4,'>= 1.0.0'];
+    helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+    var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression;
+
+
+    buffer += "<td width='15%' class='code'>";
+    if (stack1 = helpers.code) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.code; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    buffer += escapeExpression(stack1)
+      + "</td>\n<td>";
+    if (stack1 = helpers.message) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+    else { stack1 = depth0.message; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+    if(stack1 || stack1 === 0) { buffer += stack1; }
+    buffer += "</td>\n";
+    return buffer;
+  });
+})();
+
+
+
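+// Note: each IIFE above registers one precompiled template on Handlebars.templates;
+// the Backbone views generated below consume them roughly as follows (illustrative
+// only, mirroring StatusCodeView.render further down in this file):
+//   var row = Handlebars.templates['status_code']({ code: 200, message: 'OK' });
+//   $(this.el).html(row);
+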
+// Generated by CoffeeScript 1.6.3
+(function() {
+  var ContentTypeView, HeaderView, MainView, OperationView, ParameterContentTypeView, ParameterView, ResourceView, ResponseContentTypeView, SignatureView, StatusCodeView, SwaggerUi, _ref, _ref1, _ref10, _ref2, _ref3, _ref4, _ref5, _ref6, _ref7, _ref8, _ref9,
+    __hasProp = {}.hasOwnProperty,
+    __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
+
+  SwaggerUi = (function(_super) {
+    __extends(SwaggerUi, _super);
+
+    function SwaggerUi() {
+      _ref = SwaggerUi.__super__.constructor.apply(this, arguments);
+      return _ref;
+    }
+
+    SwaggerUi.prototype.dom_id = "swagger_ui";
+
+    SwaggerUi.prototype.options = null;
+
+    SwaggerUi.prototype.api = null;
+
+    SwaggerUi.prototype.headerView = null;
+
+    SwaggerUi.prototype.mainView = null;
+
+    SwaggerUi.prototype.initialize = function(options) {
+      var _this = this;
+      if (options == null) {
+        options = {};
+      }
+      if (options.dom_id != null) {
+        this.dom_id = options.dom_id;
+        delete options.dom_id;
+      }
+      if ($('#' + this.dom_id).length === 0) {
+        // A jQuery selection is never null; an empty result means the container is missing.
+        $('body').append('<div id="' + this.dom_id + '"></div>');
+      }
+      this.options = options;
+      this.options.success = function() {
+        return _this.render();
+      };
+      this.options.progress = function(d) {
+        return _this.showMessage(d);
+      };
+      this.options.failure = function(d) {
+        return _this.onLoadFailure(d);
+      };
+      this.headerView = new HeaderView({
+        el: $('#header')
+      });
+      return this.headerView.on('update-swagger-ui', function(data) {
+        return _this.updateSwaggerUi(data);
+      });
+    };
+
+    SwaggerUi.prototype.updateSwaggerUi = function(data) {
+      this.options.url = data.url;
+      return this.load();
+    };
+
+    SwaggerUi.prototype.load = function() {
+      var url, _ref1;
+      if ((_ref1 = this.mainView) != null) {
+        _ref1.clear();
+      }
+      url = this.options.url;
+      if (url.indexOf("http") !== 0) {
+        url = this.buildUrl(window.location.href.toString(), url);
+      }
+      this.options.url = url;
+      this.headerView.update(url);
+      this.api = new SwaggerApi(this.options);
+      this.api.build();
+      return this.api;
+    };
+
+    SwaggerUi.prototype.render = function() {
+      var _this = this;
+      this.showMessage('Finished Loading Resource Information. Rendering Swagger UI...');
+      this.mainView = new MainView({
+        model: this.api,
+        el: $('#' + this.dom_id)
+      }).render();
+      this.showMessage();
+      switch (this.options.docExpansion) {
+        case "full":
+          Docs.expandOperationsForResource('');
+          break;
+        case "list":
+          Docs.collapseOperationsForResource('');
+      }
+      if (this.options.onComplete) {
+        this.options.onComplete(this.api, this);
+      }
+      return setTimeout(function() {
+        return Docs.shebang();
+      }, 400);
+    };
+
+    SwaggerUi.prototype.buildUrl = function(base, url) {
+      var parts;
+      console.log("base is " + base);
+      parts = base.split("/");
+      base = parts[0] + "//" + parts[2];
+      if (url.indexOf("/") === 0) {
+        return base + url;
+      } else {
+        return base + "/" + url;
+      }
+    };
+
+    SwaggerUi.prototype.showMessage = function(data) {
+      if (data == null) {
+        data = '';
+      }
+      $('#message-bar').removeClass('message-fail');
+      $('#message-bar').addClass('message-success');
+      return $('#message-bar').html(data);
+    };
+
+    SwaggerUi.prototype.onLoadFailure = function(data) {
+      var val;
+      if (data == null) {
+        data = '';
+      }
+      $('#message-bar').removeClass('message-success');
+      $('#message-bar').addClass('message-fail');
+      val = $('#message-bar').html(data);
+      if (this.options.onFailure != null) {
+        this.options.onFailure(data);
+      }
+      return val;
+    };
+
+    return SwaggerUi;
+
+  })(Backbone.Router);
+
+  window.SwaggerUi = SwaggerUi;
+
+  HeaderView = (function(_super) {
+    __extends(HeaderView, _super);
+
+    function HeaderView() {
+      _ref1 = HeaderView.__super__.constructor.apply(this, arguments);
+      return _ref1;
+    }
+
+    HeaderView.prototype.events = {
+      'click #show-pet-store-icon': 'showPetStore',
+      'click #show-wordnik-dev-icon': 'showWordnikDev',
+      'click #explore': 'showCustom',
+      'keyup #input_baseUrl': 'showCustomOnKeyup',
+      'keyup #input_apiKey': 'showCustomOnKeyup'
+    };
+
+    HeaderView.prototype.initialize = function() {};
+
+    HeaderView.prototype.showPetStore = function(e) {
+      return this.trigger('update-swagger-ui', {
+        url: "http://petstore.swagger.wordnik.com/api/api-docs"
+      });
+    };
+
+    HeaderView.prototype.showWordnikDev = function(e) {
+      return this.trigger('update-swagger-ui', {
+        url: "http://api.wordnik.com/v4/resources.json"
+      });
+    };
+
+    HeaderView.prototype.showCustomOnKeyup = function(e) {
+      if (e.keyCode === 13) {
+        return this.showCustom();
+      }
+    };
+
+    HeaderView.prototype.showCustom = function(e) {
+      if (e != null) {
+        e.preventDefault();
+      }
+      return this.trigger('update-swagger-ui', {
+        url: $('#input_baseUrl').val(),
+        apiKey: $('#input_apiKey').val()
+      });
+    };
+
+    HeaderView.prototype.update = function(url, apiKey, trigger) {
+      if (trigger == null) {
+        trigger = false;
+      }
+      $('#input_baseUrl').val(url);
+      if (trigger) {
+        return this.trigger('update-swagger-ui', {
+          url: url
+        });
+      }
+    };
+
+    return HeaderView;
+
+  })(Backbone.View);
+
+  MainView = (function(_super) {
+    __extends(MainView, _super);
+
+    function MainView() {
+      _ref2 = MainView.__super__.constructor.apply(this, arguments);
+      return _ref2;
+    }
+
+    MainView.prototype.initialize = function() {};
+
+    MainView.prototype.render = function() {
+      var resource, _i, _len, _ref3;
+      $(this.el).html(Handlebars.templates.main(this.model));
+      _ref3 = this.model.apisArray;
+      for (_i = 0, _len = _ref3.length; _i < _len; _i++) {
+        resource = _ref3[_i];
+        this.addResource(resource);
+      }
+      return this;
+    };
+
+    MainView.prototype.addResource = function(resource) {
+      var resourceView;
+      resourceView = new ResourceView({
+        model: resource,
+        tagName: 'li',
+        id: 'resource_' + resource.name,
+        className: 'resource'
+      });
+      return $('#resources').append(resourceView.render().el);
+    };
+
+    MainView.prototype.clear = function() {
+      return $(this.el).html('');
+    };
+
+    return MainView;
+
+  })(Backbone.View);
+
+  ResourceView = (function(_super) {
+    __extends(ResourceView, _super);
+
+    function ResourceView() {
+      _ref3 = ResourceView.__super__.constructor.apply(this, arguments);
+      return _ref3;
+    }
+
+    ResourceView.prototype.initialize = function() {};
+
+    ResourceView.prototype.render = function() {
+      var operation, _i, _len, _ref4;
+      console.log(this.model.description);
+      $(this.el).html(Handlebars.templates.resource(this.model));
+      this.number = 0;
+      _ref4 = this.model.operationsArray;
+      for (_i = 0, _len = _ref4.length; _i < _len; _i++) {
+        operation = _ref4[_i];
+        this.addOperation(operation);
+      }
+      return this;
+    };
+
+    ResourceView.prototype.addOperation = function(operation) {
+      var operationView;
+      operation.number = this.number;
+      operationView = new OperationView({
+        model: operation,
+        tagName: 'li',
+        className: 'endpoint'
+      });
+      $('.endpoints', $(this.el)).append(operationView.render().el);
+      return this.number++;
+    };
+
+    return ResourceView;
+
+  })(Backbone.View);
+
+  OperationView = (function(_super) {
+    __extends(OperationView, _super);
+
+    function OperationView() {
+      _ref4 = OperationView.__super__.constructor.apply(this, arguments);
+      return _ref4;
+    }
+
+    OperationView.prototype.invocationUrl = null;
+
+    OperationView.prototype.events = {
+      'submit .sandbox': 'submitOperation',
+      'click .submit': 'submitOperation',
+      'click .response_hider': 'hideResponse',
+      'click .toggleOperation': 'toggleOperationContent'
+    };
+
+    OperationView.prototype.initialize = function() {};
+
+    OperationView.prototype.render = function() {
+      var contentTypeModel, isMethodSubmissionSupported, param, responseContentTypeView, responseSignatureView, signatureModel, statusCode, type, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7;
+      isMethodSubmissionSupported = true;
+      if (!isMethodSubmissionSupported) {
+        this.model.isReadOnly = true;
+      }
+      $(this.el).html(Handlebars.templates.operation(this.model));
+      if (this.model.responseClassSignature && this.model.responseClassSignature !== 'string') {
+        signatureModel = {
+          sampleJSON: this.model.responseSampleJSON,
+          isParam: false,
+          signature: this.model.responseClassSignature
+        };
+        responseSignatureView = new SignatureView({
+          model: signatureModel,
+          tagName: 'div'
+        });
+        $('.model-signature', $(this.el)).append(responseSignatureView.render().el);
+      } else {
+        $('.model-signature', $(this.el)).html(this.model.type);
+      }
+      contentTypeModel = {
+        isParam: false
+      };
+      contentTypeModel.consumes = this.model.consumes;
+      contentTypeModel.produces = this.model.produces;
+      _ref5 = this.model.parameters;
+      for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+        param = _ref5[_i];
+        type = param.type || param.dataType;
+        if (type.toLowerCase() === 'file') {
+          if (!contentTypeModel.consumes) {
+            console.log("set content type ");
+            contentTypeModel.consumes = 'multipart/form-data';
+          }
+        }
+      }
+      responseContentTypeView = new ResponseContentTypeView({
+        model: contentTypeModel
+      });
+      $('.response-content-type', $(this.el)).append(responseContentTypeView.render().el);
+      _ref6 = this.model.parameters;
+      for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+        param = _ref6[_j];
+        this.addParameter(param, contentTypeModel.consumes);
+      }
+      _ref7 = this.model.responseMessages;
+      for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+        statusCode = _ref7[_k];
+        this.addStatusCode(statusCode);
+      }
+      return this;
+    };
+
+    OperationView.prototype.addParameter = function(param, consumes) {
+      var paramView;
+      param.consumes = consumes;
+      paramView = new ParameterView({
+        model: param,
+        tagName: 'tr',
+        readOnly: this.model.isReadOnly
+      });
+      return $('.operation-params', $(this.el)).append(paramView.render().el);
+    };
+
+    OperationView.prototype.addStatusCode = function(statusCode) {
+      var statusCodeView;
+      statusCodeView = new StatusCodeView({
+        model: statusCode,
+        tagName: 'tr'
+      });
+      return $('.operation-status', $(this.el)).append(statusCodeView.render().el);
+    };
+
+    OperationView.prototype.submitOperation = function(e) {
+      var error_free, form, isFileUpload, map, o, opts, val, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7;
+      if (e != null) {
+        e.preventDefault();
+      }
+      form = $('.sandbox', $(this.el));
+      error_free = true;
+      form.find("input.required").each(function() {
+        var _this = this;
+        $(this).removeClass("error");
+        if (jQuery.trim($(this).val()) === "") {
+          $(this).addClass("error");
+          $(this).wiggle({
+            callback: function() {
+              return $(_this).focus();
+            }
+          });
+          return error_free = false;
+        }
+      });
+      if (error_free) {
+        map = {};
+        opts = {
+          parent: this
+        };
+        isFileUpload = false;
+        _ref5 = form.find("input");
+        for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+          o = _ref5[_i];
+          if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+            map[o.name] = o.value;
+          }
+          if (o.type === "file") {
+            isFileUpload = true;
+          }
+        }
+        _ref6 = form.find("textarea");
+        for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+          o = _ref6[_j];
+          if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+            map["body"] = o.value;
+          }
+        }
+        _ref7 = form.find("select");
+        for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+          o = _ref7[_k];
+          val = this.getSelectedValue(o);
+          if ((val != null) && jQuery.trim(val).length > 0) {
+            map[o.name] = val;
+          }
+        }
+        opts.responseContentType = $("div select[name=responseContentType]", $(this.el)).val();
+        opts.requestContentType = $("div select[name=parameterContentType]", $(this.el)).val();
+        $(".response_throbber", $(this.el)).show();
+        if (isFileUpload) {
+          return this.handleFileUpload(map, form);
+        } else {
+          return this.model["do"](map, opts, this.showCompleteStatus, this.showErrorStatus, this);
+        }
+      }
+    };
+
+    OperationView.prototype.success = function(response, parent) {
+      return parent.showCompleteStatus(response);
+    };
+
+    OperationView.prototype.handleFileUpload = function(map, form) {
+      var bodyParam, headerParams, o, obj, param, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7,
+        _this = this;
+      console.log("it's a file upload");
+      _ref5 = form.serializeArray();
+      for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+        o = _ref5[_i];
+        if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+          map[o.name] = o.value;
+        }
+      }
+      bodyParam = new FormData();
+      _ref6 = this.model.parameters;
+      for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+        param = _ref6[_j];
+        if (param.paramType === 'form') {
+          bodyParam.append(param.name, map[param.name]);
+        }
+      }
+      headerParams = {};
+      _ref7 = this.model.parameters;
+      for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+        param = _ref7[_k];
+        if (param.paramType === 'header') {
+          headerParams[param.name] = map[param.name];
+        }
+      }
+      console.log(headerParams);
+      $.each($('input[type~="file"]'), function(i, el) {
+        return bodyParam.append($(el).attr('name'), el.files[0]);
+      });
+      console.log(bodyParam);
+      this.invocationUrl = this.model.supportHeaderParams() ? (headerParams = this.model.getHeaderParams(map), this.model.urlify(map, false)) : this.model.urlify(map, true);
+      $(".request_url", $(this.el)).html("<pre>" + this.invocationUrl + "</pre>");
+      obj = {
+        type: this.model.method,
+        url: this.invocationUrl,
+        headers: headerParams,
+        data: bodyParam,
+        dataType: 'json',
+        contentType: false,
+        processData: false,
+        error: function(data, textStatus, error) {
+          return _this.showErrorStatus(_this.wrap(data), _this);
+        },
+        success: function(data) {
+          return _this.showResponse(data, _this);
+        },
+        complete: function(data) {
+          return _this.showCompleteStatus(_this.wrap(data), _this);
+        }
+      };
+      if (window.authorizations) {
+        window.authorizations.apply(obj);
+      }
+      jQuery.ajax(obj);
+      return false;
+    };
+
+    OperationView.prototype.wrap = function(data) {
+      var o,
+        _this = this;
+      o = {};
+      o.content = {};
+      o.content.data = data.responseText;
+      o.getHeaders = function() {
+        return {
+          "Content-Type": data.getResponseHeader("Content-Type")
+        };
+      };
+      o.request = {};
+      o.request.url = this.invocationUrl;
+      o.status = data.status;
+      return o;
+    };
+
+    OperationView.prototype.getSelectedValue = function(select) {
+      var opt, options, _i, _len, _ref5;
+      if (!select.multiple) {
+        return select.value;
+      } else {
+        options = [];
+        _ref5 = select.options;
+        for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+          opt = _ref5[_i];
+          if (opt.selected) {
+            options.push(opt.value);
+          }
+        }
+        if (options.length > 0) {
+          return options.join(",");
+        } else {
+          return null;
+        }
+      }
+    };
+
+    OperationView.prototype.hideResponse = function(e) {
+      if (e != null) {
+        e.preventDefault();
+      }
+      $(".response", $(this.el)).slideUp();
+      return $(".response_hider", $(this.el)).fadeOut();
+    };
+
+    OperationView.prototype.showResponse = function(response) {
+      var prettyJson;
+      prettyJson = JSON.stringify(response, null, "\t").replace(/\n/g, "<br>");
+      return $(".response_body", $(this.el)).html(escape(prettyJson));
+    };
+
+    OperationView.prototype.showErrorStatus = function(data, parent) {
+      return parent.showStatus(data);
+    };
+
+    OperationView.prototype.showCompleteStatus = function(data, parent) {
+      return parent.showStatus(data);
+    };
+
+    OperationView.prototype.formatXml = function(xml) {
+      var contexp, formatted, indent, lastType, lines, ln, pad, reg, transitions, wsexp, _fn, _i, _len;
+      reg = /(>)(<)(\/*)/g;
+      wsexp = /[ ]*(.*)[ ]+\n/g;
+      contexp = /(<.+>)(.+\n)/g;
+      xml = xml.replace(reg, '$1\n$2$3').replace(wsexp, '$1\n').replace(contexp, '$1\n$2');
+      pad = 0;
+      formatted = '';
+      lines = xml.split('\n');
+      indent = 0;
+      lastType = 'other';
+      transitions = {
+        'single->single': 0,
+        'single->closing': -1,
+        'single->opening': 0,
+        'single->other': 0,
+        'closing->single': 0,
+        'closing->closing': -1,
+        'closing->opening': 0,
+        'closing->other': 0,
+        'opening->single': 1,
+        'opening->closing': 0,
+        'opening->opening': 1,
+        'opening->other': 1,
+        'other->single': 0,
+        'other->closing': -1,
+        'other->opening': 0,
+        'other->other': 0
+      };
+      _fn = function(ln) {
+        var fromTo, j, key, padding, type, types, value;
+        types = {
+          single: Boolean(ln.match(/<.+\/>/)),
+          closing: Boolean(ln.match(/<\/.+>/)),
+          opening: Boolean(ln.match(/<[^!?].*>/))
+        };
+        type = ((function() {
+          var _results;
+          _results = [];
+          for (key in types) {
+            value = types[key];
+            if (value) {
+              _results.push(key);
+            }
+          }
+          return _results;
+        })())[0];
+        type = type === void 0 ? 'other' : type;
+        fromTo = lastType + '->' + type;
+        lastType = type;
+        padding = '';
+        indent += transitions[fromTo];
+        padding = ((function() {
+          var _j, _ref5, _results;
+          _results = [];
+          for (j = _j = 0, _ref5 = indent; 0 <= _ref5 ? _j < _ref5 : _j > _ref5; j = 0 <= _ref5 ? ++_j : --_j) {
+            _results.push('  ');
+          }
+          return _results;
+        })()).join('');
+        if (fromTo === 'opening->closing') {
+          return formatted = formatted.substr(0, formatted.length - 1) + ln + '\n';
+        } else {
+          return formatted += padding + ln + '\n';
+        }
+      };
+      for (_i = 0, _len = lines.length; _i < _len; _i++) {
+        ln = lines[_i];
+        _fn(ln);
+      }
+      return formatted;
+    };
+
+    OperationView.prototype.showStatus = function(data) {
+      var code, content, contentType, headers, pre, response_body;
+      content = data.content.data;
+      headers = data.getHeaders();
+      contentType = headers["Content-Type"];
+      if (content === void 0) {
+        code = $('<code />').text("no content");
+        pre = $('<pre class="json" />').append(code);
+      } else if (contentType.indexOf("application/json") === 0 || contentType.indexOf("application/hal+json") === 0) {
+        code = $('<code />').text(JSON.stringify(JSON.parse(content), null, 2));
+        pre = $('<pre class="json" />').append(code);
+      } else if (contentType.indexOf("application/xml") === 0) {
+        code = $('<code />').text(this.formatXml(content));
+        pre = $('<pre class="xml" />').append(code);
+      } else if (contentType.indexOf("text/html") === 0) {
+        code = $('<code />').html(content);
+        pre = $('<pre class="xml" />').append(code);
+      } else if (contentType.indexOf("image/") === 0) {
+        pre = $('<img>').attr('src', data.request.url);
+      } else {
+        code = $('<code />').text(content);
+        pre = $('<pre class="json" />').append(code);
+      }
+      response_body = pre;
+      $(".request_url", $(this.el)).html("<pre>" + data.request.url + "</pre>");
+      $(".response_code", $(this.el)).html("<pre>" + data.status + "</pre>");
+      $(".response_body", $(this.el)).html(response_body);
+      $(".response_headers", $(this.el)).html("<pre>" + JSON.stringify(data.getHeaders(), null, "  ").replace(/\n/g, "<br>") + "</pre>");
+      $(".response", $(this.el)).slideDown();
+      $(".response_hider", $(this.el)).show();
+      $(".response_throbber", $(this.el)).hide();
+      return hljs.highlightBlock($('.response_body', $(this.el))[0]);
+    };
+
+    OperationView.prototype.toggleOperationContent = function() {
+      var elem;
+      elem = $('#' + Docs.escapeResourceName(this.model.resourceName) + "_" + this.model.nickname + "_" + this.model.method + "_" + this.model.number + "_content");
+      if (elem.is(':visible')) {
+        return Docs.collapseOperation(elem);
+      } else {
+        return Docs.expandOperation(elem);
+      }
+    };
+
+    return OperationView;
+
+  })(Backbone.View);
+
+  StatusCodeView = (function(_super) {
+    __extends(StatusCodeView, _super);
+
+    function StatusCodeView() {
+      _ref5 = StatusCodeView.__super__.constructor.apply(this, arguments);
+      return _ref5;
+    }
+
+    StatusCodeView.prototype.initialize = function() {};
+
+    StatusCodeView.prototype.render = function() {
+      var template;
+      template = this.template();
+      $(this.el).html(template(this.model));
+      return this;
+    };
+
+    StatusCodeView.prototype.template = function() {
+      return Handlebars.templates.status_code;
+    };
+
+    return StatusCodeView;
+
+  })(Backbone.View);
+
+  ParameterView = (function(_super) {
+    __extends(ParameterView, _super);
+
+    function ParameterView() {
+      _ref6 = ParameterView.__super__.constructor.apply(this, arguments);
+      return _ref6;
+    }
+
+    ParameterView.prototype.initialize = function() {};
+
+    ParameterView.prototype.render = function() {
+      var contentTypeModel, isParam, parameterContentTypeView, responseContentTypeView, signatureModel, signatureView, template, type;
+      type = this.model.type || this.model.dataType;
+      if (this.model.paramType === 'body') {
+        this.model.isBody = true;
+      }
+      if (type.toLowerCase() === 'file') {
+        this.model.isFile = true;
+      }
+      template = this.template();
+      $(this.el).html(template(this.model));
+      signatureModel = {
+        sampleJSON: this.model.sampleJSON,
+        isParam: true,
+        signature: this.model.signature
+      };
+      if (this.model.sampleJSON) {
+        signatureView = new SignatureView({
+          model: signatureModel,
+          tagName: 'div'
+        });
+        $('.model-signature', $(this.el)).append(signatureView.render().el);
+      } else {
+        $('.model-signature', $(this.el)).html(this.model.signature);
+      }
+      isParam = false;
+      if (this.model.isBody) {
+        isParam = true;
+      }
+      contentTypeModel = {
+        isParam: isParam
+      };
+      contentTypeModel.consumes = this.model.consumes;
+      if (isParam) {
+        parameterContentTypeView = new ParameterContentTypeView({
+          model: contentTypeModel
+        });
+        $('.parameter-content-type', $(this.el)).append(parameterContentTypeView.render().el);
+      } else {
+        responseContentTypeView = new ResponseContentTypeView({
+          model: contentTypeModel
+        });
+        $('.response-content-type', $(this.el)).append(responseContentTypeView.render().el);
+      }
+      return this;
+    };
+
+    ParameterView.prototype.template = function() {
+      if (this.model.isList) {
+        return Handlebars.templates.param_list;
+      } else {
+        if (this.options.readOnly) {
+          if (this.model.required) {
+            return Handlebars.templates.param_readonly_required;
+          } else {
+            return Handlebars.templates.param_readonly;
+          }
+        } else {
+          if (this.model.required) {
+            return Handlebars.templates.param_required;
+          } else {
+            return Handlebars.templates.param;
+          }
+        }
+      }
+    };
+
+    return ParameterView;
+
+  })(Backbone.View);
+
+  SignatureView = (function(_super) {
+    __extends(SignatureView, _super);
+
+    function SignatureView() {
+      _ref7 = SignatureView.__super__.constructor.apply(this, arguments);
+      return _ref7;
+    }
+
+    SignatureView.prototype.events = {
+      'click a.description-link': 'switchToDescription',
+      'click a.snippet-link': 'switchToSnippet',
+      'mousedown .snippet': 'snippetToTextArea'
+    };
+
+    SignatureView.prototype.initialize = function() {};
+
+    SignatureView.prototype.render = function() {
+      var template;
+      template = this.template();
+      $(this.el).html(template(this.model));
+      this.switchToDescription();
+      this.isParam = this.model.isParam;
+      if (this.isParam) {
+        $('.notice', $(this.el)).text('Click to set as parameter value');
+      }
+      return this;
+    };
+
+    SignatureView.prototype.template = function() {
+      return Handlebars.templates.signature;
+    };
+
+    SignatureView.prototype.switchToDescription = function(e) {
+      if (e != null) {
+        e.preventDefault();
+      }
+      $(".snippet", $(this.el)).hide();
+      $(".description", $(this.el)).show();
+      $('.description-link', $(this.el)).addClass('selected');
+      return $('.snippet-link', $(this.el)).removeClass('selected');
+    };
+
+    SignatureView.prototype.switchToSnippet = function(e) {
+      if (e != null) {
+        e.preventDefault();
+      }
+      $(".description", $(this.el)).hide();
+      $(".snippet", $(this.el)).show();
+      $('.snippet-link', $(this.el)).addClass('selected');
+      return $('.description-link', $(this.el)).removeClass('selected');
+    };
+
+    SignatureView.prototype.snippetToTextArea = function(e) {
+      var textArea;
+      if (this.isParam) {
+        if (e != null) {
+          e.preventDefault();
+        }
+        textArea = $('textarea', $(this.el.parentNode.parentNode.parentNode));
+        if ($.trim(textArea.val()) === '') {
+          return textArea.val(this.model.sampleJSON);
+        }
+      }
+    };
+
+    return SignatureView;
+
+  })(Backbone.View);
+
+  ContentTypeView = (function(_super) {
+    __extends(ContentTypeView, _super);
+
+    function ContentTypeView() {
+      _ref8 = ContentTypeView.__super__.constructor.apply(this, arguments);
+      return _ref8;
+    }
+
+    ContentTypeView.prototype.initialize = function() {};
+
+    ContentTypeView.prototype.render = function() {
+      var template;
+      template = this.template();
+      $(this.el).html(template(this.model));
+      $('label[for=contentType]', $(this.el)).text('Response Content Type');
+      return this;
+    };
+
+    ContentTypeView.prototype.template = function() {
+      return Handlebars.templates.content_type;
+    };
+
+    return ContentTypeView;
+
+  })(Backbone.View);
+
+  ResponseContentTypeView = (function(_super) {
+    __extends(ResponseContentTypeView, _super);
+
+    function ResponseContentTypeView() {
+      _ref9 = ResponseContentTypeView.__super__.constructor.apply(this, arguments);
+      return _ref9;
+    }
+
+    ResponseContentTypeView.prototype.initialize = function() {};
+
+    ResponseContentTypeView.prototype.render = function() {
+      var template;
+      template = this.template();
+      $(this.el).html(template(this.model));
+      $('label[for=responseContentType]', $(this.el)).text('Response Content Type');
+      return this;
+    };
+
+    ResponseContentTypeView.prototype.template = function() {
+      return Handlebars.templates.response_content_type;
+    };
+
+    return ResponseContentTypeView;
+
+  })(Backbone.View);
+
+  ParameterContentTypeView = (function(_super) {
+    __extends(ParameterContentTypeView, _super);
+
+    function ParameterContentTypeView() {
+      _ref10 = ParameterContentTypeView.__super__.constructor.apply(this, arguments);
+      return _ref10;
+    }
+
+    ParameterContentTypeView.prototype.initialize = function() {};
+
+    ParameterContentTypeView.prototype.render = function() {
+      var template;
+      template = this.template();
+      $(this.el).html(template(this.model));
+      $('label[for=parameterContentType]', $(this.el)).text('Parameter content type:');
+      return this;
+    };
+
+    ParameterContentTypeView.prototype.template = function() {
+      return Handlebars.templates.parameter_content_type;
+    };
+
+    return ParameterContentTypeView;
+
+  })(Backbone.View);
+
+}).call(this);
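
For reference, the SwaggerUi router defined above is bootstrapped from the embedding page roughly along these lines (a minimal sketch; the element id, spec location and callbacks are illustrative, and in this static build SwaggerApi.build() reads the spec from a page-provided getSpec() helper rather than fetching options.url over HTTP):

    // Assumes jQuery, Backbone, Handlebars, swagger.js and swagger-ui.js are already loaded.
    window.swaggerUi = new SwaggerUi({
      url: "apis.json",                  // illustrative; used for display and as basePath fallback
      dom_id: "swagger-ui-container",    // defaults to "swagger_ui" when omitted
      docExpansion: "list",              // or "full" to expand every operation after render
      onComplete: function(swaggerApi, swaggerUi) { console.log("spec rendered"); },
      onFailure: function(data) { console.log("unable to load the API declaration"); }
    });
    window.swaggerUi.load();
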
diff --git a/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger.js b/opendaylight/md-sal/sal-rest-docgen/src/main/resources/explorer/static/swagger.js
new file mode 100644 (file)
index 0000000..a037790
--- /dev/null
@@ -0,0 +1,1199 @@
+// Generated by CoffeeScript 1.6.3
+(function() {
+  var ApiKeyAuthorization, PasswordAuthorization, SwaggerApi, SwaggerAuthorizations, SwaggerHttp, SwaggerModel, SwaggerModelProperty, SwaggerOperation, SwaggerRequest, SwaggerResource,
+    __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
+
+  SwaggerApi = (function() {
+    SwaggerApi.prototype.url = "http://api.wordnik.com/v4/resources.json";
+
+    SwaggerApi.prototype.debug = false;
+
+    SwaggerApi.prototype.basePath = null;
+
+    SwaggerApi.prototype.authorizations = null;
+
+    SwaggerApi.prototype.authorizationScheme = null;
+
+    SwaggerApi.prototype.info = null;
+
+    function SwaggerApi(url, options) {
+      if (options == null) {
+        options = {};
+      }
+      if (url) {
+        if (url.url) {
+          options = url;
+        } else {
+          this.url = url;
+        }
+      } else {
+        options = url;
+      }
+      if (options.url != null) {
+        this.url = options.url;
+      }
+      if (options.success != null) {
+        this.success = options.success;
+      }
+      this.failure = options.failure != null ? options.failure : function() {};
+      this.progress = options.progress != null ? options.progress : function() {};
+      if (options.success != null) {
+        this.build();
+      }
+    }
+
+    SwaggerApi.prototype.build = function() {
+      var e, obj,
+        _this = this;
+      this.progress('fetching resource list: ' + this.url);
+      var response;
+      response = JSON.parse(getSpec());
+
+      _this.swaggerVersion = response.swaggerVersion;
+      if (_this.swaggerVersion === "1.2") {
+        setTimeout(function(){return _this.buildFromSpec(response);}, 500);
+      } else {
+        return setTimeout(function(){return _this.buildFrom1_1Spec(response);}, 500);
+      }
+      return this;
+    };
+
+    SwaggerApi.prototype.buildFromSpec = function(response) {
+      var api, isApi, newName, operation, res, resource, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2;
+      if (response.apiVersion != null) {
+        this.apiVersion = response.apiVersion;
+      }
+      this.apis = {};
+      this.apisArray = [];
+      this.produces = response.produces;
+      this.authSchemes = response.authorizations;
+      if (response.info != null) {
+        this.info = response.info;
+      }
+      isApi = false;
+      _ref = response.apis;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        api = _ref[_i];
+        if (api.operations) {
+          _ref1 = api.operations;
+          for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+            operation = _ref1[_j];
+            isApi = true;
+          }
+        }
+      }
+      if (response.basePath) {
+        this.basePath = response.basePath;
+      } else if (this.url.indexOf('?') > 0) {
+        this.basePath = this.url.substring(0, this.url.lastIndexOf('?'));
+      } else {
+        this.basePath = this.url;
+      }
+      if (isApi) {
+        newName = response.resourcePath.replace(/\//g, '');
+        this.resourcePath = response.resourcePath;
+        res = new SwaggerResource(response, this);
+        this.apis[newName] = res;
+        this.apisArray.push(res);
+      } else {
+        _ref2 = response.apis;
+        for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
+          resource = _ref2[_k];
+          res = new SwaggerResource(resource, this);
+          this.apis[res.name] = res;
+          this.apisArray.push(res);
+        }
+      }
+      if (this.success) {
+        this.success();
+      }
+      return this;
+    };
+
+    SwaggerApi.prototype.buildFrom1_1Spec = function(response) {
+      var api, isApi, newName, operation, res, resource, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2;
+      console.log("This API is using a deprecated version of Swagger!  Please see http://github.com/wordnik/swagger-core/wiki for more info");
+      if (response.apiVersion != null) {
+        this.apiVersion = response.apiVersion;
+      }
+      this.apis = {};
+      this.apisArray = [];
+      this.produces = response.produces;
+      if (response.info != null) {
+        this.info = response.info;
+      }
+      isApi = false;
+      _ref = response.apis;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        api = _ref[_i];
+        if (api.operations) {
+          _ref1 = api.operations;
+          for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+            operation = _ref1[_j];
+            isApi = true;
+          }
+        }
+      }
+      if (response.basePath) {
+        this.basePath = response.basePath;
+      } else if (this.url.indexOf('?') > 0) {
+        this.basePath = this.url.substring(0, this.url.lastIndexOf('?'));
+      } else {
+        this.basePath = this.url;
+      }
+      if (isApi) {
+        newName = response.resourcePath.replace(/\//g, '');
+        this.resourcePath = response.resourcePath;
+        res = new SwaggerResource(response, this);
+        this.apis[newName] = res;
+        this.apisArray.push(res);
+      } else {
+        _ref2 = response.apis;
+        for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
+          resource = _ref2[_k];
+          res = new SwaggerResource(resource, this);
+          this.apis[res.name] = res;
+          this.apisArray.push(res);
+        }
+      }
+      if (this.success) {
+        this.success();
+      }
+      return this;
+    };
+
+    SwaggerApi.prototype.selfReflect = function() {
+      var resource, resource_name, _ref;
+      if (this.apis == null) {
+        return false;
+      }
+      _ref = this.apis;
+      for (resource_name in _ref) {
+        resource = _ref[resource_name];
+        if (resource.ready == null) {
+          return false;
+        }
+      }
+      this.setConsolidatedModels();
+      this.ready = true;
+      if (this.success != null) {
+        return this.success();
+      }
+    };
+
+    SwaggerApi.prototype.fail = function(message) {
+      this.failure(message);
+      throw message;
+    };
+
+    SwaggerApi.prototype.setConsolidatedModels = function() {
+      var model, modelName, resource, resource_name, _i, _len, _ref, _ref1, _results;
+      this.modelsArray = [];
+      this.models = {};
+      _ref = this.apis;
+      for (resource_name in _ref) {
+        resource = _ref[resource_name];
+        for (modelName in resource.models) {
+          if (this.models[modelName] == null) {
+            this.models[modelName] = resource.models[modelName];
+            this.modelsArray.push(resource.models[modelName]);
+          }
+        }
+      }
+      _ref1 = this.modelsArray;
+      _results = [];
+      for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+        model = _ref1[_i];
+        _results.push(model.setReferencedModels(this.models));
+      }
+      return _results;
+    };
+
+    SwaggerApi.prototype.help = function() {
+      var operation, operation_name, parameter, resource, resource_name, _i, _len, _ref, _ref1, _ref2;
+      _ref = this.apis;
+      for (resource_name in _ref) {
+        resource = _ref[resource_name];
+        console.log(resource_name);
+        _ref1 = resource.operations;
+        for (operation_name in _ref1) {
+          operation = _ref1[operation_name];
+          console.log("  " + operation.nickname);
+          _ref2 = operation.parameters;
+          for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
+            parameter = _ref2[_i];
+            console.log("    " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description);
+          }
+        }
+      }
+      return this;
+    };
+
+    return SwaggerApi;
+
+  })();
+
+  SwaggerResource = (function() {
+    SwaggerResource.prototype.api = null;
+
+    SwaggerResource.prototype.produces = null;
+
+    SwaggerResource.prototype.consumes = null;
+
+    function SwaggerResource(resourceObj, api) {
+      var consumes, e, obj, parts, produces,
+        _this = this;
+      this.api = api;
+      this.api = this.api;
+      produces = [];
+      consumes = [];
+      this.path = this.api.resourcePath != null ? this.api.resourcePath : resourceObj.path;
+      this.description = resourceObj.description;
+      parts = this.path.split("/");
+      this.name = parts[parts.length - 1].replace('.{format}', '');
+      this.basePath = this.api.basePath;
+      this.operations = {};
+      this.operationsArray = [];
+      this.modelsArray = [];
+      this.models = {};
+      if ((resourceObj.apis != null) && (this.api.resourcePath != null)) {
+        this.addApiDeclaration(resourceObj);
+      } else {
+        if (this.path == null) {
+          this.api.fail("SwaggerResources must have a path.");
+        }
+        if (this.path.substring(0, 4) === 'http') {
+          this.url = this.path.replace('{format}', 'json');
+        } else {
+          this.url = this.api.basePath + this.path.replace('{format}', 'json');
+        }
+        this.api.progress('fetching resource ' + this.name + ': ' + this.url);
+        var response;
+        var split = resourceObj.path.split("/");
+        var resource = split[split.length-1];
+        response = JSON.parse(jsonFor(resource));
+        setTimeout(function(){return _this.addApiDeclaration(response);}, 500);
+      }
+    }
+
+    SwaggerResource.prototype.addApiDeclaration = function(response) {
+      var endpoint, _i, _len, _ref;
+      if (response.produces != null) {
+        this.produces = response.produces;
+      }
+      if (response.consumes != null) {
+        this.consumes = response.consumes;
+      }
+      if ((response.basePath != null) && response.basePath.replace(/\s/g, '').length > 0) {
+        this.basePath = response.basePath;
+      }
+      this.addModels(response.models);
+      if (response.apis) {
+        _ref = response.apis;
+        for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+          endpoint = _ref[_i];
+          this.addOperations(endpoint.path, endpoint.operations, response.consumes, response.produces);
+        }
+      }
+      this.api[this.name] = this;
+      this.ready = true;
+      return this.api.selfReflect();
+    };
+
+    SwaggerResource.prototype.addModels = function(models) {
+      var model, modelName, swaggerModel, _i, _len, _ref, _results;
+      if (models != null) {
+        for (modelName in models) {
+          if (this.models[modelName] == null) {
+            swaggerModel = new SwaggerModel(modelName, models[modelName]);
+            this.modelsArray.push(swaggerModel);
+            this.models[modelName] = swaggerModel;
+          }
+        }
+        _ref = this.modelsArray;
+        _results = [];
+        for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+          model = _ref[_i];
+          _results.push(model.setReferencedModels(this.models));
+        }
+        return _results;
+      }
+    };
+
+    SwaggerResource.prototype.addOperations = function(resource_path, ops, consumes, produces) {
+      var method, o, op, r, ref, responseMessages, type, _i, _j, _len, _len1, _results;
+      if (ops) {
+        _results = [];
+        for (_i = 0, _len = ops.length; _i < _len; _i++) {
+          o = ops[_i];
+          consumes = this.consumes;
+          produces = this.produces;
+          if (o.consumes != null) {
+            consumes = o.consumes;
+          } else {
+            consumes = this.consumes;
+          }
+          if (o.produces != null) {
+            produces = o.produces;
+          } else {
+            produces = this.produces;
+          }
+          type = o.type || o.responseClass;
+          if (type === "array") {
+            ref = null;
+            if (o.items) {
+              ref = o.items["type"] || o.items["$ref"];
+            }
+            type = "array[" + ref + "]";
+          }
+          responseMessages = o.responseMessages;
+          method = o.method;
+          if (o.httpMethod) {
+            method = o.httpMethod;
+          }
+          if (o.supportedContentTypes) {
+            consumes = o.supportedContentTypes;
+          }
+          if (o.errorResponses) {
+            responseMessages = o.errorResponses;
+            for (_j = 0, _len1 = responseMessages.length; _j < _len1; _j++) {
+              r = responseMessages[_j];
+              r.message = r.reason;
+              r.reason = null;
+            }
+          }
+          o.nickname = this.sanitize(o.nickname);
+          op = new SwaggerOperation(o.nickname, resource_path, method, o.parameters, o.summary, o.notes, type, responseMessages, this, consumes, produces);
+          this.operations[op.nickname] = op;
+          _results.push(this.operationsArray.push(op));
+        }
+        return _results;
+      }
+    };
+
+    SwaggerResource.prototype.sanitize = function(nickname) {
+      var op;
+      op = nickname.replace(/[\s!@#$%^&*()_+=\[{\]};:<>|./?,\\'""-]/g, '_');
+      op = op.replace(/((_){2,})/g, '_');
+      op = op.replace(/^(_)*/g, '');
+      op = op.replace(/([_])*$/g, '');
+      return op;
+    };
+
+    SwaggerResource.prototype.help = function() {
+      var msg, operation, operation_name, parameter, _i, _len, _ref, _ref1, _results;
+      _ref = this.operations;
+      _results = [];
+      for (operation_name in _ref) {
+        operation = _ref[operation_name];
+        msg = "  " + operation.nickname;
+        _ref1 = operation.parameters;
+        for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+          parameter = _ref1[_i];
+          msg.concat("    " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description);
+        }
+        _results.push(msg);
+      }
+      return _results;
+    };
+
+    return SwaggerResource;
+
+  })();
+
+  SwaggerModel = (function() {
+    function SwaggerModel(modelName, obj) {
+      var prop, propertyName, value;
+      this.name = obj.id != null ? obj.id : modelName;
+      this.properties = [];
+      for (propertyName in obj.properties) {
+        if (obj.required != null) {
+          for (value in obj.required) {
+            if (propertyName === obj.required[value]) {
+              obj.properties[propertyName].required = true;
+            }
+          }
+        }
+        prop = new SwaggerModelProperty(propertyName, obj.properties[propertyName]);
+        this.properties.push(prop);
+      }
+    }
+
+    SwaggerModel.prototype.setReferencedModels = function(allModels) {
+      var prop, type, _i, _len, _ref, _results;
+      _ref = this.properties;
+      _results = [];
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        prop = _ref[_i];
+        type = prop.type || prop.dataType;
+        if (allModels[type] != null) {
+          _results.push(prop.refModel = allModels[type]);
+        } else if ((prop.refDataType != null) && (allModels[prop.refDataType] != null)) {
+          _results.push(prop.refModel = allModels[prop.refDataType]);
+        } else {
+          _results.push(void 0);
+        }
+      }
+      return _results;
+    };
+
+    SwaggerModel.prototype.getMockSignature = function(modelsToIgnore) {
+      var classClose, classOpen, prop, propertiesStr, returnVal, strong, strongClose, stronger, _i, _j, _len, _len1, _ref, _ref1;
+      propertiesStr = [];
+      _ref = this.properties;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        prop = _ref[_i];
+        propertiesStr.push(prop.toString());
+      }
+      strong = '<span class="strong">';
+      stronger = '<span class="stronger">';
+      strongClose = '</span>';
+      classOpen = strong + this.name + ' {' + strongClose;
+      classClose = strong + '}' + strongClose;
+      returnVal = classOpen + '<div>' + propertiesStr.join(',</div><div>') + '</div>' + classClose;
+      if (!modelsToIgnore) {
+        modelsToIgnore = [];
+      }
+      modelsToIgnore.push(this);
+      _ref1 = this.properties;
+      for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+        prop = _ref1[_j];
+        if ((prop.refModel != null) && (modelsToIgnore.indexOf(prop.refModel)) === -1) {
+          returnVal = returnVal + ('<br>' + prop.refModel.getMockSignature(modelsToIgnore));
+        }
+      }
+      return returnVal;
+    };
+
+    SwaggerModel.prototype.createJSONSample = function(modelsToIgnore) {
+      var prop, result, _i, _len, _ref;
+      result = {};
+      modelsToIgnore = modelsToIgnore || [];
+      modelsToIgnore.push(this.name);
+      _ref = this.properties;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        prop = _ref[_i];
+        result[prop.name] = prop.getSampleValue(modelsToIgnore);
+      }
+      modelsToIgnore.pop(this.name);
+      return result;
+    };
+
+    return SwaggerModel;
+
+  })();
+
+  SwaggerModelProperty = (function() {
+    function SwaggerModelProperty(name, obj) {
+      this.name = name;
+      this.dataType = obj.type || obj.dataType || obj["$ref"];
+      this.isCollection = this.dataType && (this.dataType.toLowerCase() === 'array' || this.dataType.toLowerCase() === 'list' || this.dataType.toLowerCase() === 'set');
+      this.descr = obj.description;
+      this.required = obj.required;
+      if (obj.items != null) {
+        if (obj.items.type != null) {
+          this.refDataType = obj.items.type;
+        }
+        if (obj.items.$ref != null) {
+          this.refDataType = obj.items.$ref;
+        }
+      }
+      this.dataTypeWithRef = this.refDataType != null ? this.dataType + '[' + this.refDataType + ']' : this.dataType;
+      if (obj.allowableValues != null) {
+        this.valueType = obj.allowableValues.valueType;
+        this.values = obj.allowableValues.values;
+        if (this.values != null) {
+          this.valuesString = "'" + this.values.join("' or '") + "'";
+        }
+      }
+      if (obj["enum"] != null) {
+        this.valueType = "string";
+        this.values = obj["enum"];
+        if (this.values != null) {
+          this.valuesString = "'" + this.values.join("' or '") + "'";
+        }
+      }
+    }
+
+    SwaggerModelProperty.prototype.getSampleValue = function(modelsToIgnore) {
+      var result;
+      if ((this.refModel != null) && (modelsToIgnore.indexOf(this.refModel.name) === -1)) {
+        result = this.refModel.createJSONSample(modelsToIgnore);
+      } else {
+        if (this.isCollection) {
+          result = this.refDataType;
+        } else {
+          result = this.dataType;
+        }
+      }
+      if (this.isCollection) {
+        return [result];
+      } else {
+        return result;
+      }
+    };
+
+    SwaggerModelProperty.prototype.toString = function() {
+      var req, str;
+      req = this.required ? 'propReq' : 'propOpt';
+      str = '<span class="propName ' + req + '">' + this.name + '</span> (<span class="propType">' + this.dataTypeWithRef + '</span>';
+      if (!this.required) {
+        str += ', <span class="propOptKey">optional</span>';
+      }
+      str += ')';
+      if (this.values != null) {
+        str += " = <span class='propVals'>['" + this.values.join("' or '") + "']</span>";
+      }
+      if (this.descr != null) {
+        str += ': <span class="propDesc">' + this.descr + '</span>';
+      }
+      return str;
+    };
+
+    return SwaggerModelProperty;
+
+  })();
+
+  SwaggerOperation = (function() {
+    function SwaggerOperation(nickname, path, method, parameters, summary, notes, type, responseMessages, resource, consumes, produces) {
+      var parameter, v, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2, _ref3,
+        _this = this;
+      this.nickname = nickname;
+      this.path = path;
+      this.method = method;
+      this.parameters = parameters != null ? parameters : [];
+      this.summary = summary;
+      this.notes = notes;
+      this.type = type;
+      this.responseMessages = responseMessages;
+      this.resource = resource;
+      this.consumes = consumes;
+      this.produces = produces;
+      this["do"] = __bind(this["do"], this);
+      if (this.nickname == null) {
+        this.resource.api.fail("SwaggerOperations must have a nickname.");
+      }
+      if (this.path == null) {
+        this.resource.api.fail("SwaggerOperation " + nickname + " is missing path.");
+      }
+      if (this.method == null) {
+        this.resource.api.fail("SwaggerOperation " + nickname + " is missing method.");
+      }
+      this.path = this.path.replace('{format}', 'json');
+      this.method = this.method.toLowerCase();
+      this.isGetMethod = this.method === "get";
+      this.resourceName = this.resource.name;
+      if (((_ref = this.type) != null ? _ref.toLowerCase() : void 0) === 'void') {
+        this.type = void 0;
+      }
+      if (this.type != null) {
+        this.responseClassSignature = this.getSignature(this.type, this.resource.models);
+        this.responseSampleJSON = this.getSampleJSON(this.type, this.resource.models);
+      }
+      this.responseMessages = this.responseMessages || [];
+      _ref1 = this.parameters;
+      for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+        parameter = _ref1[_i];
+        parameter.name = parameter.name || parameter.type || parameter.dataType;
+        type = parameter.type || parameter.dataType;
+        if (type.toLowerCase() === 'boolean') {
+          parameter.allowableValues = {};
+          parameter.allowableValues.values = ["true", "false"];
+        }
+        parameter.signature = this.getSignature(type, this.resource.models);
+        parameter.sampleJSON = this.getSampleJSON(type, this.resource.models);
+        if (parameter["enum"] != null) {
+          parameter.isList = true;
+          parameter.allowableValues = {};
+          parameter.allowableValues.descriptiveValues = [];
+          _ref2 = parameter["enum"];
+          for (_j = 0, _len1 = _ref2.length; _j < _len1; _j++) {
+            v = _ref2[_j];
+            if ((parameter.defaultValue != null) && parameter.defaultValue === v) {
+              parameter.allowableValues.descriptiveValues.push({
+                value: v,
+                isDefault: true
+              });
+            } else {
+              parameter.allowableValues.descriptiveValues.push({
+                value: v,
+                isDefault: false
+              });
+            }
+          }
+        }
+        if (parameter.allowableValues != null) {
+          if (parameter.allowableValues.valueType === "RANGE") {
+            parameter.isRange = true;
+          } else {
+            parameter.isList = true;
+          }
+          if (parameter.allowableValues.values != null) {
+            parameter.allowableValues.descriptiveValues = [];
+            _ref3 = parameter.allowableValues.values;
+            for (_k = 0, _len2 = _ref3.length; _k < _len2; _k++) {
+              v = _ref3[_k];
+              if ((parameter.defaultValue != null) && parameter.defaultValue === v) {
+                parameter.allowableValues.descriptiveValues.push({
+                  value: v,
+                  isDefault: true
+                });
+              } else {
+                parameter.allowableValues.descriptiveValues.push({
+                  value: v,
+                  isDefault: false
+                });
+              }
+            }
+          }
+        }
+      }
+      this.resource[this.nickname] = function(args, callback, error) {
+        return _this["do"](args, callback, error);
+      };
+      this.resource[this.nickname].help = function() {
+        return _this.help();
+      };
+    }
+
+    SwaggerOperation.prototype.isListType = function(type) {
+      if (type.indexOf('[') >= 0) {
+        return type.substring(type.indexOf('[') + 1, type.indexOf(']'));
+      } else {
+        return void 0;
+      }
+    };
+
+    SwaggerOperation.prototype.getSignature = function(type, models) {
+      var isPrimitive, listType;
+      listType = this.isListType(type);
+      isPrimitive = ((listType != null) && models[listType]) || (models[type] != null) ? false : true;
+      if (isPrimitive) {
+        return type;
+      } else {
+        if (listType != null) {
+          return models[listType].getMockSignature();
+        } else {
+          return models[type].getMockSignature();
+        }
+      }
+    };
+
+    SwaggerOperation.prototype.getSampleJSON = function(type, models) {
+      var isPrimitive, listType, val;
+      listType = this.isListType(type);
+      isPrimitive = ((listType != null) && models[listType]) || (models[type] != null) ? false : true;
+      val = isPrimitive ? void 0 : (listType != null ? models[listType].createJSONSample() : models[type].createJSONSample());
+      if (val) {
+        val = listType ? [val] : val;
+        return JSON.stringify(val, null, 2);
+      }
+    };
+
+    SwaggerOperation.prototype["do"] = function(args, opts, callback, error) {
+      var key, param, params, possibleParams, req, requestContentType, responseContentType, value, _i, _len, _ref;
+      if (args == null) {
+        args = {};
+      }
+      if (opts == null) {
+        opts = {};
+      }
+      requestContentType = null;
+      responseContentType = null;
+      if ((typeof args) === "function") {
+        error = opts;
+        callback = args;
+        args = {};
+      }
+      if ((typeof opts) === "function") {
+        error = callback;
+        callback = opts;
+      }
+      if (error == null) {
+        error = function(xhr, textStatus, error) {
+          return console.log(xhr, textStatus, error);
+        };
+      }
+      if (callback == null) {
+        callback = function(data) {
+          var content;
+          content = null;
+          if (data.content != null) {
+            content = data.content.data;
+          } else {
+            content = "no data";
+          }
+          return console.log("default callback: " + content);
+        };
+      }
+      params = {};
+      params.headers = [];
+      if (args.headers != null) {
+        params.headers = args.headers;
+        delete args.headers;
+      }
+      _ref = this.parameters;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        param = _ref[_i];
+        if (param.paramType === "header") {
+          if (args[param.name]) {
+            params.headers[param.name] = args[param.name];
+          }
+        }
+      }
+      if (args.body != null) {
+        params.body = args.body;
+        delete args.body;
+      }
+      possibleParams = (function() {
+        var _j, _len1, _ref1, _results;
+        _ref1 = this.parameters;
+        _results = [];
+        for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+          param = _ref1[_j];
+          if (param.paramType === "form" || param.paramType.toLowerCase() === "file") {
+            _results.push(param);
+          }
+        }
+        return _results;
+      }).call(this);
+      if (possibleParams) {
+        for (key in possibleParams) {
+          value = possibleParams[key];
+          if (args[value.name]) {
+            params[value.name] = args[value.name];
+          }
+        }
+      }
+      req = new SwaggerRequest(this.method, this.urlify(args), params, opts, callback, error, this);
+      if (opts.mock != null) {
+        return req;
+      } else {
+        return true;
+      }
+    };
+
+    SwaggerOperation.prototype.pathJson = function() {
+      return this.path.replace("{format}", "json");
+    };
+
+    SwaggerOperation.prototype.pathXml = function() {
+      return this.path.replace("{format}", "xml");
+    };
+
+    SwaggerOperation.prototype.urlify = function(args) {
+      var param, queryParams, reg, url, _i, _j, _len, _len1, _ref, _ref1;
+      url = this.resource.basePath + this.pathJson();
+      _ref = this.parameters;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        param = _ref[_i];
+        if (param.paramType === 'path') {
+          if (args[param.name]) {
+            reg = new RegExp('\{' + param.name + '[^\}]*\}', 'gi');
+            url = url.replace(reg, encodeURIComponent(args[param.name]));
+            delete args[param.name];
+          } else {
+            throw "" + param.name + " is a required path param.";
+          }
+        }
+      }
+      queryParams = "";
+      _ref1 = this.parameters;
+      for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+        param = _ref1[_j];
+        if (param.paramType === 'query') {
+          if (args[param.name]) {
+            if (queryParams !== "") {
+              queryParams += "&";
+            }
+            queryParams += encodeURIComponent(param.name) + '=' + encodeURIComponent(args[param.name]);
+          }
+        }
+      }
+      if ((queryParams != null) && queryParams.length > 0) {
+        url += "?" + queryParams;
+      }
+      return url;
+    };
+
+    SwaggerOperation.prototype.supportHeaderParams = function() {
+      return this.resource.api.supportHeaderParams;
+    };
+
+    SwaggerOperation.prototype.supportedSubmitMethods = function() {
+      return this.resource.api.supportedSubmitMethods;
+    };
+
+    SwaggerOperation.prototype.getQueryParams = function(args) {
+      return this.getMatchingParams(['query'], args);
+    };
+
+    SwaggerOperation.prototype.getHeaderParams = function(args) {
+      return this.getMatchingParams(['header'], args);
+    };
+
+    SwaggerOperation.prototype.getMatchingParams = function(paramTypes, args) {
+      var matchingParams, name, param, value, _i, _len, _ref, _ref1;
+      matchingParams = {};
+      _ref = this.parameters;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        param = _ref[_i];
+        if (args && args[param.name]) {
+          matchingParams[param.name] = args[param.name];
+        }
+      }
+      _ref1 = this.resource.api.headers;
+      for (name in _ref1) {
+        value = _ref1[name];
+        matchingParams[name] = value;
+      }
+      return matchingParams;
+    };
+
+    SwaggerOperation.prototype.help = function() {
+      var msg, parameter, _i, _len, _ref;
+      msg = "";
+      _ref = this.parameters;
+      for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+        parameter = _ref[_i];
+        if (msg !== "") {
+          msg += "\n";
+        }
+        msg += "* " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description;
+      }
+      return msg;
+    };
+
+    return SwaggerOperation;
+
+  })();
+
+  SwaggerRequest = (function() {
+    function SwaggerRequest(type, url, params, opts, successCallback, errorCallback, operation, execution) {
+      var body, e, fields, headers, key, myHeaders, name, obj, param, parent, possibleParams, requestContentType, responseContentType, urlEncoded, value, values,
+        _this = this;
+      this.type = type;
+      this.url = url;
+      this.params = params;
+      this.opts = opts;
+      this.successCallback = successCallback;
+      this.errorCallback = errorCallback;
+      this.operation = operation;
+      this.execution = execution;
+      if (this.type == null) {
+        throw "SwaggerRequest type is required (get/post/put/delete).";
+      }
+      if (this.url == null) {
+        throw "SwaggerRequest url is required.";
+      }
+      if (this.successCallback == null) {
+        throw "SwaggerRequest successCallback is required.";
+      }
+      if (this.errorCallback == null) {
+        throw "SwaggerRequest error callback is required.";
+      }
+      if (this.operation == null) {
+        throw "SwaggerRequest operation is required.";
+      }
+      this.type = this.type.toUpperCase();
+      headers = params.headers;
+      myHeaders = {};
+      body = params.body;
+      parent = params["parent"];
+      requestContentType = "application/json";
+      if (body && (this.type === "POST" || this.type === "PUT" || this.type === "PATCH")) {
+        if (this.opts.requestContentType) {
+          requestContentType = this.opts.requestContentType;
+        }
+      } else {
+        if (((function() {
+          var _i, _len, _ref, _results;
+          _ref = this.operation.parameters;
+          _results = [];
+          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+            param = _ref[_i];
+            if (param.paramType === "form") {
+              _results.push(param);
+            }
+          }
+          return _results;
+        }).call(this)).length > 0) {
+          type = param.type || param.dataType;
+          if (((function() {
+            var _i, _len, _ref, _results;
+            _ref = this.operation.parameters;
+            _results = [];
+            for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+              param = _ref[_i];
+              if (type.toLowerCase() === "file") {
+                _results.push(param);
+              }
+            }
+            return _results;
+          }).call(this)).length > 0) {
+            requestContentType = "multipart/form-data";
+          } else {
+            requestContentType = "application/x-www-form-urlencoded";
+          }
+        } else if (this.type !== "DELETE") {
+          requestContentType = null;
+        }
+      }
+      if (requestContentType && this.operation.consumes) {
+        if (this.operation.consumes.indexOf(requestContentType) === -1) {
+          console.log("server doesn't consume " + requestContentType + ", try " + JSON.stringify(this.operation.consumes));
+          if (this.requestContentType === null) {
+            requestContentType = this.operation.consumes[0];
+          }
+        }
+      }
+      responseContentType = null;
+      if (this.type === "POST" || this.type === "GET" || this.type === "PATCH") {
+        if (this.opts.responseContentType) {
+          responseContentType = this.opts.responseContentType;
+        } else {
+          responseContentType = "application/json";
+        }
+      } else {
+        responseContentType = null;
+      }
+      if (responseContentType && this.operation.produces) {
+        if (this.operation.produces.indexOf(responseContentType) === -1) {
+          console.log("server can't produce " + responseContentType);
+        }
+      }
+      if (requestContentType && requestContentType.indexOf("application/x-www-form-urlencoded") === 0) {
+        fields = {};
+        possibleParams = (function() {
+          var _i, _len, _ref, _results;
+          _ref = this.operation.parameters;
+          _results = [];
+          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+            param = _ref[_i];
+            if (param.paramType === "form") {
+              _results.push(param);
+            }
+          }
+          return _results;
+        }).call(this);
+        values = {};
+        for (key in possibleParams) {
+          value = possibleParams[key];
+          if (this.params[value.name]) {
+            values[value.name] = this.params[value.name];
+          }
+        }
+        urlEncoded = "";
+        for (key in values) {
+          value = values[key];
+          if (urlEncoded !== "") {
+            urlEncoded += "&";
+          }
+          urlEncoded += encodeURIComponent(key) + '=' + encodeURIComponent(value);
+        }
+        body = urlEncoded;
+      }
+      for (name in headers) {
+        myHeaders[name] = headers[name];
+      }
+      if (requestContentType) {
+        myHeaders["Content-Type"] = requestContentType;
+      }
+      if (responseContentType) {
+        myHeaders["Accept"] = responseContentType;
+      }
+      if (!((headers != null) && (headers.mock != null))) {
+        obj = {
+          url: this.url,
+          method: this.type,
+          headers: myHeaders,
+          body: body,
+          on: {
+            error: function(response) {
+              return _this.errorCallback(response, _this.opts.parent);
+            },
+            redirect: function(response) {
+              return _this.successCallback(response, _this.opts.parent);
+            },
+            307: function(response) {
+              return _this.successCallback(response, _this.opts.parent);
+            },
+            response: function(response) {
+              return _this.successCallback(response, _this.opts.parent);
+            }
+          }
+        };
+        e = {};
+        if (typeof window !== 'undefined') {
+          e = window;
+        } else {
+          e = exports;
+        }
+        e.authorizations.apply(obj);
+        if (opts.mock == null) {
+          new SwaggerHttp().execute(obj);
+        } else {
+          console.log(obj);
+          return obj;
+        }
+      }
+    }
+
+    SwaggerRequest.prototype.asCurl = function() {
+      var header_args, k, v;
+      header_args = (function() {
+        var _ref, _results;
+        _ref = this.headers;
+        _results = [];
+        for (k in _ref) {
+          v = _ref[k];
+          _results.push("--header \"" + k + ": " + v + "\"");
+        }
+        return _results;
+      }).call(this);
+      return "curl " + (header_args.join(" ")) + " " + this.url;
+    };
+
+    return SwaggerRequest;
+
+  })();
+
+  SwaggerHttp = (function() {
+    SwaggerHttp.prototype.Shred = null;
+
+    SwaggerHttp.prototype.shred = null;
+
+    SwaggerHttp.prototype.content = null;
+
+    function SwaggerHttp() {
+      var identity, toString,
+        _this = this;
+      if (typeof window !== 'undefined') {
+        this.Shred = require("./shred");
+      } else {
+        this.Shred = require("shred");
+      }
+      this.shred = new this.Shred();
+      identity = function(x) {
+        return x;
+      };
+      toString = function(x) {
+        return x.toString();
+      };
+      if (typeof window !== 'undefined') {
+        this.content = require("./shred/content");
+        this.content.registerProcessor(["application/json; charset=utf-8", "application/json", "json"], {
+          parser: identity,
+          stringify: toString
+        });
+      } else {
+        this.Shred.registerProcessor(["application/json; charset=utf-8", "application/json", "json"], {
+          parser: identity,
+          stringify: toString
+        });
+      }
+    }
+
+    SwaggerHttp.prototype.execute = function(obj) {
+      return this.shred.request(obj);
+    };
+
+    return SwaggerHttp;
+
+  })();
+
+  SwaggerAuthorizations = (function() {
+    SwaggerAuthorizations.prototype.authz = null;
+
+    function SwaggerAuthorizations() {
+      this.authz = {};
+    }
+
+    SwaggerAuthorizations.prototype.add = function(name, auth) {
+      this.authz[name] = auth;
+      return auth;
+    };
+
+    SwaggerAuthorizations.prototype.apply = function(obj) {
+      var key, value, _ref, _results;
+      _ref = this.authz;
+      _results = [];
+      for (key in _ref) {
+        value = _ref[key];
+        _results.push(value.apply(obj));
+      }
+      return _results;
+    };
+
+    return SwaggerAuthorizations;
+
+  })();
+
+  ApiKeyAuthorization = (function() {
+    ApiKeyAuthorization.prototype.type = null;
+
+    ApiKeyAuthorization.prototype.name = null;
+
+    ApiKeyAuthorization.prototype.value = null;
+
+    function ApiKeyAuthorization(name, value, type) {
+      this.name = name;
+      this.value = value;
+      this.type = type;
+    }
+
+    ApiKeyAuthorization.prototype.apply = function(obj) {
+      if (this.type === "query") {
+        if (obj.url.indexOf('?') > 0) {
+          obj.url = obj.url + "&" + this.name + "=" + this.value;
+        } else {
+          obj.url = obj.url + "?" + this.name + "=" + this.value;
+        }
+        return true;
+      } else if (this.type === "header") {
+        return obj.headers[this.name] = this.value;
+      }
+    };
+
+    return ApiKeyAuthorization;
+
+  })();
+
+  PasswordAuthorization = (function() {
+    PasswordAuthorization.prototype.name = null;
+
+    PasswordAuthorization.prototype.username = null;
+
+    PasswordAuthorization.prototype.password = null;
+
+    function PasswordAuthorization(name, username, password) {
+      this.name = name;
+      this.username = username;
+      this.password = password;
+    }
+
+    PasswordAuthorization.prototype.apply = function(obj) {
+      return obj.headers["Authorization"] = "Basic " + btoa(this.username + ":" + this.password);
+    };
+
+    return PasswordAuthorization;
+
+  })();
+
+  this.SwaggerApi = SwaggerApi;
+
+  this.SwaggerResource = SwaggerResource;
+
+  this.SwaggerOperation = SwaggerOperation;
+
+  this.SwaggerRequest = SwaggerRequest;
+
+  this.SwaggerModelProperty = SwaggerModelProperty;
+
+  this.ApiKeyAuthorization = ApiKeyAuthorization;
+
+  this.PasswordAuthorization = PasswordAuthorization;
+
+  this.authorizations = new SwaggerAuthorizations();
+
+}).call(this);
index d4d55b04c0bde2ddcc4c883e55af5248d7145c91..31a1237a5bad0328ce89e87cb3fad72d24221310 100644 (file)
@@ -71,7 +71,7 @@
         <dependency>
             <groupId>org.opendaylight.controller.samples</groupId>
             <artifactId>clustering-it-model</artifactId>
-            <version>${version}</version>
+            <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>org.opendaylight.controller</groupId>
index 9116e66737f3abc8815abf7f3299409dd148835d..16ad28dd4fa9f439e201e59a9e66ff205c809d18 100644 (file)
@@ -82,17 +82,27 @@ public interface StatPermCollector extends Runnable, AutoCloseable {
 
     /**
      * All disconnected Nodes need be removed from stat list Nodes
+     *
      * @param flowNode
      * @return true/false if the {@link Node} removed successful
      */
     boolean disconnectedNodeUnregistration(InstanceIdentifier<Node> nodeIdent);
 
+    /**
+     * Method adds a new feature {@link StatCapabTypes} to the Node identified by
+     * nodeIdent (InstanceIdentifier<Node>).
+     *
+     * @param nodeIdent
+     * @param statCapab
+     * @return true/false depending on whether the {@link StatCapabTypes} was added successfully
+     */
+    boolean registerAdditionalNodeFeature(InstanceIdentifier<Node> nodeIdent, StatCapabTypes statCapab);
+
     /**
      * Method return true only and only if {@link StatPermCollector} contain
      * valid node registration in its internal {@link Node} map.
      * Otherwise return false.
      *
-     * @param InstanceIdentifier<FlowCapableNode> flowNode
+     * @param nodeIdent
      * @return
      */
     boolean isProvidedFlowNodeActive(InstanceIdentifier<Node> nodeIdent);
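
A minimal usage sketch of the registerAdditionalNodeFeature contract introduced above: it is intended to be called once a listener learns that a device supports an extra statistics capability, which is what StatListenCommitGroup does further below with StatCapabTypes.GROUP_STATS. The helper class, the node-id value and the yang-generated import paths are illustrative assumptions; only the two StatPermCollector calls are taken from the interface itself.

    import org.opendaylight.controller.md.statistics.manager.StatPermCollector;
    import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
    import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    /** Hypothetical helper: enables group-statistics collection for one node. */
    final class GroupStatsRegistration {
        static boolean enableGroupStats(final StatPermCollector collector, final String nodeIdValue) {
            // Build the node path the same way the listeners in this patch do.
            final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
                    .create(Nodes.class).child(Node.class, new NodeKey(new NodeId(nodeIdValue)));
            // Register the extra capability only for a node the collector already tracks.
            return collector.isProvidedFlowNodeActive(nodeIdent)
                    && collector.registerAdditionalNodeFeature(nodeIdent, StatCapabTypes.GROUP_STATS);
        }
    }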
index b53c54e360e662c7a7b6b935dbfb76d2b6c98788..7d57067df167ca95c0c1f8bc3b570c7ab4707a7a 100644 (file)
@@ -119,6 +119,16 @@ public interface StatisticsManager extends AutoCloseable, TransactionChainListen
       */
      void disconnectedNodeUnregistration(InstanceIdentifier<Node> nodeIdent);
 
+     /**
+      * Method wraps {@link StatPermCollector}.registerAdditionalNodeFeature to provide
+      * the possibility of registering an additional Node Feature {@link StatCapabTypes}
+      * for statistics collection.
+      *
+      * @param nodeIdent
+      * @param statCapab
+      */
+     void registerAdditionalNodeFeature(InstanceIdentifier<Node> nodeIdent, StatCapabTypes statCapab);
+
     /**
      * Method provides access to Device RPC methods by wrapped
      * internal method. In next {@link StatRpcMsgManager} is registered all
index 10bfcba6786ee013d3313192288f0be00f611701..68197805ab84ff5c0cc8e0c6805834643acb90d8 100644 (file)
@@ -51,6 +51,7 @@ public abstract class StatAbstractListenCommit<T extends DataObject, N extends N
     private ListenerRegistration<DataChangeListener> listenerRegistration;
 
     protected final Map<InstanceIdentifier<Node>, Map<InstanceIdentifier<T>, Integer>> mapNodesForDelete = new ConcurrentHashMap<>();
+    protected final Map<InstanceIdentifier<Node>, Integer> mapNodeFeautureRepeater = new ConcurrentHashMap<>();
 
     private final Class<T> clazz;
 
@@ -127,6 +128,11 @@ public abstract class StatAbstractListenCommit<T extends DataObject, N extends N
         super.close();
     }
 
+    /**
+     * Method returns the actual DataObject identified by the InstanceIdentifier from the Config/DS
+     * @param path InstanceIdentifier of the requested DataObject
+     * @return Optional containing the latest configuration DataObject, if present
+     */
     protected final <K extends DataObject> Optional<K> readLatestConfiguration(final InstanceIdentifier<K> path) {
         if(currentReadTx == null) {
              currentReadTx = dataBroker.newReadOnlyTransaction();
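
The readLatestConfiguration helper documented above is protected, so it is only reachable from listeners extending StatAbstractListenCommit. A hedged fragment showing how such a subclass might read the configured Groups of a node; the method name configuredGroups is illustrative, while FlowCapableNode, Group, Collections and Guava's Optional match the imports already used by the group listener in this patch.

    // Illustrative method inside a subclass of StatAbstractListenCommit<Group, ...>:
    protected List<Group> configuredGroups(final InstanceIdentifier<Node> nodeIdent) {
        final InstanceIdentifier<FlowCapableNode> fNodeIdent =
                nodeIdent.augmentation(FlowCapableNode.class);
        // Read the latest configured state for this node, if any has been written.
        final Optional<FlowCapableNode> fNode = readLatestConfiguration(fNodeIdent);
        return fNode.isPresent() && fNode.get().getGroup() != null
                ? fNode.get().getGroup() : Collections.<Group>emptyList();
    }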
index 08871e99806c7b51be2480f40b2d50914d96c0f9..6bc6a30f8fb806ebe9e3b15473f7160ccba7670d 100644 (file)
@@ -110,6 +110,29 @@ public abstract class StatAbstractNotifyCommit<N extends NotificationListener> i
         return txContainer;
     }
 
+    /**
+     * Method validates the TransactionCacheContainer. It needs to be called before every txCacheContainer processing.
+     *
+     * @param txCacheContainer
+     * @return true if the TransactionCacheContainer is valid, otherwise false
+     */
+    protected boolean isTransactionCacheContainerValid(final Optional<TransactionCacheContainer<?>> txCacheContainer) {
+        if ( ! txCacheContainer.isPresent()) {
+            LOG.debug("Transaction Cache Container is not presented!");
+            return false;
+        }
+        if (txCacheContainer.get().getNodeId() == null) {
+            LOG.debug("Transaction Cache Container {} don't have Node ID!", txCacheContainer.get().getId());
+            return false;
+        }
+        if (txCacheContainer.get().getNotifications() == null) {
+            LOG.debug("Transaction Cache Container {} for {} node don't have Notifications!",
+                    txCacheContainer.get().getId(), txCacheContainer.get().getNodeId());
+            return false;
+        }
+        return true;
+    }
+
     /**
      * Wrapping Future object call to {@link org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager}
      * isExpectedStatistics with 10sec TimeOut.
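
The isTransactionCacheContainerValid guard above is intended to run before every cached-notification replay, as the StatListenCommitGroup handlers further below demonstrate. A condensed sketch of the call pattern inside such a handler (the surrounding handler logic is elided, and the loop body is a placeholder):

    // Inside an on...Updated handler of a StatAbstractNotifyCommit subclass:
    final Optional<TransactionCacheContainer<?>> txContainer =
            getTransactionCacheContainer(transId, nodeId);
    if ( ! isTransactionCacheContainerValid(txContainer)) {
        return; // nothing usable was cached for this transaction
    }
    for (final TransactionAware notif : txContainer.get().getNotifications()) {
        // process each cached reply belonging to this transaction id
    }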
index f351132f7f816bcc98151e9e6cb991c7ea911310..5185ef0b821c51374ac1299e4b6bd295215e8a4f 100644 (file)
@@ -16,6 +16,7 @@ import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
 import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
 import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
 import org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager.TransactionCacheContainer;
 import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
 import org.opendaylight.controller.md.statistics.manager.StatisticsManager.StatDataStoreOperation;
@@ -29,7 +30,9 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupDescStats;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupDescStatsBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupFeatures;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupFeaturesBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupStatistics;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupStatisticsBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.OpendaylightGroupStatisticsListener;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.group.desc.GroupDescBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.group.features.GroupFeatures;
@@ -51,6 +54,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
 
 /**
  * statistics-manager
@@ -90,31 +94,48 @@ public class StatListenCommitGroup extends StatAbstractListenCommit<Group, Opend
         final TransactionId transId = notification.getTransactionId();
         final NodeId nodeId = notification.getId();
         if ( ! isExpectedStatistics(transId, nodeId)) {
-            LOG.debug("STAT-MANAGER - GroupDescStatsUpdated: unregistred notification detect TransactionId {}", transId);
+            LOG.debug("Unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
             return;
         }
-        final List<GroupDescStats> groupStats = notification.getGroupDescStats() != null
-                ? new ArrayList<>(notification.getGroupDescStats()) : new ArrayList<GroupDescStats>(10);
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if (txContainer.isPresent()) {
-            final List<? extends TransactionAware> cacheNotifs =
-                    txContainer.get().getNotifications();
-            for (final TransactionAware notif : cacheNotifs) {
-                if (notif instanceof GroupDescStatsUpdated) {
-                    groupStats.addAll(((GroupDescStatsUpdated) notif).getGroupDescStats());
-                }
-            }
-        }
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
-                .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
-                statGroupDescCommit(groupStats, nodeIdent, tx);
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+                /* Validate that the FlowCapableNode exists */
+                final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+                Optional<FlowCapableNode> fNode = Optional.absent();
+                try {
+                    fNode = tx.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
+                }
+                catch (final ReadFailedException e) {
+                    LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+                }
+                if ( ! fNode.isPresent()) {
+                    return;
+                }
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
+                }
+                /* Prepare the list of existing Groups; Groups that are not updated will be removed */
+                final List<Group> existGroups = fNode.get().getGroup() != null
+                        ? fNode.get().getGroup() : Collections.<Group> emptyList();
+                final List<GroupKey> existGroupKeys = new ArrayList<>();
+                for (final Group group : existGroups) {
+                    existGroupKeys.add(group.getKey());
+                }
+                /* GroupDesc processing */
+                statGroupDescCommit(txContainer, tx, fNodeIdent, existGroupKeys);
+                /* Delete all Group nodes that are no longer present */
+                deleteAllNotPresentNode(fNodeIdent, tx, Collections.unmodifiableList(existGroupKeys));
                 /* Notification for continue collecting statistics */
                 notifyToCollectNextStatistics(nodeIdent);
             }
@@ -123,39 +144,53 @@ public class StatListenCommitGroup extends StatAbstractListenCommit<Group, Opend
 
     @Override
     public void onGroupFeaturesUpdated(final GroupFeaturesUpdated notification) {
+        Preconditions.checkNotNull(notification);
         final TransactionId transId = notification.getTransactionId();
         final NodeId nodeId = notification.getId();
         if ( ! isExpectedStatistics(transId, nodeId)) {
-            LOG.debug("STAT-MANAGER - MeterFeaturesUpdated: unregistred notification detect TransactionId {}", transId);
+            LOG.debug("Unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
-            return;
-        }
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if ( ! txContainer.isPresent()) {
             return;
         }
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
-                .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
 
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
-                notifyToCollectNextStatistics(nodeIdent);
-                final GroupFeatures stats = new GroupFeaturesBuilder(notification).build();
-                final InstanceIdentifier<GroupFeatures> groupFeatureIdent = nodeIdent
-                        .augmentation(NodeGroupFeatures.class).child(GroupFeatures.class);
-                Optional<Node> node = Optional.absent();
-                try {
-                    node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
-                }
-                catch (final ReadFailedException e) {
-                    LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
                 }
-                if (node.isPresent()) {
-                    tx.put(LogicalDatastoreType.OPERATIONAL, groupFeatureIdent, stats);
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+                final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+                for (final TransactionAware notif : cacheNotifs) {
+                    if ( ! (notif instanceof GroupFeaturesUpdated)) {
+                        break;
+                    }
+                    final GroupFeatures stats = new GroupFeaturesBuilder((GroupFeaturesUpdated)notif).build();
+                    final InstanceIdentifier<NodeGroupFeatures> nodeGroupFeatureIdent =
+                            nodeIdent.augmentation(NodeGroupFeatures.class);
+                    final InstanceIdentifier<GroupFeatures> groupFeatureIdent = nodeGroupFeatureIdent
+                            .child(GroupFeatures.class);
+                    Optional<Node> node = Optional.absent();
+                    try {
+                        node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+                    }
+                    catch (final ReadFailedException e) {
+                        LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                    }
+                    if (node.isPresent()) {
+                        tx.merge(LogicalDatastoreType.OPERATIONAL, nodeGroupFeatureIdent, new NodeGroupFeaturesBuilder().build(), true);
+                        tx.put(LogicalDatastoreType.OPERATIONAL, groupFeatureIdent, stats);
+                        manager.registerAdditionalNodeFeature(nodeIdent, StatCapabTypes.GROUP_STATS);
+                    }
                 }
             }
         });
@@ -163,123 +198,140 @@ public class StatListenCommitGroup extends StatAbstractListenCommit<Group, Opend
 
     @Override
     public void onGroupStatisticsUpdated(final GroupStatisticsUpdated notification) {
+        Preconditions.checkNotNull(notification);
         final TransactionId transId = notification.getTransactionId();
         final NodeId nodeId = notification.getId();
         if ( ! isExpectedStatistics(transId, nodeId)) {
             LOG.debug("STAT-MANAGER - GroupStatisticsUpdated: unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
             return;
         }
-        final List<GroupStats> groupStats = notification.getGroupStats() != null
-                ? new ArrayList<>(notification.getGroupStats()) : new ArrayList<GroupStats>(10);
-        Optional<Group> notifGroup = Optional.absent();
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if (txContainer.isPresent()) {
-            final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
-            if (inputObj.isPresent() && inputObj.get() instanceof Group) {
-                notifGroup = Optional.<Group> of((Group)inputObj.get());
-            }
-            final List<? extends TransactionAware> cacheNotifs =
-                    txContainer.get().getNotifications();
-            for (final TransactionAware notif : cacheNotifs) {
-                if (notif instanceof GroupStatisticsUpdated) {
-                    groupStats.addAll(((GroupStatisticsUpdated) notif).getGroupStats());
-                }
-            }
-        }
-        final Optional<Group> group = notifGroup;
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
-                .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
-                /* Notification for continue collecting statistics */
-                if ( ! group.isPresent()) {
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+                /* Node exist check */
+                Optional<Node> node = Optional.absent();
+                try {
+                    node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+                }
+                catch (final ReadFailedException e) {
+                    LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                }
+                if ( ! node.isPresent()) {
+                    return;
+                }
+
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
+                }
+                final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+
+                Optional<Group> notifGroup = Optional.absent();
+                final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
+                if (inputObj.isPresent() && inputObj.get() instanceof Group) {
+                    notifGroup = Optional.<Group> of((Group)inputObj.get());
+                }
+                for (final TransactionAware notif : cacheNotifs) {
+                    if ( ! (notif instanceof GroupStatisticsUpdated)) {
+                        break;
+                    }
+                    statGroupCommit(((GroupStatisticsUpdated) notif).getGroupStats(), nodeIdent, tx);
+                }
+                if (notifGroup.isPresent()) {
                     notifyToCollectNextStatistics(nodeIdent);
                 }
-                statGroupCommit(groupStats, nodeIdent, group, tx);
             }
         });
     }
 
     private void statGroupCommit(final List<GroupStats> groupStats, final InstanceIdentifier<Node> nodeIdent,
-            final Optional<Group> group, final ReadWriteTransaction trans) {
+            final ReadWriteTransaction tx) {
+
+        Preconditions.checkNotNull(groupStats);
+        Preconditions.checkNotNull(nodeIdent);
+        Preconditions.checkNotNull(tx);
+
         final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
 
-        for (final GroupStats groupStat : groupStats) {
-            final GroupStatistics stats = new GroupStatisticsBuilder(groupStat).build();
+        for (final GroupStats gStat : groupStats) {
+            final GroupStatistics stats = new GroupStatisticsBuilder(gStat).build();
 
-            final GroupKey groupKey = new GroupKey(groupStat.getGroupId());
-            final InstanceIdentifier<GroupStatistics> gsIdent = fNodeIdent
-                    .child(Group.class,groupKey).augmentation(NodeGroupStatistics.class)
-                    .child(GroupStatistics.class);
+            final InstanceIdentifier<Group> groupIdent = fNodeIdent.child(Group.class, new GroupKey(gStat.getGroupId()));
+            final InstanceIdentifier<NodeGroupStatistics> nGroupStatIdent = groupIdent
+                    .augmentation(NodeGroupStatistics.class);
+            final InstanceIdentifier<GroupStatistics> gsIdent = nGroupStatIdent.child(GroupStatistics.class);
             /* Statistics Writing */
-            Optional<FlowCapableNode> fNode = Optional.absent();
+            Optional<Group> group = Optional.absent();
             try {
-                fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
+                group = tx.read(LogicalDatastoreType.OPERATIONAL, groupIdent).checkedGet();
             }
             catch (final ReadFailedException e) {
-                LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+                LOG.debug("Read Operational/DS for Group node fail! {}", groupIdent, e);
             }
-            if (fNode.isPresent()) {
-                trans.put(LogicalDatastoreType.OPERATIONAL, gsIdent, stats);
+            if (group.isPresent()) {
+                tx.merge(LogicalDatastoreType.OPERATIONAL, nGroupStatIdent, new NodeGroupStatisticsBuilder().build(), true);
+                tx.put(LogicalDatastoreType.OPERATIONAL, gsIdent, stats);
             }
         }
     }
 
-    private void statGroupDescCommit(final List<GroupDescStats> groupStats, final InstanceIdentifier<Node> nodeIdent,
-            final ReadWriteTransaction trans) {
-        final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+    private void statGroupDescCommit(final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+            final InstanceIdentifier<FlowCapableNode> fNodeIdent, final List<GroupKey> existGroupKeys) {
 
-        final List<GroupKey> deviceGroupKeys = new ArrayList<>();
+        Preconditions.checkNotNull(existGroupKeys);
+        Preconditions.checkNotNull(txContainer);
+        Preconditions.checkNotNull(fNodeIdent);
+        Preconditions.checkNotNull(tx);
 
-        for (final GroupDescStats group : groupStats) {
-            if (group.getGroupId() != null) {
-                final GroupBuilder groupBuilder = new GroupBuilder(group);
-                final GroupKey groupKey = new GroupKey(group.getGroupId());
-                final InstanceIdentifier<Group> groupRef = fNodeIdent.child(Group.class,groupKey);
+        final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+        for (final TransactionAware notif : cacheNotifs) {
+            if ( ! (notif instanceof GroupDescStatsUpdated)) {
+                break;
+            }
+            final List<GroupDescStats> groupStats = ((GroupDescStatsUpdated) notif).getGroupDescStats();
+            if (groupStats == null) {
+                break;
+            }
+            for (final GroupDescStats group : groupStats) {
+                if (group.getGroupId() != null) {
+                    final GroupBuilder groupBuilder = new GroupBuilder(group);
+                    final GroupKey groupKey = new GroupKey(group.getGroupId());
+                    final InstanceIdentifier<Group> groupRef = fNodeIdent.child(Group.class,groupKey);
 
-                final NodeGroupDescStatsBuilder groupDesc= new NodeGroupDescStatsBuilder();
-                groupDesc.setGroupDesc(new GroupDescBuilder(group).build());
-                //Update augmented data
-                groupBuilder.addAugmentation(NodeGroupDescStats.class, groupDesc.build());
-                deviceGroupKeys.add(groupKey);
-                Optional<FlowCapableNode> hashIdUpd = Optional.absent();
-                try {
-                    hashIdUpd = trans.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
-                }
-                catch (final ReadFailedException e) {
-                    LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
-                }
-                if (hashIdUpd.isPresent()) {
-                    trans.put(LogicalDatastoreType.OPERATIONAL, groupRef, groupBuilder.build());
+                    final NodeGroupDescStatsBuilder groupDesc = new NodeGroupDescStatsBuilder();
+                    groupDesc.setGroupDesc(new GroupDescBuilder(group).build());
+                    //Update augmented data
+                    groupBuilder.addAugmentation(NodeGroupDescStats.class, groupDesc.build());
+                    existGroupKeys.remove(groupKey);
+                    tx.put(LogicalDatastoreType.OPERATIONAL, groupRef, groupBuilder.build());
                 }
             }
         }
-        /* Delete all not presented Group Nodes */
-        deleteAllNotPresentNode(fNodeIdent, trans, deviceGroupKeys);
     }
 
     private void deleteAllNotPresentNode(final InstanceIdentifier<FlowCapableNode> fNodeIdent,
             final ReadWriteTransaction trans, final List<GroupKey> deviceGroupKeys) {
 
-        final Optional<FlowCapableNode> fNode = readLatestConfiguration(fNodeIdent);
-        if ( ! fNode.isPresent()) {
-            LOG.trace("Read Operational/DS for FlowCapableNode fail! Node {} doesn't exist.", fNodeIdent);
+        Preconditions.checkNotNull(fNodeIdent);
+        Preconditions.checkNotNull(trans);
+
+        if (deviceGroupKeys == null) {
             return;
         }
-        final List<Group> existGroups = fNode.get().getGroup() != null
-                ? fNode.get().getGroup() : Collections.<Group> emptyList();
-        /* Add all existed groups paths - no updated paths has to be removed */
-        for (final Group group : existGroups) {
-            if (deviceGroupKeys.remove(group.getKey())) {
-                break; // group still exist on device
-            }
-            LOG.trace("Group {} has to removed.", group);
-            final InstanceIdentifier<Group> delGroupIdent = fNodeIdent.child(Group.class, group.getKey());
+
+        for (final GroupKey key : deviceGroupKeys) {
+            final InstanceIdentifier<Group> delGroupIdent = fNodeIdent.child(Group.class, key);
+            LOG.trace("Group {} has to be removed.", key);
             Optional<Group> delGroup = Optional.absent();
             try {
                 delGroup = trans.read(LogicalDatastoreType.OPERATIONAL, delGroupIdent).checkedGet();
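Note: the group statistics hunks above repeat one write pattern for every statistics leaf: read the parent node from the operational datastore, and only when it is present merge an empty parent augmentation (with missing parents created) before putting the leaf itself. A minimal sketch of that pattern, assuming the MD-SAL binding types already imported in this file; the helper name writeStatisticsUnderParent and its generic parameters are illustrative, not part of the commit:

    // Sketch only: ensure the parent augmentation exists, then write the statistics child.
    private <P extends DataObject, C extends DataObject> void writeStatisticsUnderParent(
            final ReadWriteTransaction tx,
            final InstanceIdentifier<Node> nodeIdent,
            final InstanceIdentifier<P> parentIdent, final P emptyParent,
            final InstanceIdentifier<C> statsIdent, final C stats) {
        Optional<Node> node = Optional.absent();
        try {
            node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
        } catch (final ReadFailedException e) {
            LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
        }
        if (node.isPresent()) {
            // merge(..., true) creates the missing parent structure so the put cannot dangle
            tx.merge(LogicalDatastoreType.OPERATIONAL, parentIdent, emptyParent, true);
            tx.put(LogicalDatastoreType.OPERATIONAL, statsIdent, stats);
        }
    }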
index 9c9de59a6ad8f53b562add5a7c8255cc2a54da19..d6988a6f2b8dac73197afc80b082fab023e4c9a7 100644 (file)
@@ -16,6 +16,7 @@ import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
 import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
 import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
 import org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager.TransactionCacheContainer;
 import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
 import org.opendaylight.controller.md.statistics.manager.StatisticsManager.StatDataStoreOperation;
@@ -36,7 +37,9 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterConfigStats;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterConfigStatsBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeatures;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeaturesBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterStatistics;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterStatisticsBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.OpendaylightMeterStatisticsListener;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.MeterFeatures;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.MeterFeaturesBuilder;
@@ -45,11 +48,13 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.meter.MeterStatisticsBuilder;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.meter.config.stats.reply.MeterConfigStats;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.meter.statistics.reply.MeterStats;
+import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
 
 /**
  * statistics-manager
@@ -90,68 +95,102 @@ public class StatListenCommitMeter extends StatAbstractListenCommit<Meter, Opend
             LOG.debug("STAT-MANAGER - MeterConfigStatsUpdated: unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
             return;
         }
-        final List<MeterConfigStats> meterConfStat = notification.getMeterConfigStats() != null
-                ? new ArrayList<>(notification.getMeterConfigStats()) : new ArrayList<MeterConfigStats>(10);
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if (txContainer.isPresent()) {
-            final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
-            for (final TransactionAware notif : cacheNotifs) {
-                if (notif instanceof MeterConfigStatsUpdated) {
-                    meterConfStat.addAll(((MeterConfigStatsUpdated) notif).getMeterConfigStats());
-                }
-            }
-        }
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+                /* Validate exist FlowCapableNode */
+                final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+                Optional<FlowCapableNode> fNode = Optional.absent();
+                try {
+                    fNode = tx.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
+                }
+                catch (final ReadFailedException e) {
+                    LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+                }
+                if ( ! fNode.isPresent()) {
+                    return;
+                }
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
+                }
+                /* Prepare list of existing Meters; Meters that are not updated will be removed */
+                final List<Meter> existMeters = fNode.get().getMeter() != null
+                        ? fNode.get().getMeter() : Collections.<Meter> emptyList();
+                final List<MeterKey> existMeterKeys = new ArrayList<>();
+                for (final Meter meter : existMeters) {
+                    existMeterKeys.add(meter.getKey());
+                }
+                /* MeterConfig processing */
+                comitConfMeterStats(txContainer, tx, fNodeIdent, existMeterKeys);
+                /* Delete all not presented Meter Nodes */
+                deleteAllNotPresentedNodes(fNodeIdent, tx, Collections.unmodifiableList(existMeterKeys));
                 /* Notification for continue collecting statistics */
                 notifyToCollectNextStatistics(nodeIdent);
-                comitConfMeterStats(meterConfStat, nodeIdent, tx);
             }
         });
     }
 
     @Override
     public void onMeterFeaturesUpdated(final MeterFeaturesUpdated notification) {
+        Preconditions.checkNotNull(notification);
         final TransactionId transId = notification.getTransactionId();
         final NodeId nodeId = notification.getId();
         if ( ! isExpectedStatistics(transId, nodeId)) {
             LOG.debug("STAT-MANAGER - MeterFeaturesUpdated: unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
-            return;
-        }
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if ( ! txContainer.isPresent()) {
             return;
         }
-        final MeterFeatures stats = new MeterFeaturesBuilder(notification).build();
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
-                .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
-        final InstanceIdentifier<MeterFeatures> meterFeatureIdent = nodeIdent
-                .augmentation(NodeMeterFeatures.class).child(MeterFeatures.class);
 
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
-                /* Notification for continue collecting statistics */
-                notifyToCollectNextStatistics(nodeIdent);
-                Optional<Node> node = Optional.absent();
-                try {
-                    node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
-                }
-                catch (final ReadFailedException e) {
-                    LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
                 }
-                if (node.isPresent()) {
-                    tx.put(LogicalDatastoreType.OPERATIONAL, meterFeatureIdent, stats);
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+                final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+                for (final TransactionAware notif : cacheNotifs) {
+                    if ( ! (notif instanceof MeterFeaturesUpdated)) {
+                        break;
+                    }
+                    final MeterFeatures stats = new MeterFeaturesBuilder((MeterFeaturesUpdated)notif).build();
+                    final InstanceIdentifier<NodeMeterFeatures> nodeMeterFeatureIdent =
+                            nodeIdent.augmentation(NodeMeterFeatures.class);
+                    final InstanceIdentifier<MeterFeatures> meterFeatureIdent = nodeMeterFeatureIdent
+                            .child(MeterFeatures.class);
+                    Optional<Node> node = Optional.absent();
+                    try {
+                        node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+                    }
+                    catch (final ReadFailedException e) {
+                        LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                    }
+                    if (node.isPresent()) {
+                        tx.merge(LogicalDatastoreType.OPERATIONAL, nodeMeterFeatureIdent, new NodeMeterFeaturesBuilder().build(), true);
+                        tx.put(LogicalDatastoreType.OPERATIONAL, meterFeatureIdent, stats);
+                        manager.registerAdditionalNodeFeature(nodeIdent, StatCapabTypes.METER_STATS);
+                    }
                 }
             }
         });
@@ -159,123 +198,149 @@ public class StatListenCommitMeter extends StatAbstractListenCommit<Meter, Opend
 
     @Override
     public void onMeterStatisticsUpdated(final MeterStatisticsUpdated notification) {
+        Preconditions.checkNotNull(notification);
         final TransactionId transId = notification.getTransactionId();
         final NodeId nodeId = notification.getId();
         if ( ! isExpectedStatistics(transId, nodeId)) {
             LOG.debug("STAT-MANAGER - MeterStatisticsUpdated: unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
             return;
         }
-        final List<MeterStats> meterStat = notification.getMeterStats() != null
-                ? new ArrayList<>(notification.getMeterStats()) : new ArrayList<MeterStats>(10);
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if (txContainer.isPresent()) {
-            final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
-            for (final TransactionAware notif : cacheNotifs) {
-                if (notif instanceof MeterConfigStatsUpdated) {
-                    meterStat.addAll(((MeterStatisticsUpdated) notif).getMeterStats());
-                }
-            }
-        }
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
             public void applyOperation(final ReadWriteTransaction tx) {
-                statMeterCommit(meterStat, nodeIdent, tx);
-                /* Notification for continue collecting statistics */
-                notifyToCollectNextStatistics(nodeIdent);
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+                        .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+                /* Node exist check */
+                Optional<Node> node = Optional.absent();
+                try {
+                    node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+                }
+                catch (final ReadFailedException e) {
+                    LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                }
+                if ( ! node.isPresent()) {
+                    return;
+                }
+
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
+                }
+                final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+
+                Optional<Meter> notifMeter = Optional.absent();
+                final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
+                if (inputObj.isPresent() && inputObj.get() instanceof Meter) {
+                    notifMeter = Optional.<Meter> of((Meter)inputObj.get());
+                }
+                for (final TransactionAware notif : cacheNotifs) {
+                    if ( ! (notif instanceof MeterStatisticsUpdated)) {
+                        break;
+                    }
+                    statMeterCommit(((MeterStatisticsUpdated) notif).getMeterStats(), nodeIdent, tx);
+                }
+                if (notifMeter.isPresent()) {
+                    notifyToCollectNextStatistics(nodeIdent);
+                }
             }
         });
     }
 
     private void statMeterCommit(final List<MeterStats> meterStats,
-            final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+            final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction tx) {
+
+        Preconditions.checkNotNull(meterStats);
+        Preconditions.checkNotNull(nodeIdent);
+        Preconditions.checkNotNull(tx);
 
         final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+
         for (final MeterStats mStat : meterStats) {
             final MeterStatistics stats = new MeterStatisticsBuilder(mStat).build();
 
-            final MeterKey mKey = new MeterKey(mStat.getMeterId());
-            final InstanceIdentifier<MeterStatistics> msIdent = fNodeIdent
-                    .child(Meter.class, mKey).augmentation(NodeMeterStatistics.class)
-                    .child(MeterStatistics.class);
+            final InstanceIdentifier<Meter> meterIdent = fNodeIdent.child(Meter.class, new MeterKey(mStat.getMeterId()));
+            final InstanceIdentifier<NodeMeterStatistics> nodeMeterStatIdent = meterIdent
+                    .augmentation(NodeMeterStatistics.class);
+            final InstanceIdentifier<MeterStatistics> msIdent = nodeMeterStatIdent.child(MeterStatistics.class);
             /* Meter Statistics commit */
-            Optional<FlowCapableNode> fNode = Optional.absent();
+            Optional<Meter> meter = Optional.absent();
             try {
-                fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
+                meter = tx.read(LogicalDatastoreType.OPERATIONAL, meterIdent).checkedGet();
             }
             catch (final ReadFailedException e) {
                 LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
             }
-            if (fNode.isPresent()) {
-                trans.put(LogicalDatastoreType.OPERATIONAL, msIdent, stats);
+            if (meter.isPresent()) {
+                tx.merge(LogicalDatastoreType.OPERATIONAL, nodeMeterStatIdent, new NodeMeterStatisticsBuilder().build(), true);
+                tx.put(LogicalDatastoreType.OPERATIONAL, msIdent, stats);
             }
         }
     }
 
-    private void comitConfMeterStats(final List<MeterConfigStats> meterConfStat,
-            final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+    private void comitConfMeterStats(final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+            final InstanceIdentifier<FlowCapableNode> fNodeIdent, final List<MeterKey> existMeterKeys) {
 
-        final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
-        final List<MeterKey> deviceMeterKeys = new ArrayList<>();
-
-        for (final MeterConfigStats meterConf : meterConfStat) {
-            final MeterBuilder meterBuilder = new MeterBuilder(meterConf);
-            if (meterConf.getMeterId() != null) {
-                final MeterKey meterKey = new MeterKey(meterConf.getMeterId());
-                meterBuilder.setKey(meterKey);
-                final InstanceIdentifier<Meter> meterRef = nodeIdent
-                        .augmentation(FlowCapableNode.class).child(Meter.class,meterKey);
-                final NodeMeterConfigStatsBuilder meterConfig = new NodeMeterConfigStatsBuilder();
-                meterConfig.setMeterConfigStats(new MeterConfigStatsBuilder(meterConf).build());
-                //Update augmented data
-                meterBuilder.addAugmentation(NodeMeterConfigStats.class, meterConfig.build());
-                deviceMeterKeys.add(meterKey);
-                Optional<FlowCapableNode> fNode = Optional.absent();
-                try {
-                    fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
-                }
-                catch (final ReadFailedException e) {
-                    LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
-                }
-                if (fNode.isPresent()) {
-                    trans.put(LogicalDatastoreType.OPERATIONAL, meterRef, meterBuilder.build());
+        Preconditions.checkNotNull(existMeterKeys);
+        Preconditions.checkNotNull(txContainer);
+        Preconditions.checkNotNull(fNodeIdent);
+        Preconditions.checkNotNull(tx);
+
+        final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+        for (final TransactionAware notif : cacheNotifs) {
+            if ( ! (notif instanceof MeterConfigStatsUpdated)) {
+                break;
+            }
+            final List<MeterConfigStats> meterStats = ((MeterConfigStatsUpdated) notif).getMeterConfigStats();
+            if (meterStats == null) {
+                break;
+            }
+            for (final MeterConfigStats meterStat : meterStats) {
+                if (meterStat.getMeterId() != null) {
+                    final MeterBuilder meterBuilder = new MeterBuilder(meterStat);
+                    final MeterKey meterKey = new MeterKey(meterStat.getMeterId());
+                    final InstanceIdentifier<Meter> meterRef = fNodeIdent.child(Meter.class, meterKey);
+
+                    final NodeMeterConfigStatsBuilder meterConfig = new NodeMeterConfigStatsBuilder();
+                    meterConfig.setMeterConfigStats(new MeterConfigStatsBuilder(meterStat).build());
+                    //Update augmented data
+                    meterBuilder.addAugmentation(NodeMeterConfigStats.class, meterConfig.build());
+                    existMeterKeys.remove(meterKey);
+                    tx.put(LogicalDatastoreType.OPERATIONAL, meterRef, meterBuilder.build());
                 }
             }
         }
-        /* Delete all not presented Meter Nodes */
-        deleteAllNotPresentedNodes(fNodeIdent, trans, deviceMeterKeys);
     }
 
     private void deleteAllNotPresentedNodes(final InstanceIdentifier<FlowCapableNode> fNodeIdent,
-            final ReadWriteTransaction trans, final List<MeterKey> deviceMeterKeys) {
-        /* Delete all not presented meters */
-        final Optional<FlowCapableNode> fNode = readLatestConfiguration(fNodeIdent);
+            final ReadWriteTransaction tx, final List<MeterKey> deviceMeterKeys) {
+
+        Preconditions.checkNotNull(fNodeIdent);
+        Preconditions.checkNotNull(tx);
 
-        if ( ! fNode.isPresent()) {
-            LOG.trace("Read Operational/DS for FlowCapableNode fail! Node {} doesn't exist.", fNodeIdent);
+        if (deviceMeterKeys == null) {
             return;
         }
-        final List<Meter> existMeters = fNode.get().getMeter() != null
-                ? fNode.get().getMeter() : Collections.<Meter> emptyList();
-        /* Add all existed groups paths - no updated paths has to be removed */
-        for (final Meter meter : existMeters) {
-            if (deviceMeterKeys.remove(meter.getKey())) {
-                break; // Meter still exist on device
-            }
-            final InstanceIdentifier<Meter> delMeterIdent = fNodeIdent.child(Meter.class, meter.getKey());
+
+        for (final MeterKey key : deviceMeterKeys) {
+            final InstanceIdentifier<Meter> delMeterIdent = fNodeIdent.child(Meter.class, key);
+            LOG.trace("Meter {} has to be removed.", key);
             Optional<Meter> delMeter = Optional.absent();
             try {
-                delMeter = trans.read(LogicalDatastoreType.OPERATIONAL, delMeterIdent).checkedGet();
+                delMeter = tx.read(LogicalDatastoreType.OPERATIONAL, delMeterIdent).checkedGet();
             }
             catch (final ReadFailedException e) {
                 // NOOP - probably another transaction delete that node
             }
             if (delMeter.isPresent()) {
-                trans.delete(LogicalDatastoreType.OPERATIONAL, delMeterIdent);
+                tx.delete(LogicalDatastoreType.OPERATIONAL, delMeterIdent);
             }
         }
     }
index e336f01874e25f48b2a13d1221b06147bc7c9df0..07e167d1e481aa427e53ba869182e10472fc28df 100644 (file)
@@ -8,8 +8,11 @@
 
 package org.opendaylight.controller.md.statistics.manager.impl;
 
-import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 
 import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
@@ -42,6 +45,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
 
 /**
  * statistics-manager
@@ -84,65 +88,120 @@ public class StatListenCommitQueue extends StatAbstractListenCommit<Queue, Opend
             LOG.debug("STAT-MANAGER - QueueStatisticsUpdate: unregistred notification detect TransactionId {}", transId);
             return;
         }
+        manager.getRpcMsgManager().addNotification(notification, nodeId);
         if (notification.isMoreReplies()) {
-            manager.getRpcMsgManager().addNotification(notification, nodeId);
             return;
         }
-        final List<QueueIdAndStatisticsMap> queueStats = notification.getQueueIdAndStatisticsMap() != null
-                ? new ArrayList<>(notification.getQueueIdAndStatisticsMap()) : new ArrayList<QueueIdAndStatisticsMap>(10);
-        final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
-        if (txContainer.isPresent()) {
-            final List<? extends TransactionAware> cachedNotifs =
-                    txContainer.get().getNotifications();
-            for (final TransactionAware notif : cachedNotifs) {
-                if (notif instanceof QueueStatisticsUpdate) {
-                    queueStats.addAll(((QueueStatisticsUpdate) notif).getQueueIdAndStatisticsMap());
-                }
-            }
-        }
-        final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class)
-                .child(Node.class, new NodeKey(nodeId));
-        /* Queue statistics are small size and we are not able to change for OF cross controller
-         * - don't need to make are atomic */
+
+        /* Don't block RPC Notification thread */
         manager.enqueue(new StatDataStoreOperation() {
             @Override
-            public void applyOperation(final ReadWriteTransaction trans) {
-                /* Notification for continue */
+            public void applyOperation(final ReadWriteTransaction tx) {
+
+                final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class)
+                        .child(Node.class, new NodeKey(nodeId));
+
+                /* Validate exist Node */
+                Optional<Node> fNode = Optional.absent();
+                try {
+                    fNode = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+                }
+                catch (final ReadFailedException e) {
+                    LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+                }
+                if ( ! fNode.isPresent()) {
+                    LOG.trace("Read Operational/DS for Node fail! Node {} doesn't exist.", nodeIdent);
+                    return;
+                }
+
+                /* Get and Validate TransactionCacheContainer */
+                final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+                if ( ! isTransactionCacheContainerValid(txContainer)) {
+                    return;
+                }
+                /* Prepare list of existing Queues; Queues that are not updated will be removed */
+                final List<NodeConnector> existConnectors = fNode.get().getNodeConnector() != null
+                        ? fNode.get().getNodeConnector() : Collections.<NodeConnector> emptyList();
+                final Map<QueueKey, NodeConnectorKey> existQueueKeys = new HashMap<>();
+                for (final NodeConnector connect : existConnectors) {
+                    final List<Queue> listQueues = connect.getAugmentation(FlowCapableNodeConnector.class).getQueue();
+                    if (listQueues != null) {
+                        for (final Queue queue : listQueues) {
+                            existQueueKeys.put(queue.getKey(), connect.getKey());
+                        }
+                    }
+                }
+                /* Queue processing */
+                statQueueCommit(txContainer, tx, nodeIdent, existQueueKeys);
+                /* Delete all not presented Queue Nodes */
+                deleteAllNotPresentedNodes(nodeIdent, tx, Collections.unmodifiableMap(existQueueKeys));
+                /* Notification for continue collecting statistics */
                 notifyToCollectNextStatistics(nodeIdent);
-                statQueueCommit(queueStats, nodeIdent, trans);
             }
         });
     }
 
-    private void statQueueCommit(final List<QueueIdAndStatisticsMap> queueStats,
-            final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+    private void statQueueCommit(
+            final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+            final InstanceIdentifier<Node> nodeIdent, final Map<QueueKey, NodeConnectorKey> existQueueKeys) {
 
-        /* check exist FlowCapableNode and write statistics */
-        Optional<Node> fNode = Optional.absent();
-        try {
-            fNode = trans.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
-        }
-        catch (final ReadFailedException e) {
-            LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
-            return;
+        Preconditions.checkNotNull(existQueueKeys);
+        Preconditions.checkNotNull(txContainer);
+        Preconditions.checkNotNull(nodeIdent);
+        Preconditions.checkNotNull(tx);
+
+        final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+        for (final TransactionAware notif : cacheNotifs) {
+            if ( ! (notif instanceof QueueStatisticsUpdate)) {
+                break;
+            }
+            final List<QueueIdAndStatisticsMap> queueStats = ((QueueStatisticsUpdate) notif).getQueueIdAndStatisticsMap();
+            if (queueStats == null) {
+                break;
+            }
+            for (final QueueIdAndStatisticsMap queueStat : queueStats) {
+                if (queueStat.getQueueId() != null) {
+                    final FlowCapableNodeConnectorQueueStatistics statChild =
+                            new FlowCapableNodeConnectorQueueStatisticsBuilder(queueStat).build();
+                    final FlowCapableNodeConnectorQueueStatisticsDataBuilder statBuild =
+                            new FlowCapableNodeConnectorQueueStatisticsDataBuilder();
+                    statBuild.setFlowCapableNodeConnectorQueueStatistics(statChild);
+                    final QueueKey qKey = new QueueKey(queueStat.getQueueId());
+                    final InstanceIdentifier<FlowCapableNodeConnectorQueueStatisticsData> queueStatIdent = nodeIdent
+                            .child(NodeConnector.class, new NodeConnectorKey(queueStat.getNodeConnectorId()))
+                            .augmentation(FlowCapableNodeConnector.class)
+                            .child(Queue.class, qKey).augmentation(FlowCapableNodeConnectorQueueStatisticsData.class);
+                    existQueueKeys.remove(qKey);
+                    tx.put(LogicalDatastoreType.OPERATIONAL, queueStatIdent, statBuild.build());
+                }
+            }
         }
-        if ( ! fNode.isPresent()) {
-            LOG.trace("Read Operational/DS for Node fail! Node {} doesn't exist.", nodeIdent);
+    }
+
+    private void deleteAllNotPresentedNodes(final InstanceIdentifier<Node> nodeIdent,
+            final ReadWriteTransaction tx, final Map<QueueKey, NodeConnectorKey> existQueueKeys) {
+
+        Preconditions.checkNotNull(nodeIdent);
+        Preconditions.checkNotNull(tx);
+
+        if (existQueueKeys == null) {
             return;
         }
 
-        for (final QueueIdAndStatisticsMap queueEntry : queueStats) {
-            final FlowCapableNodeConnectorQueueStatistics statChild =
-                    new FlowCapableNodeConnectorQueueStatisticsBuilder(queueEntry).build();
-            final FlowCapableNodeConnectorQueueStatisticsDataBuilder statBuild =
-                    new FlowCapableNodeConnectorQueueStatisticsDataBuilder();
-            statBuild.setFlowCapableNodeConnectorQueueStatistics(statChild);
-            final QueueKey qKey = new QueueKey(queueEntry.getQueueId());
-            final InstanceIdentifier<FlowCapableNodeConnectorQueueStatisticsData> queueStatIdent = nodeIdent
-                    .child(NodeConnector.class, new NodeConnectorKey(queueEntry.getNodeConnectorId()))
-                    .augmentation(FlowCapableNodeConnector.class)
-                    .child(Queue.class, qKey).augmentation(FlowCapableNodeConnectorQueueStatisticsData.class);
-            trans.put(LogicalDatastoreType.OPERATIONAL, queueStatIdent, statBuild.build());
+        for (final Entry<QueueKey, NodeConnectorKey> entry : existQueueKeys.entrySet()) {
+            final InstanceIdentifier<Queue> queueIdent = nodeIdent.child(NodeConnector.class, entry.getValue())
+                    .augmentation(FlowCapableNodeConnector.class).child(Queue.class, entry.getKey());
+            LOG.trace("Queue {} has to be removed.", queueIdent);
+            Optional<Queue> delQueue = Optional.absent();
+            try {
+                delQueue = tx.read(LogicalDatastoreType.OPERATIONAL, queueIdent).checkedGet();
+            }
+            catch (final ReadFailedException e) {
+                // NOOP - probably another transaction delete that node
+            }
+            if (delQueue.isPresent()) {
+                tx.delete(LogicalDatastoreType.OPERATIONAL, queueIdent);
+            }
         }
     }
 }
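Note: StatListenCommitGroup, StatListenCommitMeter and StatListenCommitQueue all use the same reconciliation bookkeeping: collect the keys that currently exist under the node, drop each key as an update for it is written, and finally delete whatever keys the device no longer reports. A self-contained sketch of that bookkeeping in plain Java; the class and method names are placeholders, not types from this project:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    final class StaleEntryTracker<K> {
        private final Set<K> pendingRemoval;

        StaleEntryTracker(final List<K> existingKeys) {
            // Start from everything the operational datastore currently contains.
            this.pendingRemoval = new HashSet<>(existingKeys);
        }

        // Called for every key the device reported in this statistics round.
        void markStillPresent(final K key) {
            pendingRemoval.remove(key);
        }

        // Keys the device no longer reports; the caller deletes these from the datastore.
        Set<K> staleKeys() {
            return pendingRemoval;
        }
    }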
index adb58709865d7310dc41aeaf759d7695c50be7e7..afb45e59f0a6c6effd7856e02bfbdc3e10a47116 100644 (file)
@@ -11,12 +11,14 @@ package org.opendaylight.controller.md.statistics.manager.impl;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 
 import org.opendaylight.controller.md.sal.binding.api.DataBroker;
 import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
 import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
 import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
 import org.opendaylight.controller.md.statistics.manager.StatNodeRegistration;
 import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
 import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
@@ -36,9 +38,10 @@ import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeCon
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRemoved;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeUpdated;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
 import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeatures;
 import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -60,7 +63,7 @@ import com.google.common.base.Preconditions;
  *
  * Created: Aug 28, 2014
  */
-public class StatNodeRegistrationImpl implements StatNodeRegistration {
+public class StatNodeRegistrationImpl implements StatNodeRegistration, DataChangeListener {
 
     private static final Logger LOG = LoggerFactory.getLogger(StatNodeRegistrationImpl.class);
 
@@ -74,6 +77,11 @@ public class StatNodeRegistrationImpl implements StatNodeRegistration {
         Preconditions.checkArgument(db != null, "DataBroker can not be null!");
         Preconditions.checkArgument(notificationService != null, "NotificationProviderService can not be null!");
         notifListenerRegistration = notificationService.registerNotificationListener(this);
+        /* Build Path */
+        final InstanceIdentifier<FlowCapableNode> flowNodeWildCardIdentifier = InstanceIdentifier.create(Nodes.class)
+                .child(Node.class).augmentation(FlowCapableNode.class);
+        listenerRegistration = db.registerDataChangeListener(LogicalDatastoreType.OPERATIONAL,
+                flowNodeWildCardIdentifier, StatNodeRegistrationImpl.this, DataChangeScope.BASE);
     }
 
     @Override
@@ -131,21 +139,6 @@ public class StatNodeRegistrationImpl implements StatNodeRegistration {
                 maxCapTables = data.getMaxTables();
 
                 final Optional<Short> maxTables = Optional.<Short> of(maxCapTables);
-
-                /* Meters management */
-                final InstanceIdentifier<NodeMeterFeatures> meterFeaturesIdent = nodeIdent.augmentation(NodeMeterFeatures.class);
-
-
-                Optional<NodeMeterFeatures> meterFeatures = Optional.absent();
-                try {
-                    meterFeatures = tx.read(LogicalDatastoreType.OPERATIONAL, meterFeaturesIdent).checkedGet();
-                }
-                catch (final ReadFailedException e) {
-                    LOG.warn("Read NodeMeterFeatures {} fail!", meterFeaturesIdent, e);
-                }
-                if (meterFeatures.isPresent()) {
-                    statCapabTypes.add(StatCapabTypes.METER_STATS);
-                }
                 manager.connectedNodeRegistration(nodeIdent,
                         Collections.unmodifiableList(statCapabTypes), maxTables.get());
             }
@@ -178,6 +171,7 @@ public class StatNodeRegistrationImpl implements StatNodeRegistration {
 
     @Override
     public void onNodeRemoved(final NodeRemoved notification) {
+        Preconditions.checkNotNull(notification);
         final NodeRef nodeRef = notification.getNodeRef();
         final InstanceIdentifier<?> nodeRefIdent = nodeRef.getValue();
         final InstanceIdentifier<Node> nodeIdent =
@@ -189,6 +183,7 @@ public class StatNodeRegistrationImpl implements StatNodeRegistration {
 
     @Override
     public void onNodeUpdated(final NodeUpdated notification) {
+        Preconditions.checkNotNull(notification);
         final FlowCapableNodeUpdated newFlowNode =
                 notification.getAugmentation(FlowCapableNodeUpdated.class);
         if (newFlowNode != null && newFlowNode.getSwitchFeatures() != null) {
@@ -203,5 +198,25 @@ public class StatNodeRegistrationImpl implements StatNodeRegistration {
             connectFlowCapableNode(swichFeaturesIdent, switchFeatures, nodeIdent);
         }
     }
+
+    @Override
+    public void onDataChanged(final AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+        Preconditions.checkNotNull(changeEvent,"Async ChangeEvent can not be null!");
+        /* All DataObjects for create */
+        final Set<InstanceIdentifier<?>>  createdData = changeEvent.getCreatedData() != null
+                ? changeEvent.getCreatedData().keySet() : Collections.<InstanceIdentifier<?>> emptySet();
+
+        for (final InstanceIdentifier<?> entryKey : createdData) {
+            final InstanceIdentifier<Node> nodeIdent = entryKey
+                    .firstIdentifierOf(Node.class);
+            if ( ! nodeIdent.isWildcarded()) {
+                final NodeRef nodeRef = new NodeRef(nodeIdent);
+                // FIXME: these calls are a job for handshake or for inventory manager
+                /* check Group and Meter future */
+                manager.getRpcMsgManager().getGroupFeaturesStat(nodeRef);
+                manager.getRpcMsgManager().getMeterFeaturesStat(nodeRef);
+            }
+        }
+    }
 }
 
index d008042e8d014e639be7c02f7f922fb847f9746a..ff1778e8aa9900c0512c6888d8580c92b4c75482 100644 (file)
@@ -1,5 +1,6 @@
 package org.opendaylight.controller.md.statistics.manager.impl;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -80,6 +81,11 @@ public class StatPermCollectorImpl implements StatPermCollector {
         statNetCollectorServ.shutdown();
     }
 
+    @Override
+    public boolean hasActiveNodes() {
+        return ( ! statNodeHolder.isEmpty());
+    }
+
     @Override
     public boolean isProvidedFlowNodeActive(
             final InstanceIdentifier<Node> flowNode) {
@@ -89,9 +95,7 @@ public class StatPermCollectorImpl implements StatPermCollector {
     @Override
     public boolean connectedNodeRegistration(final InstanceIdentifier<Node> ident,
             final List<StatCapabTypes> statTypes, final Short nrOfSwitchTables) {
-        if (ident.isWildcarded()) {
-            LOG.warn("FlowCapableNode IstanceIdentifier {} registration can not be wildcarded!", ident);
-        } else {
+        if (isNodeIdentValidForUse(ident)) {
             if ( ! statNodeHolder.containsKey(ident)) {
                 synchronized (statNodeHolderLock) {
                     final boolean startStatCollecting = statNodeHolder.size() == 0;
@@ -119,9 +123,7 @@ public class StatPermCollectorImpl implements StatPermCollector {
 
     @Override
     public boolean disconnectedNodeUnregistration(final InstanceIdentifier<Node> ident) {
-        if (ident.isWildcarded()) {
-            LOG.warn("FlowCapableNode IstanceIdentifier {} unregistration can not be wildcarded!", ident);
-        } else {
+        if (isNodeIdentValidForUse(ident)) {
             if (statNodeHolder.containsKey(ident)) {
                 synchronized (statNodeHolderLock) {
                     if (statNodeHolder.containsKey(ident)) {
@@ -142,6 +144,33 @@ public class StatPermCollectorImpl implements StatPermCollector {
         return false;
     }
 
+    @Override
+    public boolean registerAdditionalNodeFeature(final InstanceIdentifier<Node> ident,
+            final StatCapabTypes statCapab) {
+        if (isNodeIdentValidForUse(ident)) {
+            if ( ! statNodeHolder.containsKey(ident)) {
+                return false;
+            }
+            final StatNodeInfoHolder statNode = statNodeHolder.get(ident);
+            if ( ! statNode.getStatMarkers().contains(statCapab)) {
+                synchronized (statNodeHolderLock) {
+                    if ( ! statNode.getStatMarkers().contains(statCapab)) {
+                        final List<StatCapabTypes> statCapabForEdit = new ArrayList<>(statNode.getStatMarkers());
+                        statCapabForEdit.add(statCapab);
+                        final StatNodeInfoHolder nodeInfoHolder = new StatNodeInfoHolder(statNode.getNodeRef(),
+                                Collections.unmodifiableList(statCapabForEdit), statNode.getMaxTables());
+
+                        final Map<InstanceIdentifier<Node>, StatNodeInfoHolder> statNodes =
+                                new HashMap<>(statNodeHolder);
+                        statNodes.put(ident, nodeInfoHolder);
+                        statNodeHolder = Collections.unmodifiableMap(statNodes);
+                    }
+                }
+            }
+        }
+        return true;
+    }
+
     @Override
     public void collectNextStatistics() {
         if (wakeMe) {
@@ -238,8 +267,6 @@ public class StatPermCollectorImpl implements StatPermCollector {
                     break;
                 case GROUP_STATS:
                     LOG.trace("STAT-MANAGER-collecting GROUP-STATS for NodeRef {}", actualNodeRef);
-                    manager.getRpcMsgManager().getGroupFeaturesStat(actualNodeRef);
-                    waitingForNotification();
                     manager.getRpcMsgManager().getAllGroupsConfStats(actualNodeRef);
                     waitingForNotification();
                     manager.getRpcMsgManager().getAllGroupsStat(actualNodeRef);
@@ -247,8 +274,6 @@ public class StatPermCollectorImpl implements StatPermCollector {
                     break;
                 case METER_STATS:
                     LOG.trace("STAT-MANAGER-collecting METER-STATS for NodeRef {}", actualNodeRef);
-                    manager.getRpcMsgManager().getMeterFeaturesStat(actualNodeRef);
-                    waitingForNotification();
                     manager.getRpcMsgManager().getAllMeterConfigStat(actualNodeRef);
                     waitingForNotification();
                     manager.getRpcMsgManager().getAllMetersStat(actualNodeRef);
@@ -297,9 +322,16 @@ public class StatPermCollectorImpl implements StatPermCollector {
         }
     }
 
-    @Override
-    public boolean hasActiveNodes() {
-        return ( ! statNodeHolder.isEmpty());
+    private boolean isNodeIdentValidForUse(final InstanceIdentifier<Node> ident) {
+        if (ident == null) {
+            LOG.warn("FlowCapableNode InstanceIdentifier can not be null!");
+            return false;
+        }
+        if (ident.isWildcarded()) {
+            LOG.warn("FlowCapableNode InstanceIdentifier {} can not be wildcarded!", ident);
+            return false;
+        }
+        return true;
     }
 }
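Note: registerAdditionalNodeFeature above extends the node holder map in the copy-on-write style used by the rest of StatPermCollectorImpl: the published map stays an unmodifiable snapshot, and a writer builds a fresh copy under the lock before swapping the reference. A minimal self-contained sketch of that idiom; the class and field names are illustrative only:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    final class CopyOnWriteRegistry<K, V> {
        private final Object lock = new Object();
        // Readers only ever see an immutable snapshot, so they need no locking.
        private volatile Map<K, V> snapshot = Collections.emptyMap();

        V get(final K key) {
            return snapshot.get(key);
        }

        void register(final K key, final V value) {
            synchronized (lock) {
                final Map<K, V> copy = new HashMap<>(snapshot);
                copy.put(key, value);
                snapshot = Collections.unmodifiableMap(copy);
            }
        }
    }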
 
index e53f4941295f5a1891ad1e8dbdbf32ce2404d9b6..176e52708b0a9a7c8ce91710ad2631285904a283 100644 (file)
@@ -454,7 +454,6 @@ public class StatRpcMsgManagerImpl implements StatRpcMsgManager {
 
             @Override
             public Void call() throws Exception {
-                Preconditions.checkArgument(nodeRef != null, "NodeRef can not be null!");
                 final GetGroupDescriptionInputBuilder builder =
                         new GetGroupDescriptionInputBuilder();
                 builder.setNode(nodeRef);
index ca0e5b20ed8101206e685a64f39b3e0d96e5b7d9..396ff0e1f6c8700d16b8e07f491d33e1fcc63bcf 100644 (file)
@@ -300,7 +300,18 @@ public class StatisticsManagerImpl implements StatisticsManager, Runnable {
                return;
            }
        }
-       LOG.debug("Node {} has not removed.", nodeIdent);
+       LOG.debug("Node {} has not been removed.", nodeIdent);
+   }
+
+   @Override
+   public void registerAdditionalNodeFeature(final InstanceIdentifier<Node> nodeIdent,
+           final StatCapabTypes statCapab) {
+       for (final StatPermCollector collector : statCollectors) {
+           if (collector.registerAdditionalNodeFeature(nodeIdent, statCapab)) {
+               return;
+           }
+       }
+       LOG.debug("Node {} has not been extended for feature {}!", nodeIdent, statCapab);
    }
 
    /* Getter internal Statistic Manager Job Classes */
index 91385bab33e5102659e72c7f42a435f509b5c1fa..13cc973ba7127ccd16180dd0d0ff30c2a75acd07 100644 (file)
@@ -11,25 +11,24 @@ package org.opendaylight.controller.netconf.impl;
 import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
-import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
-import org.opendaylight.controller.netconf.mapping.api.Capability;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CapabilityProviderImpl implements CapabilityProvider {
     private final NetconfOperationServiceSnapshot netconfOperationServiceSnapshot;
     private final Set<String> capabilityURIs;
 
-    private static final Logger logger = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CapabilityProviderImpl.class);
 
     public CapabilityProviderImpl(NetconfOperationServiceSnapshot netconfOperationServiceSnapshot) {
         this.netconfOperationServiceSnapshot = netconfOperationServiceSnapshot;
@@ -49,7 +48,7 @@ public class CapabilityProviderImpl implements CapabilityProvider {
             for (Capability cap : caps) {
 
                 if(capabilityMap.containsKey(cap.getCapabilityUri())) {
-                    logger.debug("Duplicate capability {} from service {}", cap.getCapabilityUri(), netconfOperationService);
+                    LOG.debug("Duplicate capability {} from service {}", cap.getCapabilityUri(), netconfOperationService);
                 }
 
                 capabilityMap.put(cap.getCapabilityUri(), cap);
index 203fdf272584015f40f8bde0dd4e80b753eb51f0..ab37bac683829a95cc4e78ec24bcfb56c584faf1 100644 (file)
@@ -8,13 +8,7 @@
 
 package org.opendaylight.controller.netconf.impl;
 
-import org.opendaylight.controller.netconf.api.jmx.CommitJMXNotification;
-import org.opendaylight.controller.netconf.api.jmx.DefaultCommitOperationMXBean;
-import org.opendaylight.controller.netconf.api.jmx.NetconfJMXNotification;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Element;
-
+import java.util.Set;
 import javax.management.InstanceAlreadyExistsException;
 import javax.management.InstanceNotFoundException;
 import javax.management.MBeanRegistrationException;
@@ -22,12 +16,17 @@ import javax.management.MBeanServer;
 import javax.management.NotCompliantMBeanException;
 import javax.management.NotificationBroadcasterSupport;
 import javax.management.ObjectName;
-import java.util.Set;
+import org.opendaylight.controller.netconf.api.jmx.CommitJMXNotification;
+import org.opendaylight.controller.netconf.api.jmx.DefaultCommitOperationMXBean;
+import org.opendaylight.controller.netconf.api.jmx.NetconfJMXNotification;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Element;
 
 public class DefaultCommitNotificationProducer extends NotificationBroadcasterSupport implements
         DefaultCommitOperationMXBean, AutoCloseable {
 
-    private static final Logger logger = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
 
     private final MBeanServer mbeanServer;
 
@@ -35,7 +34,7 @@ public class DefaultCommitNotificationProducer extends NotificationBroadcasterSu
 
     public DefaultCommitNotificationProducer(MBeanServer mBeanServer) {
         this.mbeanServer = mBeanServer;
-        logger.debug("Registering to JMX under {}", on);
+        LOG.debug("Registering to JMX under {}", on);
         registerMBean(this, mbeanServer, on);
     }
 
@@ -49,7 +48,7 @@ public class DefaultCommitNotificationProducer extends NotificationBroadcasterSu
 
     public void sendCommitNotification(String message, Element cfgSnapshot, Set<String> capabilities) {
         CommitJMXNotification notif = NetconfJMXNotification.afterCommit(this, message, cfgSnapshot, capabilities);
-        logger.debug("Notification about commit {} sent", notif);
+        LOG.debug("Notification about commit {} sent", notif);
         sendNotification(notif);
     }
 
@@ -58,7 +57,7 @@ public class DefaultCommitNotificationProducer extends NotificationBroadcasterSu
         try {
             mbeanServer.unregisterMBean(on);
         } catch (InstanceNotFoundException | MBeanRegistrationException e) {
-            logger.warn("Ignoring exception while unregistering {} as {}", this, on, e);
+            LOG.warn("Ignoring exception while unregistering {} as {}", this, on, e);
         }
     }
 }
index 7d409092b9178bcd2816c96371108930f200fe3a..ca604a4f6531c4f03d878825ac5fde5cfd360aeb 100644 (file)
@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
 
 public final class NetconfServerSession extends AbstractNetconfSession<NetconfServerSession, NetconfServerSessionListener> implements NetconfManagementSession {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfServerSession.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSession.class);
 
     private final NetconfHelloMessageAdditionalHeader header;
 
@@ -50,7 +50,7 @@ public final class NetconfServerSession extends AbstractNetconfSession<NetconfSe
             NetconfHelloMessageAdditionalHeader header) {
         super(sessionListener, channel, sessionId);
         this.header = header;
-        logger.debug("Session {} created", toString());
+        LOG.debug("Session {} created", toString());
     }
 
     @Override
@@ -111,9 +111,12 @@ public final class NetconfServerSession extends AbstractNetconfSession<NetconfSe
 
     private Class<? extends Transport> getTransportForString(String transport) {
         switch(transport) {
-        case "ssh" : return NetconfSsh.class;
-        case "tcp" : return NetconfTcp.class;
-        default: throw new IllegalArgumentException("Unknown transport type " + transport);
+        case "ssh" :
+            return NetconfSsh.class;
+        case "tcp" :
+            return NetconfTcp.class;
+        default:
+            throw new IllegalArgumentException("Unknown transport type " + transport);
         }
     }
 
index 4f2f65c1a00732137229fba7e1416d9c1db70904..b2b8c50029868d507971cf70d1f22af940861e0d 100644 (file)
@@ -11,7 +11,6 @@ package org.opendaylight.controller.netconf.impl;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
-
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.NetconfMessage;
 import org.opendaylight.controller.netconf.api.NetconfSessionListener;
@@ -31,7 +30,7 @@ import org.w3c.dom.Node;
 
 public class NetconfServerSessionListener implements NetconfSessionListener<NetconfServerSession> {
 
-    static final Logger logger = LoggerFactory.getLogger(NetconfServerSessionListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionListener.class);
     private final SessionMonitoringService monitoringService;
     private final NetconfOperationRouter operationRouter;
     private final AutoCloseable onSessionDownCloseable;
@@ -50,7 +49,7 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
 
     @Override
     public void onSessionDown(NetconfServerSession netconfNetconfServerSession, Exception cause) {
-        logger.debug("Session {} down, reason: {}", netconfNetconfServerSession, cause.getMessage());
+        LOG.debug("Session {} down, reason: {}", netconfNetconfServerSession, cause.getMessage());
         onDown(netconfNetconfServerSession);
     }
 
@@ -60,19 +59,19 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
         try {
             operationRouter.close();
         } catch (Exception closingEx) {
-            logger.debug("Ignoring exception while closing operationRouter", closingEx);
+            LOG.debug("Ignoring exception while closing operationRouter", closingEx);
         }
         try {
             onSessionDownCloseable.close();
         } catch(Exception ex){
-            logger.debug("Ignoring exception while closing onSessionDownCloseable", ex);
+            LOG.debug("Ignoring exception while closing onSessionDownCloseable", ex);
         }
     }
 
     @Override
     public void onSessionTerminated(NetconfServerSession netconfNetconfServerSession,
             NetconfTerminationReason netconfTerminationReason) {
-        logger.debug("Session {} terminated, reason: {}", netconfNetconfServerSession,
+        LOG.debug("Session {} terminated, reason: {}", netconfNetconfServerSession,
                 netconfTerminationReason.getErrorMessage());
         onDown(netconfNetconfServerSession);
     }
@@ -86,7 +85,7 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
             // schemas
             final NetconfMessage message = processDocument(netconfMessage,
                     session);
-            logger.debug("Responding with message {}", XmlUtil.toString(message.getDocument()));
+            LOG.debug("Responding with message {}", XmlUtil.toString(message.getDocument()));
             session.sendMessage(message);
 
             if (isCloseSession(netconfMessage)) {
@@ -95,11 +94,11 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
 
         } catch (final RuntimeException e) {
             // TODO: should send generic error or close session?
-            logger.error("Unexpected exception", e);
+            LOG.error("Unexpected exception", e);
             session.onIncommingRpcFail();
             throw new IllegalStateException("Unable to process incoming message " + netconfMessage, e);
         } catch (NetconfDocumentedException e) {
-            logger.trace("Error occurred while processing message",e);
+            LOG.trace("Error occurred while processing message",e);
             session.onOutgoingRpcError();
             session.onIncommingRpcFail();
             SendErrorExceptionUtil.sendErrorMessage(session, e, netconfMessage);
@@ -109,7 +108,7 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
     private void closeNetconfSession(NetconfServerSession session) {
         // destroy NetconfOperationService
         session.close();
-        logger.info("Session {} closed successfully", session.getSessionId());
+        LOG.info("Session {} closed successfully", session.getSessionId());
     }
 
 
@@ -148,7 +147,9 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
     }
 
     private void checkMessageId(Node rootNode) throws NetconfDocumentedException {
-            NamedNodeMap attributes = rootNode.getAttributes();
+
+        NamedNodeMap attributes = rootNode.getAttributes();
+
         if(attributes.getNamedItemNS(XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.MESSAGE_ID)!=null) {
             return;
         }
@@ -159,8 +160,9 @@ public class NetconfServerSessionListener implements NetconfSessionListener<Netc
 
         throw new NetconfDocumentedException("Missing attribute" + rootNode.getNodeName(),
                 NetconfDocumentedException.ErrorType.protocol, NetconfDocumentedException.ErrorTag.missing_attribute,
-                NetconfDocumentedException.ErrorSeverity.error, ImmutableMap.of(NetconfDocumentedException.ErrorTag.missing_attribute.toString(),
-                XmlNetconfConstants.MESSAGE_ID));
+                NetconfDocumentedException.ErrorSeverity.error,
+                ImmutableMap.of(NetconfDocumentedException.ErrorTag.missing_attribute.toString(),
+                        XmlNetconfConstants.MESSAGE_ID));
     }
 
     private static boolean isCloseSession(final NetconfMessage incomingDocument) {
index 34a8bc4fd57f7a670aad66aa68cc824bf2905572..4be8b91a946ebbc115077d71e5d8751e8757f047 100644 (file)
@@ -8,8 +8,8 @@
 
 package org.opendaylight.controller.netconf.impl;
 
-import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
 import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
 import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouterImpl;
 import org.opendaylight.controller.netconf.impl.osgi.SessionMonitoringService;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
index f7bb281b9122b259e6e4df1a2e15b5bdca63b22f..2f72ad4d4769964021c0cc5f443692c4993ec0aa 100644 (file)
@@ -13,6 +13,10 @@ import io.netty.channel.Channel;
 import io.netty.channel.local.LocalAddress;
 import io.netty.util.Timer;
 import io.netty.util.concurrent.Promise;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.AbstractMap;
+import java.util.Map;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.NetconfServerSessionPreferences;
 import org.opendaylight.controller.netconf.nettyutil.AbstractNetconfSessionNegotiator;
@@ -21,17 +25,12 @@ import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAddi
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.AbstractMap;
-import java.util.Map;
-
 public class NetconfServerSessionNegotiator
         extends
         AbstractNetconfSessionNegotiator<NetconfServerSessionPreferences, NetconfServerSession, NetconfServerSessionListener> {
 
-    static final Logger logger = LoggerFactory
-            .getLogger(NetconfServerSessionNegotiator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionNegotiator.class);
+
     private static final String UNKNOWN = "unknown";
 
     protected NetconfServerSessionNegotiator(
@@ -71,7 +70,7 @@ public class NetconfServerSessionNegotiator
 
         }
 
-        logger.debug("Additional header from hello parsed as {} from {}",
+        LOG.debug("Additional header from hello parsed as {} from {}",
                 parsedHeader, additionalHeader);
 
         return new NetconfServerSession(sessionListener, channel,
index 487ffd6e5dc2d8f5e7dbd13db1402b3d7e0559a7..34f4f0e653137235a22e917a99224bb96f138ca7 100644 (file)
@@ -12,9 +12,11 @@ import static org.opendaylight.controller.netconf.mapping.api.NetconfOperationPr
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableSet;
-
+import com.google.common.collect.Sets;
+import io.netty.channel.Channel;
+import io.netty.util.Timer;
+import io.netty.util.concurrent.Promise;
 import java.util.Set;
-
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.NetconfServerSessionPreferences;
 import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
@@ -26,13 +28,6 @@ import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessage;
 import org.opendaylight.protocol.framework.SessionListenerFactory;
 import org.opendaylight.protocol.framework.SessionNegotiator;
 import org.opendaylight.protocol.framework.SessionNegotiatorFactory;
-
-import com.google.common.collect.Sets;
-
-import io.netty.channel.Channel;
-import io.netty.util.Timer;
-import io.netty.util.concurrent.Promise;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,7 +46,7 @@ public class NetconfServerSessionNegotiatorFactory implements SessionNegotiatorF
     private final long connectionTimeoutMillis;
     private final DefaultCommitNotificationProducer commitNotificationProducer;
     private final SessionMonitoringService monitoringService;
-    private static final Logger logger = LoggerFactory.getLogger(NetconfServerSessionNegotiatorFactory.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionNegotiatorFactory.class);
     private final Set<String> baseCapabilities;
 
     // TODO too many params, refactor
@@ -112,7 +107,7 @@ public class NetconfServerSessionNegotiatorFactory implements SessionNegotiatorF
             proposal = new NetconfServerSessionPreferences(
                     createHelloMessage(sessionId, capabilityProvider), sessionId);
         } catch (NetconfDocumentedException e) {
-            logger.error("Unable to create hello mesage for session {} with capability provider {}", sessionId,capabilityProvider);
+            LOG.error("Unable to create hello mesage for session {} with capability provider {}", sessionId,capabilityProvider);
             throw new IllegalStateException(e);
         }
 
index 8b2ca86010a06aa5c1b509bd617a08de72fd684a..2e7accad45e8950b7be2c3fb11a0a1ad1e74b93d 100644 (file)
@@ -28,14 +28,14 @@ import org.xml.sax.SAXException;
  * See <a href="http://tools.ietf.org/html/rfc6241#section-6">rfc6241</a> for details.
  */
 public class SubtreeFilter {
-    private static final Logger logger = LoggerFactory.getLogger(SubtreeFilter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SubtreeFilter.class);
 
     static Document applySubtreeFilter(Document requestDocument, Document rpcReply) throws NetconfDocumentedException {
         // FIXME: rpcReply document must be reread otherwise some nodes do not inherit namespaces. (services/service)
         try {
             rpcReply = XmlUtil.readXmlToDocument(XmlUtil.toString(rpcReply, true));
         } catch (SAXException | IOException e) {
-            logger.error("Cannot transform document", e);
+            LOG.error("Cannot transform document", e);
             throw new NetconfDocumentedException("Cannot transform document");
         }
 
@@ -49,8 +49,8 @@ public class SubtreeFilter {
                     XmlNetconfConstants.FILTER, XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0);
             if (maybeFilter.isPresent() && (
                     "subtree".equals(maybeFilter.get().getAttribute("type"))||
-                            "subtree".equals(maybeFilter.get().getAttribute("type", XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0))
-            )) {
+                            "subtree".equals(maybeFilter.get().getAttribute("type", XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0)))
+            ) {
 
 
                 // do
@@ -160,7 +160,7 @@ public class SubtreeFilter {
         if (result == null) {
             result = MatchingResult.NO_MATCH;
         }
-        logger.debug("Matching {} to {} resulted in {}", src, filter, result);
+        LOG.debug("Matching {} to {} resulted in {}", src, filter, result);
         return result;
     }
 
index b7a98bae83acae9a1ee0f525a015db251c998f3e..8c7465b02961197e4a1bc5c836d10d00c0441be5 100644 (file)
@@ -8,6 +8,8 @@
 
 package org.opendaylight.controller.netconf.impl.mapping.operations;
 
+import com.google.common.base.Optional;
+import java.util.Collections;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
 import org.opendaylight.controller.netconf.util.mapping.AbstractSingletonNetconfOperation;
@@ -16,10 +18,6 @@ import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
-import com.google.common.base.Optional;
-
-import java.util.Collections;
-
 public class DefaultCloseSession extends AbstractSingletonNetconfOperation {
     public static final String CLOSE_SESSION = "close-session";
     private final AutoCloseable sessionResources;
@@ -49,7 +47,7 @@ public class DefaultCloseSession extends AbstractSingletonNetconfOperation {
                     + getNetconfSessionIdForReporting(), NetconfDocumentedException.ErrorType.application,
                     NetconfDocumentedException.ErrorTag.operation_failed,
                     NetconfDocumentedException.ErrorSeverity.error, Collections.singletonMap(
-                    NetconfDocumentedException.ErrorSeverity.error.toString(), e.getMessage()));
+                        NetconfDocumentedException.ErrorSeverity.error.toString(), e.getMessage()));
         }
         return XmlUtil.createElement(document, XmlNetconfConstants.OK, Optional.<String>absent());
     }
index 67b782c7c1d7666a32f78fe08f0ff77d5dd9f17e..fbe855f8bebddcb8aeffd728b553ce112fc50f6c 100644 (file)
@@ -9,12 +9,12 @@
 package org.opendaylight.controller.netconf.impl.mapping.operations;
 
 import com.google.common.base.Preconditions;
-
+import java.io.InputStream;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
-import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
 import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
 import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
 import org.opendaylight.controller.netconf.mapping.api.HandlingPriority;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
 import org.opendaylight.controller.netconf.util.mapping.AbstractNetconfOperation;
@@ -25,11 +25,9 @@ import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
-import java.io.InputStream;
-
 public class DefaultCommit extends AbstractNetconfOperation {
 
-    private static final Logger logger = LoggerFactory.getLogger(DefaultCommit.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultCommit.class);
 
     private static final String NOTIFY_ATTR = "notify";
 
@@ -69,12 +67,12 @@ public class DefaultCommit extends AbstractNetconfOperation {
                 "Subsequent netconf operation expected by %s", this);
 
         if (isCommitWithoutNotification(requestMessage)) {
-            logger.debug("Skipping commit notification");
+            LOG.debug("Skipping commit notification");
         } else {
             // Send commit notification if commit was not issued by persister
             removePersisterAttributes(requestMessage);
             Element cfgSnapshot = getConfigSnapshot(operationRouter);
-            logger.debug("Config snapshot retrieved successfully {}", cfgSnapshot);
+            LOG.debug("Config snapshot retrieved successfully {}", cfgSnapshot);
             notificationProducer.sendCommitNotification("ok", cfgSnapshot, cap.getCapabilities());
         }
 
@@ -101,7 +99,7 @@ public class DefaultCommit extends AbstractNetconfOperation {
             xmlElement = XmlElement.fromDomElementWithExpected(message.getDocumentElement(),
                     XmlNetconfConstants.RPC_KEY, XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0);
         } catch (NetconfDocumentedException e) {
-            logger.trace("Commit operation is not valid due to  {}",e);
+            LOG.trace("Commit operation is not valid due to ",e);
             return false;
         }
 
@@ -110,7 +108,7 @@ public class DefaultCommit extends AbstractNetconfOperation {
         if (attr == null || attr.equals("")){
             return false;
         } else if (attr.equals(Boolean.toString(false))) {
-            logger.debug("Commit operation received with notify=false attribute {}", message);
+            LOG.debug("Commit operation received with notify=false attribute {}", message);
             return true;
         } else {
             return false;
index 6db74eaba2ec0aba6efb421b40e12e735709edde..53107048760dbee1ae27f23be7977b15481ba39a 100644 (file)
@@ -10,7 +10,7 @@ package org.opendaylight.controller.netconf.impl.mapping.operations;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.Maps;
-
+import java.util.Map;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
 import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
@@ -23,14 +23,12 @@ import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
-import java.util.Map;
-
 public final class DefaultGetSchema extends AbstractLastNetconfOperation {
     public static final String GET_SCHEMA = "get-schema";
     public static final String IDENTIFIER = "identifier";
     public static final String VERSION = "version";
 
-    private static final Logger logger = LoggerFactory.getLogger(DefaultGetSchema.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultGetSchema.class);
     private final CapabilityProvider cap;
 
     public DefaultGetSchema(CapabilityProvider cap, String netconfSessionIdForReporting) {
@@ -60,7 +58,7 @@ public final class DefaultGetSchema extends AbstractLastNetconfOperation {
         } catch (IllegalStateException e) {
             Map<String, String> errorInfo = Maps.newHashMap();
             errorInfo.put(entry.identifier, e.getMessage());
-            logger.warn("Rpc error: {}", NetconfDocumentedException.ErrorTag.operation_failed, e);
+            LOG.warn("Rpc error: {}", NetconfDocumentedException.ErrorTag.operation_failed, e);
             throw new NetconfDocumentedException(e.getMessage(), NetconfDocumentedException.ErrorType.application,
                     NetconfDocumentedException.ErrorTag.operation_failed,
                     NetconfDocumentedException.ErrorSeverity.error, errorInfo);
@@ -69,7 +67,7 @@ public final class DefaultGetSchema extends AbstractLastNetconfOperation {
         Element getSchemaResult;
         getSchemaResult = XmlUtil.createTextElement(document, XmlNetconfConstants.DATA_KEY, schema,
                 Optional.of(XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_YANG_IETF_NETCONF_MONITORING));
-        logger.trace("{} operation successful", GET_SCHEMA);
+        LOG.trace("{} operation successful", GET_SCHEMA);
 
         return getSchemaResult;
     }
@@ -86,7 +84,7 @@ public final class DefaultGetSchema extends AbstractLastNetconfOperation {
             try {
                 identifierElement = getSchemaElement.getOnlyChildElementWithSameNamespace(IDENTIFIER);
             } catch (MissingNameSpaceException e) {
-                logger.trace("Can't get identifier element as only child element with same namespace due to {}",e);
+                LOG.trace("Can't get identifier element as only child element with same namespace due to ",e);
                 throw NetconfDocumentedException.wrap(e);
             }
             identifier = identifierElement.getTextContent();
index 6de185ac1c1da4948fc343a4be19759362af03f0..3f0ae27dbbcea4e1226694eedd3aaab4a05ce586 100644 (file)
@@ -11,8 +11,8 @@ import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorSeverity;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorTag;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorType;
-import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
 import org.opendaylight.controller.netconf.api.NetconfMessage;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
 import org.opendaylight.controller.netconf.impl.NetconfServerSession;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
 import org.opendaylight.controller.netconf.util.mapping.AbstractSingletonNetconfOperation;
@@ -25,7 +25,7 @@ import org.w3c.dom.Element;
 public class DefaultStartExi extends AbstractSingletonNetconfOperation implements DefaultNetconfOperation {
     public static final String START_EXI = "start-exi";
 
-    private static final Logger logger = LoggerFactory.getLogger(DefaultStartExi.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultStartExi.class);
     private NetconfServerSession netconfSession;
 
     public DefaultStartExi(String netconfSessionIdForReporting) {
@@ -35,7 +35,7 @@ public class DefaultStartExi extends AbstractSingletonNetconfOperation implement
     @Override
     public Document handle(Document message,
                            NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
-        logger.debug("Received start-exi message {} ", XmlUtil.toString(message));
+        LOG.debug("Received start-exi message {} ", XmlUtil.toString(message));
 
         try {
             netconfSession.startExiCommunication(new NetconfMessage(message));
@@ -50,7 +50,7 @@ public class DefaultStartExi extends AbstractSingletonNetconfOperation implement
     @Override
     protected Element handleWithNoSubsequentOperations(Document document, XmlElement operationElement) throws NetconfDocumentedException {
         Element getSchemaResult = document.createElementNS( XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.OK);
-        logger.trace("{} operation successful", START_EXI);
+        LOG.trace("{} operation successful", START_EXI);
         return getSchemaResult;
     }
 
index 233638d5b08d57b608957d1f18085535918f80f6..2b90a6150552e9306f3a137c6fe7056b2e422b3d 100644 (file)
@@ -23,8 +23,8 @@ public class DefaultStopExi extends AbstractSingletonNetconfOperation implements
     public static final String STOP_EXI = "stop-exi";
     private NetconfServerSession netconfSession;
 
-    private static final Logger logger = LoggerFactory
-            .getLogger(DefaultStartExi.class);
+    private static final Logger LOG = LoggerFactory
+            .getLogger(DefaultStopExi.class);
 
     public DefaultStopExi(String netconfSessionIdForReporting) {
         super(netconfSessionIdForReporting);
@@ -32,12 +32,12 @@ public class DefaultStopExi extends AbstractSingletonNetconfOperation implements
 
     @Override
     protected Element handleWithNoSubsequentOperations(Document document, XmlElement operationElement) throws NetconfDocumentedException {
-        logger.debug("Received stop-exi message {} ", XmlUtil.toString(operationElement));
+        LOG.debug("Received stop-exi message {} ", XmlUtil.toString(operationElement));
 
         netconfSession.stopExiCommunication();
 
         Element getSchemaResult = document.createElementNS( XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.OK);
-        logger.trace("{} operation successful", STOP_EXI);
+        LOG.trace("{} operation successful", STOP_EXI);
         return getSchemaResult;
     }
 
index 6ab62ef29a82037cd12e01c5f71d88e34a32a5bd..27423c09b7fb732e0bd6364dc3469efd2c13f884 100644 (file)
@@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory;
 
 public class NetconfImplActivator implements BundleActivator {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfImplActivator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfImplActivator.class);
 
     private NetconfOperationServiceFactoryTracker factoriesTracker;
     private DefaultCommitNotificationProducer commitNot;
@@ -62,7 +62,7 @@ public class NetconfImplActivator implements BundleActivator {
         NetconfServerDispatcher dispatch = new NetconfServerDispatcher(serverChannelInitializer, eventLoopGroup, eventLoopGroup);
 
         LocalAddress address = NetconfConfigUtil.getNetconfLocalAddress();
-        logger.trace("Starting local netconf server at {}", address);
+        LOG.trace("Starting local netconf server at {}", address);
         dispatch.createLocalServer(address);
 
         context.registerService(NetconfOperationProvider.class, factoriesListener, null);
@@ -84,7 +84,7 @@ public class NetconfImplActivator implements BundleActivator {
 
     @Override
     public void stop(final BundleContext context) {
-        logger.info("Shutting down netconf because YangStoreService service was removed");
+        LOG.info("Shutting down netconf because YangStoreService service was removed");
 
         commitNot.close();
         eventLoopGroup.shutdownGracefully(0, 1, TimeUnit.SECONDS);
index 2a45e1757b8655a6b6be8bcf097867b677fd70d1..3f44ff4ff834120d3edb655e55b5419b1569e7e0 100644 (file)
@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
 
 public class NetconfMonitoringServiceImpl implements NetconfMonitoringService, SessionMonitoringService {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringServiceImpl.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringServiceImpl.class);
 
     private final Set<NetconfManagementSession> sessions = new ConcurrentSet<>();
     private final NetconfOperationProvider netconfOperationProvider;
@@ -49,14 +49,14 @@ public class NetconfMonitoringServiceImpl implements NetconfMonitoringService, S
 
     @Override
     public void onSessionUp(NetconfManagementSession session) {
-        logger.debug("Session {} up", session);
+        LOG.debug("Session {} up", session);
         Preconditions.checkState(!sessions.contains(session), "Session %s was already added", session);
         sessions.add(session);
     }
 
     @Override
     public void onSessionDown(NetconfManagementSession session) {
-        logger.debug("Session {} down", session);
+        LOG.debug("Session {} down", session);
         Preconditions.checkState(sessions.contains(session), "Session %s not present", session);
         sessions.remove(session);
     }
index aa590604b0087d5f7cc79f135a246b6a929abe07..6915ee4bfe373fa2ac402cedb5edd5f7540f643f 100644 (file)
@@ -10,6 +10,12 @@ package org.opendaylight.controller.netconf.impl.osgi;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.TreeMap;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
 import org.opendaylight.controller.netconf.impl.NetconfServerSession;
@@ -30,16 +36,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Set;
-import java.util.TreeMap;
-
 public class NetconfOperationRouterImpl implements NetconfOperationRouter {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfOperationRouterImpl.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfOperationRouterImpl.class);
 
     private final NetconfOperationServiceSnapshot netconfOperationServiceSnapshot;
     private Set<NetconfOperation> allNetconfOperations;
@@ -104,7 +103,7 @@ public class NetconfOperationRouterImpl implements NetconfOperationRouter {
             messageAsString = XmlUtil.toString(message);
             netconfOperationExecution = getNetconfOperationWithHighestPriority(message, session);
         } catch (IllegalArgumentException | IllegalStateException e) {
-            logger.warn("Unable to handle rpc {} on session {}", messageAsString, session, e);
+            LOG.warn("Unable to handle rpc {} on session {}", messageAsString, session, e);
 
             String errorMessage = String.format("Unable to handle rpc %s on session %s", messageAsString, session);
             Map<String, String> errorInfo = Maps.newHashMap();
@@ -137,7 +136,7 @@ public class NetconfOperationRouterImpl implements NetconfOperationRouter {
     }
 
     private NetconfDocumentedException handleUnexpectedEx(String s, Exception e) throws NetconfDocumentedException {
-        logger.error(s, e);
+        LOG.error(s, e);
 
         Map<String, String> info = Maps.newHashMap();
         info.put(NetconfDocumentedException.ErrorSeverity.error.toString(), e.toString());
@@ -150,7 +149,7 @@ public class NetconfOperationRouterImpl implements NetconfOperationRouter {
     private Document executeOperationWithHighestPriority(Document message,
             NetconfOperationExecution netconfOperationExecution, String messageAsString)
             throws NetconfDocumentedException {
-        logger.debug("Forwarding netconf message {} to {}", messageAsString, netconfOperationExecution.netconfOperation);
+        LOG.debug("Forwarding netconf message {} to {}", messageAsString, netconfOperationExecution.netconfOperation);
         return netconfOperationExecution.execute(message);
     }
 
index 63cd0baf347f8821a2f639cac5fca55cf4eae774..6c55c35e25b44716e1f930007f77d9c90f238e56 100644 (file)
@@ -7,11 +7,10 @@
  */
 package org.opendaylight.controller.netconf.impl.osgi;
 
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
-
 import java.util.HashSet;
 import java.util.Set;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
 
 public class NetconfOperationServiceFactoryListenerImpl implements NetconfOperationServiceFactoryListener,
         NetconfOperationProvider {
index ce312548b2858f7884596ea1f8d2b9f927f966a5..1a35e4d6bace2e313c084cf5ffdf23bbfd3833bb 100644 (file)
@@ -8,15 +8,14 @@
 
 package org.opendaylight.controller.netconf.impl.osgi;
 
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
 import org.opendaylight.controller.netconf.util.CloseableUtil;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
 public class NetconfOperationServiceSnapshotImpl implements NetconfOperationServiceSnapshot {
 
     private final Set<NetconfOperationService> services;
index 31c4d4f57e7b65f6f4b9ae5e28376346e3be3600..e336914981222e5572a77a4f2b09a05fea82dc50 100644 (file)
@@ -11,18 +11,15 @@ package org.opendaylight.controller.netconf.impl.util;
 import com.google.common.collect.Maps;
 import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
+import java.util.Map;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.util.messages.SendErrorExceptionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Map;
-
-public final class
-        DeserializerExceptionHandler implements ChannelHandler {
-
-    private static final Logger logger = LoggerFactory.getLogger(DeserializerExceptionHandler.class);
+public final class DeserializerExceptionHandler implements ChannelHandler {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DeserializerExceptionHandler.class);
 
     @Override
     public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
@@ -36,7 +33,7 @@ public final class
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
-        logger.warn("An exception occurred during message handling", cause);
+        LOG.warn("An exception occurred during message handling", cause);
         handleDeserializerException(ctx, cause);
     }
 
index 444d4fe4abad44c6168d7ef2befff13f5d1af7fe..355f84ac66a581ed73557885cf0f4d2428bc1c78 100644 (file)
@@ -8,6 +8,7 @@
 package org.opendaylight.controller.netconf.impl;
 
 import static org.junit.Assert.assertEquals;
+
 import org.junit.Test;
 import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
 
index 5f8bc06e1015625cf48b9134058f928c54f337c1..3b83daa693fb926a7e28831fcf64f6761d3cdd7d 100644 (file)
@@ -18,13 +18,11 @@ import static org.mockito.Mockito.mock;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.util.HashedWheelTimer;
 import io.netty.util.concurrent.GlobalEventExecutor;
-
 import java.io.DataOutputStream;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -42,7 +40,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.commons.io.IOUtils;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -57,9 +54,9 @@ import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
 import org.opendaylight.controller.netconf.client.NetconfClientDispatcher;
 import org.opendaylight.controller.netconf.client.NetconfClientDispatcherImpl;
 import org.opendaylight.controller.netconf.client.SimpleNetconfClientSessionListener;
+import org.opendaylight.controller.netconf.client.TestingNetconfClient;
 import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration;
 import org.opendaylight.controller.netconf.client.conf.NetconfClientConfigurationBuilder;
-import org.opendaylight.controller.netconf.client.TestingNetconfClient;
 import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationServiceFactoryListenerImpl;
 import org.opendaylight.controller.netconf.impl.osgi.SessionMonitoringService;
 import org.opendaylight.controller.netconf.mapping.api.Capability;
@@ -80,7 +77,7 @@ import org.w3c.dom.Document;
 
 @RunWith(Parameterized.class)
 public class ConcurrentClientsTest {
-    private static final Logger logger = LoggerFactory.getLogger(ConcurrentClientsTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ConcurrentClientsTest.class);
 
     private static ExecutorService clientExecutor;
 
@@ -99,16 +96,14 @@ public class ConcurrentClientsTest {
 
     @Parameterized.Parameters()
     public static Collection<Object[]> data() {
-        return Arrays.asList(new Object[][]{
-                {4, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
-                {1, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
-                // empty set of capabilities = only base 1.0 netconf capability
-                {4, TestingNetconfClientRunnable.class, Collections.emptySet()},
-                {4, TestingNetconfClientRunnable.class, getOnlyExiServerCaps()},
-                {4, TestingNetconfClientRunnable.class, getOnlyChunkServerCaps()},
-
-                {4, BlockingClientRunnable.class, getOnlyExiServerCaps()},
-                {1, BlockingClientRunnable.class, getOnlyExiServerCaps()},
+        return Arrays.asList(new Object[][]{{4, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
+                                            {1, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
+                                            // empty set of capabilities = only base 1.0 netconf capability
+                                            {4, TestingNetconfClientRunnable.class, Collections.emptySet()},
+                                            {4, TestingNetconfClientRunnable.class, getOnlyExiServerCaps()},
+                                            {4, TestingNetconfClientRunnable.class, getOnlyChunkServerCaps()},
+                                            {4, BlockingClientRunnable.class, getOnlyExiServerCaps()},
+                                            {1, BlockingClientRunnable.class, getOnlyExiServerCaps()},
         });
     }
 
@@ -174,7 +169,7 @@ public class ConcurrentClientsTest {
         try {
             nettyGroup.shutdownGracefully().get();
         } catch (InterruptedException | ExecutionException e) {
-            logger.warn("Ignoring exception while cleaning up after test", e);
+            LOG.warn("Ignoring exception while cleaning up after test", e);
         }
     }
 
@@ -198,7 +193,7 @@ public class ConcurrentClientsTest {
             } catch (InterruptedException e) {
                 throw new IllegalStateException(e);
             } catch (ExecutionException e) {
-                logger.error("Thread for testing client failed", e);
+                LOG.error("Thread for testing client failed", e);
                 fail("Client failed: " + e.getMessage());
             }
         }
@@ -241,7 +236,7 @@ public class ConcurrentClientsTest {
         @Override
         public Document handle(Document requestMessage, NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
             try {
-                logger.info("Handling netconf message from test {}", XmlUtil.toString(requestMessage));
+                LOG.info("Handling netconf message from test {}", XmlUtil.toString(requestMessage));
                 counter.getAndIncrement();
                 return XmlUtil.readXmlToDocument("<test/>");
             } catch (Exception e) {
@@ -310,7 +305,7 @@ public class ConcurrentClientsTest {
             while (sb.toString().endsWith("]]>]]>") == false) {
                 sb.append((char) inFromServer.read());
             }
-            logger.info(sb.toString());
+            LOG.info(sb.toString());
 
             outToServer.write(IOUtils.toByteArray(clientHello));
             outToServer.write("]]>]]>".getBytes());
@@ -324,7 +319,7 @@ public class ConcurrentClientsTest {
             while (sb.toString().endsWith("]]>]]>") == false) {
                 sb.append((char) inFromServer.read());
             }
-            logger.info(sb.toString());
+            LOG.info(sb.toString());
             clientSocket.close();
         }
     }
@@ -340,19 +335,19 @@ public class ConcurrentClientsTest {
                 final TestingNetconfClient netconfClient =
                         new TestingNetconfClient(Thread.currentThread().getName(), netconfClientDispatcher, getClientConfig());
                 long sessionId = netconfClient.getSessionId();
-                logger.info("Client with session id {}: hello exchanged", sessionId);
+                LOG.info("Client with session id {}: hello exchanged", sessionId);
 
                 final NetconfMessage getMessage = XmlFileLoader
                         .xmlFileToNetconfMessage("netconfMessages/getConfig.xml");
                 NetconfMessage result = netconfClient.sendRequest(getMessage).get();
-                logger.info("Client with session id {}: got result {}", sessionId, result);
+                LOG.info("Client with session id {}: got result {}", sessionId, result);
 
                 Preconditions.checkState(NetconfMessageUtil.isErrorMessage(result) == false,
                         "Received error response: " + XmlUtil.toString(result.getDocument()) + " to request: "
                                 + XmlUtil.toString(getMessage.getDocument()));
 
                 netconfClient.close();
-                logger.info("Client with session id {}: ended", sessionId);
+                LOG.info("Client with session id {}: ended", sessionId);
             } catch (final Exception e) {
                 throw new IllegalStateException(Thread.currentThread().getName(), e);
             }
index 42bd033c712d22daf600322680739b1f7b9f2b10..ecc33ca90fd05f5392d6173c6d88cd4a7a36128c 100644 (file)
@@ -12,14 +12,13 @@ import io.netty.channel.ChannelFuture;
 import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.util.HashedWheelTimer;
+import java.lang.management.ManagementFactory;
+import java.net.InetSocketAddress;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationServiceFactoryListenerImpl;
 
-import java.lang.management.ManagementFactory;
-import java.net.InetSocketAddress;
-
 public class NetconfDispatcherImplTest {
 
     private EventLoopGroup nettyGroup;
index 21250357994644a7bafa0940acc880a802c43a0f..395e5c03384abad0e988789d5bf5d42f8db42d9c 100644 (file)
@@ -8,6 +8,17 @@
 
 package org.opendaylight.controller.netconf.impl;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import com.google.common.base.Optional;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -18,8 +29,6 @@ import org.junit.Test;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
-import org.opendaylight.controller.netconf.impl.NetconfServerSession;
-import org.opendaylight.controller.netconf.impl.NetconfServerSessionListener;
 import org.opendaylight.controller.netconf.impl.osgi.NetconfMonitoringServiceImpl;
 import org.opendaylight.controller.netconf.mapping.api.Capability;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
@@ -27,11 +36,6 @@ import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
 import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
 import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.*;
-
 public class NetconfMonitoringServiceImplTest {
 
     private NetconfMonitoringServiceImpl service;
index 51dfa4b1a8df3ed9a785c468ec38b8ddfc5136f7..b925295e74a8ec86e8b235a48f691c2222d82aa8 100644 (file)
@@ -29,7 +29,7 @@ import org.xml.sax.SAXException;
 
 @RunWith(value = Parameterized.class)
 public class SubtreeFilterTest {
-    private static final Logger logger = LoggerFactory.getLogger(SubtreeFilterTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SubtreeFilterTest.class);
 
     private final int directoryIndex;
 
@@ -57,7 +57,7 @@ public class SubtreeFilterTest {
         Document preFilterDocument = getDocument("pre-filter.xml");
         Document postFilterDocument = getDocument("post-filter.xml");
         Document actualPostFilterDocument = SubtreeFilter.applySubtreeFilter(requestDocument, preFilterDocument);
-        logger.info("Actual document: {}", XmlUtil.toString(actualPostFilterDocument));
+        LOG.info("Actual document: {}", XmlUtil.toString(actualPostFilterDocument));
         Diff diff = XMLUnit.compareXML(postFilterDocument, actualPostFilterDocument);
         assertTrue(diff.toString(), diff.similar());
 
index d6b0201ab88a0245e666b4bb7fe2e77552a0075b..d82c1484e49e2f4786c5ac3ee7ddca51d6a3b95d 100644 (file)
@@ -8,14 +8,15 @@
 
 package org.opendaylight.controller.netconf.impl.mapping.operations;
 
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
 import org.junit.Test;
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.util.xml.XmlElement;
 import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.w3c.dom.Document;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
 
 public class DefaultCloseSessionTest {
     @Test
index 98050de565c868a8e234da1c20757a52b1e1a81f..15aeb8d27c8badbafa64db0a7614174ce7155ceb 100644 (file)
@@ -8,6 +8,16 @@
 
 package org.opendaylight.controller.netconf.impl.mapping.operations;
 
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anySetOf;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import com.google.common.collect.Sets;
 import org.junit.Before;
 import org.junit.Test;
@@ -22,8 +32,6 @@ import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
-import static org.mockito.Mockito.*;
-
 public class DefaultCommitTest {
 
     private NetconfOperationChainedExecution operation;
index 4218176f1d8ff7297f769c9c0f097287c5ae24a8..4ca50f6a1cb1aaeb92ea65d6a5313270fb95e18a 100644 (file)
@@ -14,6 +14,7 @@ import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
+
 import com.google.common.base.Optional;
 import org.junit.Before;
 import org.junit.Test;
index b335165706e137ae2fd840a90c988c38b88329a7..c06e78aa997cfaafdc5dd2624d784a33bb047364 100644 (file)
@@ -8,6 +8,14 @@
 
 package org.opendaylight.controller.netconf.impl.mapping.operations;
 
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelPipeline;
@@ -17,9 +25,6 @@ import org.opendaylight.controller.netconf.util.xml.XmlElement;
 import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.w3c.dom.Document;
 
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Mockito.*;
-
 public class DefaultStopExiTest {
     @Test
     public void testHandleWithNoSubsequentOperations() throws Exception {
index fd9295a4b365fe7908b8a330cd205ae5ba2fa62c..413c9cc9452334640e3aab9269def616e99b1ff3 100644 (file)
@@ -8,6 +8,13 @@
 
 package org.opendaylight.controller.netconf.impl.osgi;
 
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import java.util.Arrays;
 import java.util.Dictionary;
 import org.junit.Before;
@@ -20,13 +27,6 @@ import org.osgi.framework.ServiceListener;
 import org.osgi.framework.ServiceReference;
 import org.osgi.framework.ServiceRegistration;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.anyString;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
 public class NetconfImplActivatorTest {
 
     @Mock
index 0d7158aa21de7fc02d69d85697f6031abbba6cfa..d744504bb2aa4160dfb6b57229a29e2623cad864 100644 (file)
@@ -8,6 +8,14 @@
 
 package org.opendaylight.controller.netconf.impl.osgi;
 
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mock;
@@ -17,10 +25,6 @@ import org.osgi.framework.BundleContext;
 import org.osgi.framework.Filter;
 import org.osgi.framework.ServiceReference;
 
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
 public class NetconfOperationServiceFactoryTrackerTest {
 
     @Mock
index 6512b4bd334e298785712dafb234993425fc00b8..946654e60e6b009a0fe4b7a639edc31c36363d2f 100644 (file)
@@ -8,6 +8,13 @@
 
 package org.opendaylight.controller.netconf.impl.util;
 
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyObject;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.ChannelHandlerContext;
@@ -15,8 +22,6 @@ import io.netty.util.concurrent.GenericFutureListener;
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
-
 public class DeserializerExceptionHandlerTest {
 
     private DeserializerExceptionHandler handler;
index bd931c33d0212858b615aed61d843e1e6f1744d9..a724d1d9c5e4e8b6cec46cdb94f98a7158c48407 100644 (file)
@@ -78,9 +78,11 @@ public abstract class AbstractNetconfConfigTest extends AbstractConfigTest {
     public static final String LOOPBACK_ADDRESS = "127.0.0.1";
     public static final int SERVER_CONNECTION_TIMEOUT_MILLIS = 5000;
 
-    static ModuleFactory[] FACTORIES = { new TestImplModuleFactory(),
-            new DepTestImplModuleFactory(), new NetconfTestImplModuleFactory(),
-            new IdentityTestModuleFactory(), new MultipleDependenciesModuleFactory() };
+    static ModuleFactory[] FACTORIES = {new TestImplModuleFactory(),
+                                        new DepTestImplModuleFactory(),
+                                        new NetconfTestImplModuleFactory(),
+                                        new IdentityTestModuleFactory(),
+                                        new MultipleDependenciesModuleFactory() };
 
     private EventLoopGroup nettyThreadgroup;
     private HashedWheelTimer hashedWheelTimer;
index a9558c06cdcd34472315c9ac80019e55dbb98b71..463b5f045efa9ea1f34a3694198ecc7e26fdd937 100644 (file)
@@ -72,17 +72,17 @@ public class NetconfITMonitoringTest extends AbstractNetconfConfigTest {
         return TCP_ADDRESS;
     }
 
-    static SessionMonitoringService getNetconfMonitoringListenerService(final Logger logger, final NetconfMonitoringServiceImpl monitor) {
+    static SessionMonitoringService getNetconfMonitoringListenerService(final Logger LOG, final NetconfMonitoringServiceImpl monitor) {
         return new SessionMonitoringService() {
             @Override
             public void onSessionUp(final NetconfManagementSession session) {
-                logger.debug("Management session up {}", session);
+                LOG.debug("Management session up {}", session);
                 monitor.onSessionUp(session);
             }
 
             @Override
             public void onSessionDown(final NetconfManagementSession session) {
-                logger.debug("Management session down {}", session);
+                LOG.debug("Management session down {}", session);
                 monitor.onSessionDown(session);
             }
         };
index 6e265a44a50a55f18a3555c73ddd4afed91f13b5..fdc9a020065be13d5de4268abcfe65c35117a206 100644 (file)
@@ -25,6 +25,7 @@ import io.netty.channel.EventLoopGroup;
 import io.netty.channel.local.LocalAddress;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.util.concurrent.GlobalEventExecutor;
+import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.nio.file.Files;
@@ -77,6 +78,7 @@ public class NetconfITSecureTest extends AbstractNetconfConfigTest {
     public static final String USERNAME = "user";
     public static final String PASSWORD = "pwd";
 
+    private File sshKeyPair;
     private SshProxyServer sshProxyServer;
 
     private ExecutorService nioExec;
@@ -85,6 +87,8 @@ public class NetconfITSecureTest extends AbstractNetconfConfigTest {
 
     @Before
     public void setUp() throws Exception {
+        sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+        sshKeyPair.deleteOnExit();
         nioExec = Executors.newFixedThreadPool(1);
         clientGroup = new NioEventLoopGroup();
         minaTimerEx = Executors.newScheduledThreadPool(1);
@@ -99,7 +103,7 @@ public class NetconfITSecureTest extends AbstractNetconfConfigTest {
                 return true;
             }
         })
-                        .setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString()))
+                        .setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString()))
                         .setIdleTimeout(Integer.MAX_VALUE)
                         .createSshProxyServerConfiguration());
     }
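
The hunk above creates the PEM host-key file once in setUp() and reuses it, instead of creating a fresh temp file on every server start. A small standalone sketch of the Files.createTempFile / deleteOnExit pattern it relies on (illustrative only, not project code):

    import java.io.File;
    import java.nio.file.Files;

    public class TempKeyFileExample {
        public static void main(String[] args) throws Exception {
            // Create an empty temp file that the SSH server can fill with a generated key pair
            File sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
            // Ask the JVM to remove the file when it exits normally
            sshKeyPair.deleteOnExit();
            System.out.println("Temporary key-pair file: " + sshKeyPair.getAbsolutePath());
        }
    }
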
index f76bf4f4c37ebf96c99ba0eeccc90c9280f773da..56015b962519a9fd4945e81a7875f1beb062da0f 100644 (file)
@@ -7,6 +7,7 @@
  */
 package org.opendaylight.controller.netconf.it;
 
+import com.google.common.base.Preconditions;
 import java.io.IOException;
 import java.io.InputStream;
 import java.security.KeyManagementException;
@@ -15,13 +16,10 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
-
 import javax.net.ssl.KeyManagerFactory;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManagerFactory;
 
-import com.google.common.base.Preconditions;
-
 public final class SSLUtil {
 
     private SSLUtil() {}
index 3b3f71b0ed0f3aad635d88d6b0bb93a3e4229aa3..6ab3cd4b6e638999ed9fa1787b8b3b8652e09422 100644 (file)
@@ -24,7 +24,7 @@ import org.w3c.dom.Element;
 
 public class Get extends AbstractNetconfOperation {
 
-    private static final Logger logger = LoggerFactory.getLogger(Get.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Get.class);
     private final NetconfMonitoringService netconfMonitor;
 
     public Get(final NetconfMonitoringService netconfMonitor) {
@@ -72,7 +72,7 @@ public class Get extends AbstractNetconfOperation {
             return innerResult;
         } catch (final RuntimeException e) {
             final String errorMessage = "Get operation for netconf-state subtree failed";
-            logger.warn(errorMessage, e);
+            LOG.warn(errorMessage, e);
 
             throw new NetconfDocumentedException(errorMessage, NetconfDocumentedException.ErrorType.application,
                     NetconfDocumentedException.ErrorTag.operation_failed,
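
This and many of the following hunks rename the SLF4J logger fields from "logger" to the constant-style "LOG". A minimal, self-contained sketch of that convention, including the trailing-Throwable form used in the warn call above (hypothetical class, not part of this change):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LogConventionExample {
        // static final logger fields follow the constant naming convention
        private static final Logger LOG = LoggerFactory.getLogger(LogConventionExample.class);

        void run() {
            try {
                throw new IllegalStateException("simulated failure");
            } catch (RuntimeException e) {
                // {} placeholders are filled left to right; a trailing Throwable also prints its stack trace
                LOG.warn("Get operation for {} failed", "netconf-state", e);
            }
        }
    }
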
index 9d332c644029ff3c26a2725703e4b348ff046cf0..1411350cd3dce0fec1d4854e3eeabe580c4aef4d 100644 (file)
@@ -16,7 +16,7 @@ import org.slf4j.LoggerFactory;
 
 public class NetconfMonitoringActivator implements BundleActivator {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringActivator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringActivator.class);
 
     private NetconfMonitoringServiceTracker monitor;
 
@@ -32,7 +32,7 @@ public class NetconfMonitoringActivator implements BundleActivator {
             try {
                 monitor.close();
             } catch (Exception e) {
-                logger.warn("Ignoring exception while closing {}", monitor, e);
+                LOG.warn("Ignoring exception while closing {}", monitor, e);
             }
         }
     }
index f99ae54e6dafac6e9ea01f36e85ab46261d9ce67..0b4d1c2688d64307b51815b0d3b441f0ff9722ec 100644 (file)
@@ -20,7 +20,7 @@ import org.slf4j.LoggerFactory;
 
 public class NetconfMonitoringServiceTracker extends ServiceTracker<NetconfMonitoringService, NetconfMonitoringService> {
 
-    private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringServiceTracker.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringServiceTracker.class);
 
     private ServiceRegistration<NetconfOperationServiceFactory> reg;
 
@@ -51,7 +51,7 @@ public class NetconfMonitoringServiceTracker extends ServiceTracker<NetconfMonit
             try {
                 reg.unregister();
             } catch (final Exception e) {
-                logger.warn("Ignoring exception while unregistering {}", reg, e);
+                LOG.warn("Ignoring exception while unregistering {}", reg, e);
             }
         }
     }
index 962ad17b66c280c5e7fb68cfc279ff30d79c2285..45f02af78994d1e87b8dc1c86c7b81e7e57d9f1a 100644 (file)
@@ -7,14 +7,13 @@
  */
 package org.opendaylight.controller.netconf.monitoring.xml;
 
-import org.opendaylight.controller.netconf.monitoring.xml.model.NetconfState;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.transform.dom.DOMResult;
+import org.opendaylight.controller.netconf.monitoring.xml.model.NetconfState;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
 
 public class JaxBSerializer {
 
@@ -29,7 +28,7 @@ public class JaxBSerializer {
             res = new DOMResult();
             marshaller.marshal(monitoringModel, res);
         } catch (final JAXBException e) {
-           throw new RuntimeException("Unable to serialize netconf state " + monitoringModel, e);
+            throw new RuntimeException("Unable to serialize netconf state " + monitoringModel, e);
         }
         return ((Document)res.getNode()).getDocumentElement();
     }
index 16b38eca5172a96a6254713dbefb6dbd44324949..259fd73197b44d32b758720f775d7573e36df4a5 100644 (file)
@@ -11,13 +11,12 @@ package org.opendaylight.controller.netconf.monitoring.xml.model;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Collections2;
+import java.util.Collection;
 import javax.annotation.Nonnull;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.Yang;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
-
 import javax.annotation.Nullable;
 import javax.xml.bind.annotation.XmlElement;
-import java.util.Collection;
+import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.Yang;
+import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
 
 final class MonitoringSchema {
 
index 8685ef72208b15fd77845b7486724846d67780a4..c70db895bf19300be20d30e2d1b1b4ababbdccd0 100644 (file)
@@ -7,10 +7,9 @@
  */
 package org.opendaylight.controller.netconf.monitoring.xml.model;
 
+import com.google.common.base.Joiner;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlTransient;
-
-import com.google.common.base.Joiner;
 import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.extension.rev131210.Session1;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.sessions.Session;
index 8f5a1c029d8611b9f75c7b4570fc63002d9f1e18..58f5b1897b4f0d67d4544f9048b3afb60d05baca 100644 (file)
@@ -9,6 +9,11 @@ package org.opendaylight.controller.netconf.monitoring.xml.model;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Collections2;
+import java.util.Collection;
+import javax.annotation.Nullable;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlRootElement;
 import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
 import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.Schemas;
@@ -16,12 +21,6 @@ import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.mon
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
 import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.sessions.Session;
 
-import javax.annotation.Nullable;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlElementWrapper;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.Collection;
-
 @XmlRootElement(name = MonitoringConstants.NETCONF_MONITORING_XML_ROOT_ELEMENT)
 public final class NetconfState {
 
index a96c3a3836e55309751011af56bcea32f39d069a..1e510d4ed0395117e2be2406ae627fb00a2bb919 100644 (file)
@@ -15,8 +15,7 @@
 )
 package org.opendaylight.controller.netconf.monitoring.xml.model;
 
-import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
-
 import javax.xml.bind.annotation.XmlNs;
 import javax.xml.bind.annotation.XmlNsForm;
-import javax.xml.bind.annotation.XmlSchema;
\ No newline at end of file
+import javax.xml.bind.annotation.XmlSchema;
+import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
\ No newline at end of file
index b5b9f3c060f5bdf6c98abf832c256ab894ad95c2..6664f3e733bc399bd08d710dfbed882a36baa24e 100644 (file)
@@ -14,6 +14,7 @@ import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+
 import java.util.Arrays;
 import org.junit.Before;
 import org.junit.Test;
index b8e35e934b23dc8c064eebe809be12320f488aee..74e6747ab79608b9c13fefd37348d0e3e18555e4 100644 (file)
@@ -8,14 +8,14 @@
 
 package org.opendaylight.controller.netconf.monitoring.osgi;
 
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
 import com.google.common.base.Optional;
 import org.junit.Test;
 import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
 import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
 
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-
 public class NetconfMonitoringOperationServiceTest {
     @Test
     public void testGetters() throws Exception {
index b992b0e44f363deaba01eeca50d9194d40fefc81..de716ccf6635e9a0eebb1c312170cc02658a21cd 100644 (file)
@@ -9,11 +9,12 @@
 package org.opendaylight.controller.netconf.monitoring.osgi;
 
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.anyString;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+
 import java.util.Hashtable;
 import org.junit.Before;
 import org.junit.Test;
index e0799d3a2aff8e006032bcec24c96146e7a35ae7..4eaeee9d78bf09c9dc8ef554de7745bf427d453a 100644 (file)
@@ -26,7 +26,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public abstract class AbstractNetconfSession<S extends NetconfSession, L extends NetconfSessionListener<S>> extends AbstractProtocolSession<NetconfMessage> implements NetconfSession, NetconfExiSession {
-    private static final Logger logger = LoggerFactory.getLogger(AbstractNetconfSession.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractNetconfSession.class);
     private final L sessionListener;
     private final long sessionId;
     private boolean up = false;
@@ -39,7 +39,7 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
         this.sessionListener = sessionListener;
         this.channel = channel;
         this.sessionId = sessionId;
-        logger.debug("Session {} created", sessionId);
+        LOG.debug("Session {} created", sessionId);
     }
 
     protected abstract S thisInstance();
@@ -53,7 +53,7 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
 
     @Override
     protected void handleMessage(final NetconfMessage netconfMessage) {
-        logger.debug("handling incoming message");
+        LOG.debug("handling incoming message");
         sessionListener.onMessage(thisInstance(), netconfMessage);
     }
 
@@ -61,8 +61,8 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
     public ChannelFuture sendMessage(final NetconfMessage netconfMessage) {
         final ChannelFuture future = channel.writeAndFlush(netconfMessage);
         if (delayedEncoder != null) {
-                replaceMessageEncoder(delayedEncoder);
-                delayedEncoder = null;
+            replaceMessageEncoder(delayedEncoder);
+            delayedEncoder = null;
         }
 
         return future;
@@ -70,7 +70,7 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
 
     @Override
     protected void endOfInput() {
-        logger.debug("Session {} end of input detected while session was in state {}", toString(), isUp() ? "up"
+        LOG.debug("Session {} end of input detected while session was in state {}", toString(), isUp() ? "up"
                 : "initialized");
         if (isUp()) {
             this.sessionListener.onSessionDown(thisInstance(), new IOException("End of input detected. Close the session."));
@@ -79,7 +79,7 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
 
     @Override
     protected void sessionUp() {
-        logger.debug("Session {} up", toString());
+        LOG.debug("Session {} up", toString());
         sessionListener.onSessionUp(thisInstance());
         this.up = true;
     }
@@ -114,12 +114,12 @@ public abstract class AbstractNetconfSession<S extends NetconfSession, L extends
         try {
             exiParams = EXIParameters.fromXmlElement(XmlElement.fromDomDocument(startExiMessage.getDocument()));
         } catch (final EXIOptionsException e) {
-            logger.warn("Unable to parse EXI parameters from {} om session {}", XmlUtil.toString(startExiMessage.getDocument()), this, e);
+            LOG.warn("Unable to parse EXI parameters from {} om session {}", XmlUtil.toString(startExiMessage.getDocument()), this, e);
             throw new IllegalArgumentException(e);
         }
         final NetconfEXICodec exiCodec = new NetconfEXICodec(exiParams.getOptions());
         addExiHandlers(exiCodec);
-        logger.debug("Session {} EXI handlers added to pipeline", this);
+        LOG.debug("Session {} EXI handlers added to pipeline", this);
     }
 
     protected abstract void addExiHandlers(NetconfEXICodec exiCodec);
index de3f732b25763fa19d6b481a64bfbcf4d8bcf87c..e5c3c12b992246516165e94f1f0dbb48de3ddefd 100644 (file)
@@ -42,9 +42,9 @@ import org.w3c.dom.Document;
 import org.w3c.dom.NodeList;
 
 public abstract class AbstractNetconfSessionNegotiator<P extends NetconfSessionPreferences, S extends AbstractNetconfSession<S, L>, L extends NetconfSessionListener<S>>
-extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
+    extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
 
-    private static final Logger logger = LoggerFactory.getLogger(AbstractNetconfSessionNegotiator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractNetconfSessionNegotiator.class);
 
     public static final String NAME_OF_EXCEPTION_HANDLER = "lastExceptionHandler";
 
@@ -85,7 +85,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
                 @Override
                 public void operationComplete(Future<? super Channel> future) {
                     Preconditions.checkState(future.isSuccess(), "Ssl handshake was not successful");
-                    logger.debug("Ssl handshake complete");
+                    LOG.debug("Ssl handshake complete");
                     start();
                 }
             });
@@ -105,7 +105,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
 
     private void start() {
         final NetconfMessage helloMessage = this.sessionPreferences.getHelloMessage();
-        logger.debug("Session negotiation started with hello message {} on channel {}", XmlUtil.toString(helloMessage.getDocument()), channel);
+        LOG.debug("Session negotiation started with hello message {} on channel {}", XmlUtil.toString(helloMessage.getDocument()), channel);
 
         channel.pipeline().addLast(NAME_OF_EXCEPTION_HANDLER, new ExceptionHandlingInboundChannelHandler());
 
@@ -121,7 +121,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
                 synchronized (this) {
                     if (state != State.ESTABLISHED) {
 
-                        logger.debug("Connection timeout after {}, session is in state {}", timeout, state);
+                        LOG.debug("Connection timeout after {}, session is in state {}", timeout, state);
 
                         // Do not fail negotiation if promise is done or canceled
                         // It would result in setting result of the promise second time and that throws exception
@@ -133,9 +133,9 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
                                 @Override
                                 public void operationComplete(ChannelFuture future) throws Exception {
                                     if(future.isSuccess()) {
-                                        logger.debug("Channel {} closed: success", future.channel());
+                                        LOG.debug("Channel {} closed: success", future.channel());
                                     } else {
-                                        logger.warn("Channel {} closed: fail", future.channel());
+                                        LOG.warn("Channel {} closed: fail", future.channel());
                                     }
                                 }
                             });
@@ -223,7 +223,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
     protected abstract S getSession(L sessionListener, Channel channel, NetconfHelloMessage message) throws NetconfDocumentedException;
 
     private synchronized void changeState(final State newState) {
-        logger.debug("Changing state from : {} to : {} for channel: {}", state, newState, channel);
+        LOG.debug("Changing state from : {} to : {} for channel: {}", state, newState, channel);
         Preconditions.checkState(isStateChangePermitted(state, newState), "Cannot change state from %s to %s for chanel %s", state,
                 newState, channel);
         this.state = newState;
@@ -249,7 +249,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
         if (state == State.OPEN_WAIT && newState == State.FAILED) {
             return true;
         }
-        logger.debug("Transition from {} to {} is not allowed", state, newState);
+        LOG.debug("Transition from {} to {} is not allowed", state, newState);
         return false;
     }
 
@@ -259,7 +259,7 @@ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
     private final class ExceptionHandlingInboundChannelHandler extends ChannelInboundHandlerAdapter {
         @Override
         public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
-            logger.warn("An exception occurred during negotiation with {}", channel.remoteAddress(), cause);
+            LOG.warn("An exception occurred during negotiation with {}", channel.remoteAddress(), cause);
             cancelTimeout();
             negotiationFailed(cause);
             changeState(State.FAILED);
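
The negotiator above funnels every state change through changeState()/isStateChangePermitted(). A hypothetical sketch of that whitelist-of-transitions pattern (OPEN_WAIT, ESTABLISHED and FAILED appear in the hunk; IDLE and everything else here is invented for the sketch):

    public final class StateGuardExample {
        enum State { IDLE, OPEN_WAIT, ESTABLISHED, FAILED }

        static boolean isStateChangePermitted(State current, State next) {
            // Only explicitly whitelisted transitions are allowed; everything else is rejected
            if (current == State.IDLE && next == State.OPEN_WAIT) {
                return true;
            }
            if (current == State.OPEN_WAIT && next == State.ESTABLISHED) {
                return true;
            }
            if (current == State.OPEN_WAIT && next == State.FAILED) {
                return true;
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(isStateChangePermitted(State.IDLE, State.OPEN_WAIT));   // true
            System.out.println(isStateChangePermitted(State.ESTABLISHED, State.IDLE)); // false
        }
    }
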
index a66e45882fda1f8397134a68060769c32cd4bc24..c4cddb802ea35dd51b17d49d832114c404ae5085 100644 (file)
@@ -8,16 +8,14 @@
 
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import com.google.common.base.Preconditions;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToByteEncoder;
-
 import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
 import org.opendaylight.controller.netconf.util.messages.NetconfMessageHeader;
 
-import com.google.common.base.Preconditions;
-
 public class ChunkedFramingMechanismEncoder extends MessageToByteEncoder<ByteBuf> {
     public static final int DEFAULT_CHUNK_SIZE = 8192;
     public static final int MIN_CHUNK_SIZE = 128;
index 92d8f12c1adf08576d696a1d766955865a7e1a43..514b2e129eb7f8eb6df5a704a6b4ce30bf39e8b0 100644 (file)
@@ -11,7 +11,6 @@ package org.opendaylight.controller.netconf.nettyutil.handler;
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToByteEncoder;
-
 import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
 
 public class EOMFramingMechanismEncoder extends MessageToByteEncoder<ByteBuf> {
index 35f7679496259f0b3b46267ad5b2ded0a69511ff..30589daa22ac65ab9724c3a842f815613f7c75d5 100644 (file)
@@ -8,23 +8,22 @@
 
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import io.netty.buffer.ByteBuf;
+import io.netty.handler.codec.MessageToByteEncoder;
 import org.opendaylight.controller.netconf.util.messages.FramingMechanism;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import io.netty.buffer.ByteBuf;
-import io.netty.handler.codec.MessageToByteEncoder;
-
 public final class FramingMechanismHandlerFactory {
 
-    private static final Logger logger = LoggerFactory.getLogger(FramingMechanismHandlerFactory.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FramingMechanismHandlerFactory.class);
 
     private FramingMechanismHandlerFactory() {
         // not called - private constructor for utility class
     }
 
     public static MessageToByteEncoder<ByteBuf> createHandler(FramingMechanism framingMechanism) {
-        logger.debug("{} framing mechanism was selected.", framingMechanism);
+        LOG.debug("{} framing mechanism was selected.", framingMechanism);
         if (framingMechanism == FramingMechanism.EOM) {
             return new EOMFramingMechanismEncoder();
         } else {
index e2a745f3fb876ba296b835210ec10486d5c5d86d..6a7b752023fef30717e1067d57d029ea7c6eee2f 100644 (file)
@@ -8,19 +8,17 @@
 
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
-import java.util.List;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.CompositeByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.ByteToMessageDecoder;
+import java.util.List;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class NetconfChunkAggregator extends ByteToMessageDecoder {
-    private final static Logger logger = LoggerFactory.getLogger(NetconfChunkAggregator.class);
+    private final static Logger LOG = LoggerFactory.getLogger(NetconfChunkAggregator.class);
     private static final String GOT_PARAM_WHILE_WAITING_FOR_PARAM = "Got byte {} while waiting for {}";
     private static final String GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM = "Got byte {} while waiting for {}-{}";
     public static final int DEFAULT_MAXIMUM_CHUNK_SIZE = 16 * 1024 * 1024;
@@ -44,21 +42,21 @@ public class NetconfChunkAggregator extends ByteToMessageDecoder {
 
     private void checkNewLine(byte b,String errorMessage){
         if (b != '\n') {
-            logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'\n');
+            LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'\n');
             throw new IllegalStateException(errorMessage);
         }
     }
 
     private void checkHash(byte b,String errorMessage){
         if (b != '#') {
-            logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'#');
+            LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'#');
             throw new IllegalStateException(errorMessage);
         }
     }
 
     private void checkChunkSize(){
         if (chunkSize > maxChunkSize) {
-            logger.debug("Parsed chunk size {}, maximum allowed is {}", chunkSize, maxChunkSize);
+            LOG.debug("Parsed chunk size {}, maximum allowed is {}", chunkSize, maxChunkSize);
             throw new IllegalStateException("Maximum chunk size exceeded");
         }
 
@@ -101,7 +99,7 @@ public class NetconfChunkAggregator extends ByteToMessageDecoder {
                 }
 
                 if (b < '0' || b > '9') {
-                    logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'0', (byte)'9');
+                    LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'0', (byte)'9');
                     throw new IllegalStateException("Invalid chunk size encountered");
                 }
 
@@ -118,7 +116,7 @@ public class NetconfChunkAggregator extends ByteToMessageDecoder {
                  *        comes through.
                  */
                 if (in.readableBytes() < chunkSize) {
-                    logger.debug("Buffer has {} bytes, need {} to complete chunk", in.readableBytes(), chunkSize);
+                    LOG.debug("Buffer has {} bytes, need {} to complete chunk", in.readableBytes(), chunkSize);
                     in.discardReadBytes();
                     return;
                 }
@@ -175,7 +173,7 @@ public class NetconfChunkAggregator extends ByteToMessageDecoder {
         } else if (b == '#') {
             state = State.FOOTER_FOUR;
         } else {
-            logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte) '#', (byte) '1', (byte) '9');
+            LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte) '#', (byte) '1', (byte) '9');
             throw new IllegalStateException("Malformed chunk footer encountered (byte 2)");
         }
     }
@@ -193,7 +191,7 @@ public class NetconfChunkAggregator extends ByteToMessageDecoder {
 
     private static int processHeaderLengthFirst(byte b) {
         if (!isHeaderLengthFirst(b)) {
-            logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'1', (byte)'9');
+            LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'1', (byte)'9');
             throw new IllegalStateException("Invalid chunk size encountered (byte 0)");
         }
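
NetconfChunkAggregator above parses RFC 6242 chunked framing, where each chunk is introduced by "\n#<size-in-bytes>\n" and the message ends with "\n##\n". A tiny standalone sketch that frames a payload that way (illustrative only, not project code):

    import java.nio.charset.StandardCharsets;

    public class ChunkFramingExample {
        public static void main(String[] args) {
            String payload = "<rpc message-id=\"1\"/>";
            int size = payload.getBytes(StandardCharsets.UTF_8).length;
            // chunk header, chunk data, end-of-chunks marker
            String framed = "\n#" + size + "\n" + payload + "\n##\n";
            System.out.println(framed);
        }
    }
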
 
index 57134af3c74f213884ca701109b8454d0d1b32df..db3dcafbdef93eb4a007fb4e1189daa1ad9e3957 100644 (file)
@@ -7,14 +7,18 @@
  */
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import com.google.common.base.Preconditions;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.ByteToMessageDecoder;
 import java.io.InputStream;
 import java.util.List;
-
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMResult;
 import javax.xml.transform.sax.SAXTransformerFactory;
 import javax.xml.transform.sax.TransformerHandler;
-
 import org.opendaylight.controller.netconf.api.NetconfMessage;
 import org.openexi.sax.EXIReader;
 import org.slf4j.Logger;
@@ -22,14 +26,6 @@ import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
-import com.google.common.base.Preconditions;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.ByteBufInputStream;
-import io.netty.buffer.ByteBufUtil;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.handler.codec.ByteToMessageDecoder;
-
 public final class NetconfEXIToMessageDecoder extends ByteToMessageDecoder {
 
     private static final Logger LOG = LoggerFactory.getLogger(NetconfEXIToMessageDecoder.class);
@@ -60,7 +56,7 @@ public final class NetconfEXIToMessageDecoder extends ByteToMessageDecoder {
         final EXIReader r = codec.getReader();
 
         final SAXTransformerFactory transformerFactory
-                = (SAXTransformerFactory) TransformerFactory.newInstance();
+            = (SAXTransformerFactory) TransformerFactory.newInstance();
         final TransformerHandler handler = transformerFactory.newTransformerHandler();
         r.setContentHandler(handler);
 
index f39e2c425d039cca549fd574607c52af02eced9e..8af5cf3ff4fd7d7b2d1eb45cb1da4f4d7e220028 100644 (file)
@@ -7,22 +7,18 @@
  */
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
-
 import java.io.IOException;
-
 import javax.xml.transform.TransformerException;
-
 import org.opendaylight.controller.netconf.api.NetconfMessage;
 import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessage;
 import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Charsets;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-
 /**
  * Customized NetconfMessageToXMLEncoder that serializes additional header with
  * session metadata along with
index d810a870ff29f68b59b19788bf3064e0218cb148..8ce9411cbd9c00ecf31639301454ac1f871b3179 100644 (file)
@@ -7,32 +7,28 @@
  */
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Optional;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufOutputStream;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToByteEncoder;
-
 import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
-
 import javax.xml.transform.OutputKeys;
 import javax.xml.transform.Transformer;
 import javax.xml.transform.TransformerException;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
-
 import org.opendaylight.controller.netconf.api.NetconfMessage;
 import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Comment;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Optional;
-
 public class NetconfMessageToXMLEncoder extends MessageToByteEncoder<NetconfMessage> {
     private static final Logger LOG = LoggerFactory.getLogger(NetconfMessageToXMLEncoder.class);
     private static final TransformerFactory FACTORY = TransformerFactory.newInstance();
index efe4861577b0ce3173a182d8005f37e186242ae8..197ae5f424469b453c9fe5c789181a55fd776628 100644 (file)
@@ -7,19 +7,20 @@
  */
 package org.opendaylight.controller.netconf.nettyutil.handler;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufUtil;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.ByteToMessageDecoder;
-
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
-
 import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
 import org.opendaylight.controller.netconf.api.NetconfMessage;
 import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessage;
@@ -28,10 +29,6 @@ import org.opendaylight.controller.netconf.util.xml.XmlUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Charsets;
-import com.google.common.collect.ImmutableList;
 import org.xml.sax.SAXException;
 
 /**
index ccd7dead9f1a9a782743d423650fd5153005e1eb..859006877486292258f77409dbbdb3e48d826903 100644 (file)
@@ -7,6 +7,7 @@
  */
 package org.opendaylight.controller.netconf.nettyutil.handler.exi;
 
+import com.google.common.base.Preconditions;
 import org.opendaylight.controller.netconf.util.xml.XmlElement;
 import org.openexi.proc.common.AlignmentType;
 import org.openexi.proc.common.EXIOptions;
@@ -14,8 +15,6 @@ import org.openexi.proc.common.EXIOptionsException;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 
-import com.google.common.base.Preconditions;
-
 public final class EXIParameters {
     private static final String EXI_PARAMETER_ALIGNMENT = "alignment";
     static final String EXI_PARAMETER_BYTE_ALIGNED = "byte-aligned";
index fa7d0900edc805ebd45f3b2edce8a21d46cfb031..14d753f1f8b59c25a7ea871e3bfe5e430c1d9970 100644 (file)
@@ -8,9 +8,13 @@
 
 package org.opendaylight.controller.netconf.nettyutil.handler.ssh.client;
 
+import com.google.common.base.Preconditions;
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelOutboundHandlerAdapter;
+import io.netty.channel.ChannelPromise;
 import java.io.IOException;
 import java.net.SocketAddress;
-
 import org.apache.sshd.ClientChannel;
 import org.apache.sshd.ClientSession;
 import org.apache.sshd.SshClient;
@@ -23,19 +27,12 @@ import org.opendaylight.controller.netconf.nettyutil.handler.ssh.authentication.
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelOutboundHandlerAdapter;
-import io.netty.channel.ChannelPromise;
-
 /**
  * Netty SSH handler class. Acts as interface between Netty and SSH library.
  */
 public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
 
-    private static final Logger logger = LoggerFactory.getLogger(AsyncSshHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AsyncSshHandler.class);
     public static final String SUBSYSTEM = "netconf";
 
     public static final SshClient DEFAULT_CLIENT = SshClient.setUpDefaultClient();
@@ -77,7 +74,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
     }
 
     private void startSsh(final ChannelHandlerContext ctx, final SocketAddress address) {
-        logger.debug("Starting SSH to {} on channel: {}", address, ctx.channel());
+        LOG.debug("Starting SSH to {} on channel: {}", address, ctx.channel());
 
         final ConnectFuture sshConnectionFuture = sshClient.connect(authenticationHandler.getUsername(), address);
         sshConnectionFuture.addListener(new SshFutureListener<ConnectFuture>() {
@@ -94,7 +91,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
 
     private synchronized void handleSshSessionCreated(final ConnectFuture future, final ChannelHandlerContext ctx) {
         try {
-            logger.trace("SSH session created on channel: {}", ctx.channel());
+            LOG.trace("SSH session created on channel: {}", ctx.channel());
 
             session = future.getSession();
             final AuthFuture authenticateFuture = authenticationHandler.authenticate(session);
@@ -115,7 +112,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
 
     private synchronized void handleSshAuthenticated(final ClientSession session, final ChannelHandlerContext ctx) {
         try {
-            logger.debug("SSH session authenticated on channel: {}, server version: {}", ctx.channel(), session.getServerVersion());
+            LOG.debug("SSH session authenticated on channel: {}, server version: {}", ctx.channel(), session.getServerVersion());
 
             channel = session.createSubsystemChannel(SUBSYSTEM);
             channel.setStreaming(ClientChannel.Streaming.Async);
@@ -137,7 +134,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
     }
 
     private synchronized void handleSshChanelOpened(final ChannelHandlerContext ctx) {
-        logger.trace("SSH subsystem channel opened successfully on channel: {}", ctx.channel());
+        LOG.trace("SSH subsystem channel opened successfully on channel: {}", ctx.channel());
 
         connectPromise.setSuccess();
         connectPromise = null;
@@ -164,7 +161,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
     }
 
     private synchronized void handleSshSetupFailure(final ChannelHandlerContext ctx, final Throwable e) {
-        logger.warn("Unable to setup SSH connection on channel: {}", ctx.channel(), e);
+        LOG.warn("Unable to setup SSH connection on channel: {}", ctx.channel(), e);
         connectPromise.setFailure(e);
         connectPromise = null;
         throw new IllegalStateException("Unable to setup SSH connection on channel: " + ctx.channel(), e);
@@ -211,7 +208,7 @@ public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
         channel = null;
         promise.setSuccess();
 
-        logger.debug("SSH session closed on channel: {}", ctx.channel());
+        LOG.debug("SSH session closed on channel: {}", ctx.channel());
         ctx.fireChannelInactive();
     }
 
index ada15583cdef9cc86eb6a6d3b45bcf8f9be8defc..ca212e7c4739b1ce66ebc122c07d955c6e82d730 100644 (file)
@@ -23,7 +23,7 @@ import org.slf4j.LoggerFactory;
  */
 public final class AsyncSshHandlerReader implements SshFutureListener<IoReadFuture>, AutoCloseable {
 
-    private static final Logger logger = LoggerFactory.getLogger(AsyncSshHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AsyncSshHandlerReader.class);
 
     private static final int BUFFER_SIZE = 8192;
 
@@ -49,9 +49,9 @@ public final class AsyncSshHandlerReader implements SshFutureListener<IoReadFutu
         if(future.getException() != null) {
             if(asyncOut.isClosed() || asyncOut.isClosing()) {
                 // Ssh dropped
-                logger.debug("Ssh session dropped on channel: {}", channelId, future.getException());
+                LOG.debug("Ssh session dropped on channel: {}", channelId, future.getException());
             } else {
-                logger.warn("Exception while reading from SSH remote on channel {}", channelId, future.getException());
+                LOG.warn("Exception while reading from SSH remote on channel {}", channelId, future.getException());
             }
             invokeDisconnect();
             return;
@@ -59,8 +59,8 @@ public final class AsyncSshHandlerReader implements SshFutureListener<IoReadFutu
 
         if (future.getRead() > 0) {
             final ByteBuf msg = Unpooled.wrappedBuffer(buf.array(), 0, future.getRead());
-            if(logger.isTraceEnabled()) {
-                logger.trace("Reading message on channel: {}, message: {}", channelId, AsyncSshHandlerWriter.byteBufToString(msg));
+            if(LOG.isTraceEnabled()) {
+                LOG.trace("Reading message on channel: {}, message: {}", channelId, AsyncSshHandlerWriter.byteBufToString(msg));
             }
             readHandler.onMessageRead(msg);
 
index 8e639bd47cc1b24e18176e365d50df2d3185ae5c..1e976ce6a1394c31e0bbd8dd8746463193a3ac5c 100644 (file)
@@ -30,7 +30,7 @@ import org.slf4j.LoggerFactory;
  */
 public final class AsyncSshHandlerWriter implements AutoCloseable {
 
-    private static final Logger logger = LoggerFactory
+    private static final Logger LOG = LoggerFactory
             .getLogger(AsyncSshHandlerWriter.class);
 
     // public static final int MAX_PENDING_WRITES = 1000;
@@ -68,34 +68,34 @@ public final class AsyncSshHandlerWriter implements AutoCloseable {
 
     private void writeWithPendingDetection(final ChannelHandlerContext ctx, final ChannelPromise promise, final ByteBuf byteBufMsg) {
         try {
-            if (logger.isTraceEnabled()) {
-                logger.trace("Writing request on channel: {}, message: {}", ctx.channel(), byteBufToString(byteBufMsg));
+            if (LOG.isTraceEnabled()) {
+                LOG.trace("Writing request on channel: {}, message: {}", ctx.channel(), byteBufToString(byteBufMsg));
             }
             asyncIn.write(toBuffer(byteBufMsg)).addListener(new SshFutureListener<IoWriteFuture>() {
 
-                        @Override
-                        public void operationComplete(final IoWriteFuture future) {
-                            if (logger.isTraceEnabled()) {
-                                logger.trace("Ssh write request finished on channel: {} with result: {}: and ex:{}, message: {}",
+                @Override
+                public void operationComplete(final IoWriteFuture future) {
+                    if (LOG.isTraceEnabled()) {
+                        LOG.trace("Ssh write request finished on channel: {} with result: {}: and ex:{}, message: {}",
                                         ctx.channel(), future.isWritten(), future.getException(), byteBufToString(byteBufMsg));
-                            }
-
-                            // Notify success or failure
-                            if (future.isWritten()) {
-                                promise.setSuccess();
-                            } else {
-                                logger.warn("Ssh write request failed on channel: {} for message: {}", ctx.channel(), byteBufToString(byteBufMsg), future.getException());
-                                promise.setFailure(future.getException());
-                            }
-
-                            // Not needed anymore, release
-                            byteBufMsg.release();
-
-                            // Check pending queue and schedule next
-                            // At this time we are guaranteed that we are not in pending state anymore so the next request should succeed
-                            writePendingIfAny();
-                        }
-                    });
+                    }
+
+                    // Notify success or failure
+                    if (future.isWritten()) {
+                        promise.setSuccess();
+                    } else {
+                        LOG.warn("Ssh write request failed on channel: {} for message: {}", ctx.channel(), byteBufToString(byteBufMsg), future.getException());
+                        promise.setFailure(future.getException());
+                    }
+
+                    // Not needed anymore, release
+                    byteBufMsg.release();
+
+                    // Check pending queue and schedule next
+                    // At this time we are guaranteed that we are not in pending state anymore so the next request should succeed
+                    writePendingIfAny();
+                }
+            });
         } catch (final WritePendingException e) {
             queueRequest(ctx, byteBufMsg, promise);
         }
@@ -109,8 +109,8 @@ public final class AsyncSshHandlerWriter implements AutoCloseable {
         // In case of pending, reschedule next message from queue
         final PendingWriteRequest pendingWrite = pending.poll();
         final ByteBuf msg = pendingWrite.msg;
-        if (logger.isTraceEnabled()) {
-            logger.trace("Writing pending request on channel: {}, message: {}", pendingWrite.ctx.channel(), byteBufToString(msg));
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Writing pending request on channel: {}, message: {}", pendingWrite.ctx.channel(), byteBufToString(msg));
         }
 
         writeWithPendingDetection(pendingWrite.ctx, pendingWrite.promise, msg);
@@ -125,13 +125,13 @@ public final class AsyncSshHandlerWriter implements AutoCloseable {
 
     private void queueRequest(final ChannelHandlerContext ctx, final ByteBuf msg, final ChannelPromise promise) {
 //        try {
-        logger.debug("Write pending on channel: {}, queueing, current queue size: {}", ctx.channel(), pending.size());
-        if (logger.isTraceEnabled()) {
-            logger.trace("Queueing request due to pending: {}", byteBufToString(msg));
+        LOG.debug("Write pending on channel: {}, queueing, current queue size: {}", ctx.channel(), pending.size());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Queueing request due to pending: {}", byteBufToString(msg));
         }
         new PendingWriteRequest(ctx, msg, promise).pend(pending);
 //        } catch (final Exception ex) {
-//            logger.warn("Unable to queue write request on channel: {}. Setting fail for the request: {}", ctx.channel(), ex, byteBufToString(msg));
+//            LOG.warn("Unable to queue write request on channel: {}. Setting fail for the request: {}", ctx.channel(), ex, byteBufToString(msg));
 //            msg.release();
 //            promise.setFailure(ex);
 //        }
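
The writer above wraps its byteBufToString() dumps in LOG.isTraceEnabled() so the conversion is only paid when TRACE logging is actually on. A minimal, hypothetical sketch of that guard pattern (names and the hex-dump helper are made up):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TraceGuardExample {
        private static final Logger LOG = LoggerFactory.getLogger(TraceGuardExample.class);

        static String expensiveDump(byte[] payload) {
            StringBuilder sb = new StringBuilder();
            for (byte b : payload) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }

        void onWrite(byte[] payload) {
            // Only pay for the hex dump when TRACE is enabled
            if (LOG.isTraceEnabled()) {
                LOG.trace("Writing message: {}", expensiveDump(payload));
            }
        }
    }
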
index 8bc0fb8e8242efcc640ea4a1b1ffc4a9f88818f6..4f804abfe8e4478fffb89b69e817afc1157665d0 100644 (file)
@@ -10,6 +10,7 @@ package org.opendaylight.controller.netconf.nettyutil.handler;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+
 import com.google.common.collect.Lists;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
index 15ba3b41f8443aefa99a40fbca7de84267777626..70186f39a45b71fcb0c2e472c71862c97ce174c8 100644 (file)
@@ -51,8 +51,8 @@ public class EXIParametersTest {
         fullOptions.setPreservePIs(true);
 
         return Arrays.asList(new Object[][]{
-                {noChangeXml, new EXIOptions()},
-                {fullOptionsXml, fullOptions},
+            {noChangeXml, new EXIOptions()},
+            {fullOptionsXml, fullOptions},
         });
     }
 
index 47abe96687540969174d50d7ced6affc4c3b3f0e..9d831729fced6a5341fb3f5d1793624bf73ce1a7 100644 (file)
@@ -53,8 +53,8 @@ public class NetconfStartExiMessageTest {
         fullOptions.setPreservePIs(true);
 
         return Arrays.asList(new Object[][]{
-                {noChangeXml, new EXIOptions()},
-                {fullOptionsXml, fullOptions},
+            {noChangeXml, new EXIOptions()},
+            {fullOptionsXml, fullOptions},
         });
     }
 
index b4c9e1e95083dd94b21d57318ef989dca355bdd2..73f2287c8bce6151b9dd33f8fa0bc7ed8d39ac40 100644 (file)
@@ -22,6 +22,7 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.verifyZeroInteractions;
+
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
index e642e073a31bb3fb001d4c446dc66dd87d413087..00a5b4c7c469e3492326fbddf2cbce01c459c243 100644 (file)
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
  */
 public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
 
-    private static final Logger logger = LoggerFactory.getLogger(RemoteNetconfCommand.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RemoteNetconfCommand.class);
 
     private final EventLoopGroup clientEventGroup;
     private final LocalAddress localAddress;
@@ -100,7 +100,7 @@ public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
 
     @Override
     public void start(final Environment env) throws IOException {
-        logger.trace("Establishing internal connection to netconf server for client: {}", getClientAddress());
+        LOG.trace("Establishing internal connection to netconf server for client: {}", getClientAddress());
 
         final Bootstrap clientBootstrap = new Bootstrap();
         clientBootstrap.group(clientEventGroup).channel(LocalChannel.class);
@@ -120,7 +120,7 @@ public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
                 if(future.isSuccess()) {
                     clientChannel = clientChannelFuture.channel();
                 } else {
-                    logger.warn("Unable to establish internal connection to netconf server for client: {}", getClientAddress());
+                    LOG.warn("Unable to establish internal connection to netconf server for client: {}", getClientAddress());
                     Preconditions.checkNotNull(callback, "Exit callback must be set");
                     callback.onExit(1, "Unable to establish internal connection to netconf server for client: "+ getClientAddress());
                 }
@@ -130,7 +130,7 @@ public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
 
     @Override
     public void destroy() {
-        logger.trace("Releasing internal connection to netconf server for client: {} on channel: {}",
+        LOG.trace("Releasing internal connection to netconf server for client: {} on channel: {}",
                 getClientAddress(), clientChannel);
 
         clientChannelFuture.cancel(true);
@@ -140,7 +140,7 @@ public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
                 @Override
                 public void operationComplete(final ChannelFuture future) throws Exception {
                     if (future.isSuccess() == false) {
-                        logger.warn("Unable to release internal connection to netconf server on channel: {}", clientChannel);
+                        LOG.warn("Unable to release internal connection to netconf server on channel: {}", clientChannel);
                     }
                 }
             });
index 2b2b3b3e8187009b6976306293c3c16060a26242..5fa39749110c9fb13eaa8cac139b03f15e970dc8 100644 (file)
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
  */
 final class SshProxyClientHandler extends ChannelInboundHandlerAdapter {
 
-    private static final Logger logger = LoggerFactory.getLogger(SshProxyClientHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SshProxyClientHandler.class);
 
     private final IoInputStream in;
     private final IoOutputStream out;
@@ -64,8 +64,8 @@ final class SshProxyClientHandler extends ChannelInboundHandlerAdapter {
         }, new AsyncSshHandlerReader.ReadMsgHandler() {
             @Override
             public void onMessageRead(final ByteBuf msg) {
-                if(logger.isTraceEnabled()) {
-                    logger.trace("Forwarding message for client: {} on channel: {}, message: {}",
+                if(LOG.isTraceEnabled()) {
+                    LOG.trace("Forwarding message for client: {} on channel: {}, message: {}",
                             netconfHelloMessageAdditionalHeader.getAddress(), ctx.channel(), AsyncSshHandlerWriter.byteBufToString(msg));
                 }
                 // Just forward to delegate
@@ -83,12 +83,12 @@ final class SshProxyClientHandler extends ChannelInboundHandlerAdapter {
 
     @Override
     public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
-       asyncSshHandlerWriter.write(ctx, msg, ctx.newPromise());
+        asyncSshHandlerWriter.write(ctx, msg, ctx.newPromise());
     }
 
     @Override
     public void channelInactive(final ChannelHandlerContext ctx) throws Exception {
-        logger.debug("Internal connection to netconf server was dropped for client: {} on channel: ",
+        LOG.debug("Internal connection to netconf server was dropped for client: {} on channel: ",
                 netconfHelloMessageAdditionalHeader.getAddress(), ctx.channel());
         callback.onExit(1, "Internal connection to netconf server was dropped for client: " +
                 netconfHelloMessageAdditionalHeader.getAddress() + " on channel: " + ctx.channel());
index 20088fe87603f353fcd560f81e1e1b55e0d2855b..e41a3eaa2cc9f5ea9555bd6d71b9103534bdca5a 100644 (file)
@@ -70,11 +70,13 @@ public class SshProxyServer implements AutoCloseable {
 
     private static Map<String, String> getProperties(final SshProxyServerConfiguration sshProxyServerConfiguration) {
         return new HashMap<String, String>()
-        {{
-            put(ServerFactoryManager.IDLE_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
-            // TODO make auth timeout configurable on its own
-            put(ServerFactoryManager.AUTH_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
-        }};
+        {
+            {
+                put(ServerFactoryManager.IDLE_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
+                // TODO make auth timeout configurable on its own
+                put(ServerFactoryManager.AUTH_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
+            }
+        };
     }
 
     @Override
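
The hunk above only re-indents the anonymous-subclass (double-brace) map initializer. For comparison, a hedged sketch of a plain-HashMap equivalent (the string keys below are placeholders standing in for the ServerFactoryManager constants, not their real values):

    import java.util.HashMap;
    import java.util.Map;

    public class ProxyPropertiesExample {
        static Map<String, String> getProperties(long idleTimeoutMillis) {
            Map<String, String> props = new HashMap<>();
            // placeholder keys; the real code uses ServerFactoryManager.IDLE_TIMEOUT / AUTH_TIMEOUT
            props.put("idle-timeout", String.valueOf(idleTimeoutMillis));
            props.put("auth-timeout", String.valueOf(idleTimeoutMillis));
            return props;
        }

        public static void main(String[] args) {
            System.out.println(getProperties(5000L));
        }
    }
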
index 97e611c0d23a7eea5ecd5cdceb2a54bea54e15cf..c473b70eee7a981dadd71f671ec462abcab215cd 100644 (file)
@@ -21,7 +21,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 final class AuthProviderTracker implements ServiceTrackerCustomizer<AuthProvider, AuthProvider>, PasswordAuthenticator {
-    private static final Logger logger = LoggerFactory.getLogger(AuthProviderTracker.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AuthProviderTracker.class);
 
     private final BundleContext bundleContext;
 
@@ -37,7 +37,7 @@ final class AuthProviderTracker implements ServiceTrackerCustomizer<AuthProvider
 
     @Override
     public AuthProvider addingService(final ServiceReference<AuthProvider> reference) {
-        logger.trace("Service {} added", reference);
+        LOG.trace("Service {} added", reference);
         final AuthProvider authService = bundleContext.getService(reference);
         final Integer newServicePreference = getPreference(reference);
         if(isBetter(newServicePreference)) {
@@ -66,15 +66,15 @@ final class AuthProviderTracker implements ServiceTrackerCustomizer<AuthProvider
         final AuthProvider authService = bundleContext.getService(reference);
         final Integer newServicePreference = getPreference(reference);
         if(isBetter(newServicePreference)) {
-            logger.trace("Replacing modified service {} in netconf SSH.", reference);
+            LOG.trace("Replacing modified service {} in netconf SSH.", reference);
             this.authProvider = authService;
         }
     }
 
     @Override
     public void removedService(final ServiceReference<AuthProvider> reference, final AuthProvider service) {
-        logger.trace("Removing service {} from netconf SSH. " +
-                "SSH won't authenticate users until AuthProvider service will be started.", reference);
+        LOG.trace("Removing service {} from netconf SSH. {}", reference,
+                " SSH won't authenticate users until AuthProvider service will be started.");
         maxPreference = null;
         this.authProvider = null;
     }
index 5fc04eee8305279d9c3ab3dec67933c6bfe6435b..b098329e4a01b13c33e737cf7315ad4746f13f92 100644 (file)
@@ -33,7 +33,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class NetconfSSHActivator implements BundleActivator {
-    private static final Logger logger = LoggerFactory.getLogger(NetconfSSHActivator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(NetconfSSHActivator.class);
 
     private static final java.lang.String ALGORITHM = "RSA";
     private static final int KEY_SIZE = 4096;
@@ -87,12 +87,12 @@ public class NetconfSSHActivator implements BundleActivator {
         final Optional<InetSocketAddress> maybeSshSocketAddress = NetconfConfigUtil.extractNetconfServerAddress(bundleContext, InfixProp.ssh);
 
         if (maybeSshSocketAddress.isPresent() == false) {
-            logger.trace("SSH bridge not configured");
+            LOG.trace("SSH bridge not configured");
             return null;
         }
 
         final InetSocketAddress sshSocketAddress = maybeSshSocketAddress.get();
-        logger.trace("Starting netconf SSH bridge at {}", sshSocketAddress);
+        LOG.trace("Starting netconf SSH bridge at {}", sshSocketAddress);
 
         final LocalAddress localAddress = NetconfConfigUtil.getNetconfLocalAddress();
 
index b768e2b1d1ce08d8cef9585c538837157405bdc6..bdaee7957d277ef7bff5c9aafb81e1e0fcd85cb5 100644 (file)
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
  * the server.
  */
 public class EchoClient extends Thread {
-    private static final Logger logger = LoggerFactory.getLogger(EchoClient.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EchoClient.class);
 
 
     private final ChannelInitializer<LocalChannel> channelInitializer;
@@ -63,11 +63,11 @@ public class EchoClient extends Thread {
             // Wait until the connection is closed.
             f.channel().closeFuture().sync();
         } catch (Exception e) {
-            logger.error("Error in client", e);
+            LOG.error("Error in client", e);
             throw new RuntimeException("Error in client", e);
         } finally {
             // Shut down the event loop to terminate all threads.
-            logger.info("Client is shutting down");
+            LOG.info("Client is shutting down");
             group.shutdownGracefully();
         }
     }
index 2a5791710a34cd7869ca4250cab4717c33b32f05..1d3cd5325cb99f77c40a270f8963abd2ed2e3a12 100644 (file)
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
  * the server.
  */
 public class EchoClientHandler extends ChannelInboundHandlerAdapter implements ChannelFutureListener {
-    private static final Logger logger = LoggerFactory.getLogger(EchoClientHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EchoClientHandler.class);
 
     private ChannelHandlerContext ctx;
     private final StringBuilder fromServer = new StringBuilder();
@@ -39,7 +39,7 @@ public class EchoClientHandler extends ChannelInboundHandlerAdapter implements C
     @Override
     public synchronized void channelActive(ChannelHandlerContext ctx) {
         checkState(this.ctx == null);
-        logger.info("channelActive");
+        LOG.info("channelActive");
         this.ctx = ctx;
         state = State.CONNECTED;
     }
@@ -54,14 +54,14 @@ public class EchoClientHandler extends ChannelInboundHandlerAdapter implements C
         ByteBuf bb = (ByteBuf) msg;
         String string = bb.toString(Charsets.UTF_8);
         fromServer.append(string);
-        logger.info(">{}", string);
+        LOG.info(">{}", string);
         bb.release();
     }
 
     @Override
     public synchronized void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
         // Close the connection when an exception is raised.
-        logger.warn("Unexpected exception from downstream.", cause);
+        LOG.warn("Unexpected exception from downstream.", cause);
         checkState(this.ctx.equals(ctx));
         ctx.close();
         this.ctx = null;
@@ -84,7 +84,7 @@ public class EchoClientHandler extends ChannelInboundHandlerAdapter implements C
     public synchronized void operationComplete(ChannelFuture future) throws Exception {
         checkState(state == State.CONNECTING);
         if (future.isSuccess()) {
-            logger.trace("Successfully connected, state will be switched in channelActive");
+            LOG.trace("Successfully connected, state will be switched in channelActive");
         } else {
             state = State.FAILED_TO_CONNECT;
         }
index ec89d75f297720c88aa7e1c76fb90857c79b62d3..5d184411f9bd01d6a0e475c6001440c136c4ff94 100644 (file)
@@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory;
  * Echoes back any received data from a client.
  */
 public class EchoServer implements Runnable {
-    private static final Logger logger = LoggerFactory.getLogger(EchoServer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EchoServer.class);
 
     public void run() {
         // Configure the server.
@@ -76,7 +76,7 @@ public class EchoServer implements Runnable {
             if (message == null ||  "exit".equalsIgnoreCase(message)) {
                 break;
             }
-            logger.debug("Got '{}'", message);
+            LOG.debug("Got '{}'", message);
             clientHandler.write(message);
         } while (true);
         System.exit(0);
index 1286ec6875b3b61bf393775082a447ae2cc55463..abb6658b11a7dcfff730ded38b096aa22349bd6a 100644 (file)
@@ -23,26 +23,26 @@ import org.slf4j.LoggerFactory;
 @Sharable
 public class EchoServerHandler extends ChannelInboundHandlerAdapter {
 
-    private static final Logger logger = LoggerFactory.getLogger(EchoServerHandler.class.getName());
+    private static final Logger LOG = LoggerFactory.getLogger(EchoServerHandler.class);
     private String fromLastNewLine = "";
     private final Splitter splitter = Splitter.onPattern("\r?\n");
     @Override
     public void channelActive(ChannelHandlerContext ctx) throws Exception {
-        logger.debug("sleep start");
+        LOG.debug("sleep start");
         Thread.sleep(1000);
-        logger.debug("sleep done");
+        LOG.debug("sleep done");
     }
 
     @Override
     public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
         ByteBuf byteBuf = (ByteBuf) msg;
         String message = byteBuf.toString(Charsets.UTF_8);
-        logger.info("writing back '{}'", message);
+        LOG.info("writing back '{}'", message);
         ctx.write(msg);
         fromLastNewLine += message;
         for (String line : splitter.split(fromLastNewLine)) {
             if ("quit".equals(line)) {
-                logger.info("closing server ctx");
+                LOG.info("closing server ctx");
                 ctx.flush();
                 ctx.close();
                 break;
@@ -55,7 +55,7 @@ public class EchoServerHandler extends ChannelInboundHandlerAdapter {
 
     @Override
     public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
-        logger.debug("flushing");
+        LOG.debug("flushing");
         ctx.flush();
     }
 }
index ecab21256ec9bfc9f8c6122360fd22b62cd88d5b..e633ab2332dde4a7426b7ebb8cef9b1efdc73000 100644 (file)
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class ProxyServerHandler extends ChannelInboundHandlerAdapter {
-    private static final Logger logger = LoggerFactory.getLogger(ProxyServerHandler.class.getName());
+    private static final Logger LOG = LoggerFactory.getLogger(ProxyServerHandler.class);
     private final Bootstrap clientBootstrap;
     private final LocalAddress localAddress;
 
@@ -53,32 +53,32 @@ public class ProxyServerHandler extends ChannelInboundHandlerAdapter {
 
     @Override
     public void channelInactive(ChannelHandlerContext ctx) {
-        logger.info("channelInactive - closing client connection");
+        LOG.info("channelInactive - closing client connection");
         clientChannel.close();
     }
 
     @Override
     public void channelRead(ChannelHandlerContext ctx, final Object msg) {
-        logger.debug("Writing to client {}", msg);
+        LOG.debug("Writing to client {}", msg);
         clientChannel.write(msg);
     }
 
     @Override
     public void channelReadComplete(ChannelHandlerContext ctx) {
-        logger.debug("flushing");
+        LOG.debug("flushing");
         clientChannel.flush();
     }
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
         // Close the connection when an exception is raised.
-        logger.warn("Unexpected exception from downstream.", cause);
+        LOG.warn("Unexpected exception from downstream.", cause);
         ctx.close();
     }
 }
 
 class ProxyClientHandler extends ChannelInboundHandlerAdapter {
-    private static final Logger logger = LoggerFactory.getLogger(ProxyClientHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ProxyClientHandler.class);
 
     private final ChannelHandlerContext remoteCtx;
 
@@ -89,33 +89,33 @@ class ProxyClientHandler extends ChannelInboundHandlerAdapter {
 
     @Override
     public void channelActive(ChannelHandlerContext ctx) {
-        logger.info("client active");
+        LOG.info("client active");
     }
 
     @Override
     public void channelRead(ChannelHandlerContext ctx, Object msg) {
         ByteBuf bb = (ByteBuf) msg;
-        logger.info(">{}", bb.toString(Charsets.UTF_8));
+        LOG.info(">{}", bb.toString(Charsets.UTF_8));
         remoteCtx.write(msg);
     }
 
     @Override
     public void channelReadComplete(ChannelHandlerContext ctx) {
-        logger.debug("Flushing server ctx");
+        LOG.debug("Flushing server ctx");
         remoteCtx.flush();
     }
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
         // Close the connection when an exception is raised.
-        logger.warn("Unexpected exception from downstream", cause);
+        LOG.warn("Unexpected exception from downstream", cause);
         ctx.close();
     }
 
     // called both when local or remote connection dies
     @Override
     public void channelInactive(ChannelHandlerContext ctx) {
-        logger.debug("channelInactive() called, closing remote client ctx");
+        LOG.debug("channelInactive() called, closing remote client ctx");
         remoteCtx.close();
     }
 }
index 34b236b4611f0e8586388e7ff22b1022effc2cd0..2802488170ef9cc9ef6d27b3846592b19219ca9b 100644 (file)
@@ -19,6 +19,7 @@ import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.channel.socket.nio.NioSocketChannel;
 import io.netty.util.HashedWheelTimer;
+import java.io.File;
 import java.net.InetSocketAddress;
 import java.nio.file.Files;
 import java.util.concurrent.ExecutorService;
@@ -41,7 +42,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class SSHTest {
-    public static final Logger logger = LoggerFactory.getLogger(SSHTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SSHTest.class);
     public static final String AHOJ = "ahoj\n";
 
     private static EventLoopGroup nettyGroup;
@@ -67,6 +68,8 @@ public class SSHTest {
 
     @Test
     public void test() throws Exception {
+        File sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+        sshKeyPair.deleteOnExit();
         new Thread(new EchoServer(), "EchoServer").start();
 
         final InetSocketAddress addr = new InetSocketAddress("127.0.0.1", 10831);
@@ -77,7 +80,7 @@ public class SSHTest {
                     public boolean authenticate(final String username, final String password, final ServerSession session) {
                         return true;
                     }
-                }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
+                }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
 
         final EchoClientHandler echoClientHandler = connectClient(addr);
 
@@ -86,7 +89,7 @@ public class SSHTest {
             Thread.sleep(500);
         }
         assertTrue(echoClientHandler.isConnected());
-        logger.info("connected, writing to client");
+        LOG.info("connected, writing to client");
         echoClientHandler.write(AHOJ);
 
         // check that server sent back the same string
@@ -99,7 +102,7 @@ public class SSHTest {
             final String read = echoClientHandler.read();
             assertTrue(read + " should end with " + AHOJ, read.endsWith(AHOJ));
         } finally {
-            logger.info("Closing socket");
+            LOG.info("Closing socket");
             sshProxyServer.close();
         }
     }
index 38aa2e71ace1850cdb42a02e7b018e09ab387170..7a76285dd2a125ac7cbabbf72b69ff90166ac5b9 100644 (file)
@@ -14,6 +14,7 @@ import static org.mockito.Mockito.doReturn;
 
 import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
+import java.io.File;
 import java.net.InetSocketAddress;
 import java.nio.file.Files;
 import java.util.concurrent.ExecutorService;
@@ -47,8 +48,9 @@ public class SSHServerTest {
     private static final String PASSWORD = "netconf";
     private static final String HOST = "127.0.0.1";
     private static final int PORT = 1830;
-    private static final Logger logger = LoggerFactory.getLogger(SSHServerTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SSHServerTest.class);
 
+    private File sshKeyPair;
     private SshProxyServer server;
 
     @Mock
@@ -59,12 +61,15 @@ public class SSHServerTest {
 
     @Before
     public void setUp() throws Exception {
+        sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+        sshKeyPair.deleteOnExit();
+
         MockitoAnnotations.initMocks(this);
         doReturn(null).when(mockedContext).createFilter(anyString());
         doNothing().when(mockedContext).addServiceListener(any(ServiceListener.class), anyString());
         doReturn(new ServiceReference[0]).when(mockedContext).getServiceReferences(anyString(), anyString());
 
-        logger.info("Creating SSH server");
+        LOG.info("Creating SSH server");
 
         final InetSocketAddress addr = InetSocketAddress.createUnresolved(HOST, PORT);
         server = new SshProxyServer(minaTimerEx, clientGroup, nioExec);
@@ -74,8 +79,8 @@ public class SSHServerTest {
                     public boolean authenticate(final String username, final String password, final ServerSession session) {
                         return true;
                     }
-                }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
-        logger.info("SSH server started on " + PORT);
+                }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
+        LOG.info("SSH server started on {}", PORT);
     }
 
     @Test