</dependenciesToScan>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.8</version>
+ <executions>
+ <execution>
+ <id>unpack-static-documentation</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>generate-resources</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/generated-resources/swagger-api-documentation</outputDirectory>
+ <includeArtifactIds>sal-rest-docgen</includeArtifactIds>
+ <includes>**/explorer/css/**/*, **/explorer/images/**/*, **/explorer/lib/**/*, **/explorer/static/**/*</includes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-maven-plugin</artifactId>
+ <version>${yangtools.version}</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>maven-sal-api-gen-plugin</artifactId>
+ <version>${yangtools.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ <version>${yangtools.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-rest-docgen</artifactId>
+ <version>${mdsal.version}</version>
+ <type>jar</type>
+ </dependency>
+ </dependencies>
+ <executions>
+ <execution>
+ <goals>
+ <goal>generate-sources</goal>
+ </goals>
+ <configuration>
+ <yangFilesRootDir>src</yangFilesRootDir>
+ <codeGenerators>
+ <generator>
+ <codeGeneratorClass>org.opendaylight.controller.sal.rest.doc.impl.StaticDocGenerator</codeGeneratorClass>
+ <outputBaseDir>${project.build.directory}/generated-resources/swagger-api-documentation/explorer/static</outputBaseDir>
+ </generator>
+ </codeGenerators>
+ <inspectDependencies>true</inspectDependencies>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
<scm>
@Before
public void setUp() throws Exception {
file = Files.createTempFile("testFilePersist", ".txt").toFile();
+ file.deleteOnExit();
if (!file.exists()) {
return;
}
@Test
public void testNoLastConfig() throws Exception {
File file = Files.createTempFile("testFilePersist", ".txt").toFile();
+ file.deleteOnExit();
if (!file.exists()) {
return;
}
import java.io.File;
import java.util.List;
-
import org.opendaylight.controller.logback.config.loader.impl.LogbackConfigUtil;
import org.opendaylight.controller.logback.config.loader.impl.LogbackConfigurationLoader;
import org.osgi.framework.BundleActivator;
* logback configurations
*/
private static final String LOGBACK_CONFIG_D = "logback.config.d";
- private static Logger LOG = LoggerFactory.getLogger(Activator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Activator.class);
@Override
public void start(BundleContext context) {
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
*/
package org.opendaylight.controller.logback.config.loader.impl;
+import ch.qos.logback.classic.LoggerContext;
+import ch.qos.logback.classic.joran.JoranConfigurator;
+import ch.qos.logback.core.joran.spi.JoranException;
+import ch.qos.logback.core.util.StatusPrinter;
import java.io.File;
import java.net.URL;
-
import org.slf4j.ILoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import ch.qos.logback.classic.LoggerContext;
-import ch.qos.logback.classic.joran.JoranConfigurator;
-import ch.qos.logback.core.joran.spi.JoranException;
-import ch.qos.logback.core.util.StatusPrinter;
-
/**
* Logback configuration loader.
* Strategy:
import java.io.File;
import java.util.ArrayList;
import java.util.List;
-
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/** logback config root */
private static final String LOGBACK_D = "/logback.d";
- private static Logger LOG = LoggerFactory
+ private static final Logger LOG = LoggerFactory
.getLogger(LogbackConfigurationLoaderTest.class);
/**
*/
package org.opendaylight.controller.logback.config.loader.test;
-import java.util.ArrayList;
-import java.util.List;
-
import ch.qos.logback.classic.spi.LoggingEvent;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.Context;
import ch.qos.logback.core.filter.Filter;
import ch.qos.logback.core.spi.FilterReply;
import ch.qos.logback.core.status.Status;
+import java.util.ArrayList;
+import java.util.List;
/**
* dummy appender for collecting log messages
*/
public class Debugger {
- private static Logger LOG = LoggerFactory.getLogger(Debugger.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Debugger.class);
/**
* all logging
*/
public class Errorer {
- private static Logger LOG = LoggerFactory.getLogger(Errorer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Errorer.class);
/**
* all logging
*/
public class Informer {
- private static Logger LOG = LoggerFactory.getLogger(Informer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Informer.class);
/**
* all logging
*/
public class Tracer {
- private static Logger LOG = LoggerFactory.getLogger(Tracer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Tracer.class);
/**
* all logging
*/
public class Warner {
- private static Logger LOG = LoggerFactory.getLogger(Warner.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Warner.class);
/**
* all logging
package org.opendaylight.controller.cluster.datastore.node.utils.stream;
import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
import org.opendaylight.controller.cluster.datastore.node.utils.QNameFactory;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.Node;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.AugmentationNode;
-import org.opendaylight.yangtools.yang.data.api.schema.ChoiceNode;
-import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifierWithPredicates;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeWithValue;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
+import org.opendaylight.yangtools.yang.data.api.schema.LeafNode;
import org.opendaylight.yangtools.yang.data.api.schema.LeafSetEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.OrderedMapNode;
-import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.UnkeyedListNode;
import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.CollectionNodeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeAttrBuilder;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.DataContainerNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.ListNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeAttrBuilder;
+import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeContainerBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
private static final String REVISION_ARG = "?revision=";
- private final DataInputStream reader;
+ private final DataInput input;
private final Map<Integer, String> codedStringMap = new HashMap<>();
private QName lastLeafSetQName;
+ private NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier,
+ Object, LeafNode<Object>> leafBuilder;
+
+ private NormalizedNodeAttrBuilder<NodeWithValue, Object,
+ LeafSetEntryNode<Object>> leafSetEntryBuilder;
+
public NormalizedNodeInputStreamReader(InputStream stream) throws IOException {
Preconditions.checkNotNull(stream);
- reader = new DataInputStream(stream);
+ input = new DataInputStream(stream);
+ }
+
+ public NormalizedNodeInputStreamReader(DataInput input) throws IOException {
+ this.input = Preconditions.checkNotNull(input);
}
@Override
public NormalizedNode<?, ?> readNormalizedNode() throws IOException {
- NormalizedNode<?, ?> node = null;
-
// each node should start with a byte
- byte nodeType = reader.readByte();
+ byte nodeType = input.readByte();
if(nodeType == NodeTypes.END_NODE) {
LOG.debug("End node reached. return");
return null;
}
- else if(nodeType == NodeTypes.AUGMENTATION_NODE) {
- LOG.debug("Reading augmentation node. will create augmentation identifier");
- YangInstanceIdentifier.AugmentationIdentifier identifier =
- new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
- DataContainerNodeBuilder<YangInstanceIdentifier.AugmentationIdentifier, AugmentationNode> augmentationBuilder =
- Builders.augmentationBuilder().withNodeIdentifier(identifier);
- augmentationBuilder = addDataContainerChildren(augmentationBuilder);
- node = augmentationBuilder.build();
+ switch(nodeType) {
+ case NodeTypes.AUGMENTATION_NODE :
+ YangInstanceIdentifier.AugmentationIdentifier augIdentifier =
+ new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
- } else {
- if(nodeType == NodeTypes.LEAF_SET_ENTRY_NODE) {
- LOG.debug("Reading leaf set entry node. Will create NodeWithValue instance identifier");
+ LOG.debug("Reading augmentation node {} ", augIdentifier);
- // Read the object value
+ return addDataContainerChildren(Builders.augmentationBuilder().
+ withNodeIdentifier(augIdentifier)).build();
+
+ case NodeTypes.LEAF_SET_ENTRY_NODE :
Object value = readObject();
+ NodeWithValue leafIdentifier = new NodeWithValue(lastLeafSetQName, value);
+
+ LOG.debug("Reading leaf set entry node {}, value {}", leafIdentifier, value);
+
+ return leafSetEntryBuilder().withNodeIdentifier(leafIdentifier).withValue(value).build();
- YangInstanceIdentifier.NodeWithValue nodeWithValue = new YangInstanceIdentifier.NodeWithValue(
- lastLeafSetQName, value);
- node = Builders.leafSetEntryBuilder().withNodeIdentifier(nodeWithValue).
- withValue(value).build();
+ case NodeTypes.MAP_ENTRY_NODE :
+ NodeIdentifierWithPredicates entryIdentifier = new NodeIdentifierWithPredicates(
+ readQName(), readKeyValueMap());
- } else if(nodeType == NodeTypes.MAP_ENTRY_NODE) {
- LOG.debug("Reading map entry node. Will create node identifier with predicates.");
+ LOG.debug("Reading map entry node {} ", entryIdentifier);
- QName qName = readQName();
- YangInstanceIdentifier.NodeIdentifierWithPredicates nodeIdentifier =
- new YangInstanceIdentifier.NodeIdentifierWithPredicates(qName, readKeyValueMap());
- DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifierWithPredicates, MapEntryNode> mapEntryBuilder
- = Builders.mapEntryBuilder().withNodeIdentifier(nodeIdentifier);
+ return addDataContainerChildren(Builders.mapEntryBuilder().
+ withNodeIdentifier(entryIdentifier)).build();
- mapEntryBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifierWithPredicates,
- MapEntryNode>)addDataContainerChildren(mapEntryBuilder);
- node = mapEntryBuilder.build();
+ default :
+ return readNodeIdentifierDependentNode(nodeType, new NodeIdentifier(readQName()));
+ }
+ }
- } else {
- LOG.debug("Creating standard node identifier. ");
+ private NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier,
+ Object, LeafNode<Object>> leafBuilder() {
+ if(leafBuilder == null) {
+ leafBuilder = Builders.leafBuilder();
+ }
- QName qName = readQName();
- YangInstanceIdentifier.NodeIdentifier identifier = new YangInstanceIdentifier.NodeIdentifier(qName);
- node = readNodeIdentifierDependentNode(nodeType, identifier);
+ return leafBuilder;
+ }
- }
+ private NormalizedNodeAttrBuilder<NodeWithValue, Object,
+ LeafSetEntryNode<Object>> leafSetEntryBuilder() {
+ if(leafSetEntryBuilder == null) {
+ leafSetEntryBuilder = Builders.leafSetEntryBuilder();
}
- return node;
+
+ return leafSetEntryBuilder;
}
- private NormalizedNode<?, ?> readNodeIdentifierDependentNode(byte nodeType, YangInstanceIdentifier.NodeIdentifier identifier)
+ private NormalizedNode<?, ?> readNodeIdentifierDependentNode(byte nodeType, NodeIdentifier identifier)
throws IOException {
switch(nodeType) {
case NodeTypes.LEAF_NODE :
- LOG.debug("Read leaf node");
+ LOG.debug("Read leaf node {}", identifier);
// Read the object value
- NormalizedNodeAttrBuilder leafBuilder = Builders.leafBuilder();
- return leafBuilder.withNodeIdentifier(identifier).withValue(readObject()).build();
+ return leafBuilder().withNodeIdentifier(identifier).withValue(readObject()).build();
case NodeTypes.ANY_XML_NODE :
LOG.debug("Read xml node");
Node<?> value = (Node<?>) readObject();
- return Builders.anyXmlBuilder().withValue(value).build();
+ return Builders.anyXmlBuilder().withValue(value).build();
case NodeTypes.MAP_NODE :
- LOG.debug("Read map node");
- CollectionNodeBuilder<MapEntryNode, MapNode> mapBuilder = Builders.mapBuilder().withNodeIdentifier(identifier);
- mapBuilder = addMapNodeChildren(mapBuilder);
- return mapBuilder.build();
+ LOG.debug("Read map node {}", identifier);
+ return addDataContainerChildren(Builders.mapBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.CHOICE_NODE :
- LOG.debug("Read choice node");
- DataContainerNodeBuilder<YangInstanceIdentifier.NodeIdentifier, ChoiceNode> choiceBuilder =
- Builders.choiceBuilder().withNodeIdentifier(identifier);
- choiceBuilder = addDataContainerChildren(choiceBuilder);
- return choiceBuilder.build();
+ LOG.debug("Read choice node {}", identifier);
+ return addDataContainerChildren(Builders.choiceBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.ORDERED_MAP_NODE :
- LOG.debug("Reading ordered map node");
- CollectionNodeBuilder<MapEntryNode, OrderedMapNode> orderedMapBuilder =
- Builders.orderedMapBuilder().withNodeIdentifier(identifier);
- orderedMapBuilder = addMapNodeChildren(orderedMapBuilder);
- return orderedMapBuilder.build();
+ LOG.debug("Reading ordered map node {}", identifier);
+ return addDataContainerChildren(Builders.orderedMapBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.UNKEYED_LIST :
- LOG.debug("Read unkeyed list node");
- CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> unkeyedListBuilder =
- Builders.unkeyedListBuilder().withNodeIdentifier(identifier);
- unkeyedListBuilder = addUnkeyedListChildren(unkeyedListBuilder);
- return unkeyedListBuilder.build();
+ LOG.debug("Read unkeyed list node {}", identifier);
+ return addDataContainerChildren(Builders.unkeyedListBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.UNKEYED_LIST_ITEM :
- LOG.debug("Read unkeyed list item node");
- DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, UnkeyedListEntryNode> unkeyedListEntryBuilder
- = Builders.unkeyedListEntryBuilder().withNodeIdentifier(identifier);
-
- unkeyedListEntryBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, UnkeyedListEntryNode>)
- addDataContainerChildren(unkeyedListEntryBuilder);
- return unkeyedListEntryBuilder.build();
+ LOG.debug("Read unkeyed list item node {}", identifier);
+ return addDataContainerChildren(Builders.unkeyedListEntryBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.CONTAINER_NODE :
- LOG.debug("Read container node");
- DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, ContainerNode> containerBuilder =
- Builders.containerBuilder().withNodeIdentifier(identifier);
-
- containerBuilder = (DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, ContainerNode>)
- addDataContainerChildren(containerBuilder);
- return containerBuilder.build();
+ LOG.debug("Read container node {}", identifier);
+ return addDataContainerChildren(Builders.containerBuilder().
+ withNodeIdentifier(identifier)).build();
case NodeTypes.LEAF_SET :
- LOG.debug("Read leaf set node");
- ListNodeBuilder<Object, LeafSetEntryNode<Object>> leafSetBuilder =
- Builders.leafSetBuilder().withNodeIdentifier(identifier);
- leafSetBuilder = addLeafSetChildren(identifier.getNodeType(), leafSetBuilder);
- return leafSetBuilder.build();
+ LOG.debug("Read leaf set node {}", identifier);
+ return addLeafSetChildren(identifier.getNodeType(),
+ Builders.leafSetBuilder().withNodeIdentifier(identifier)).build();
default :
return null;
String localName = readCodedString();
String namespace = readCodedString();
String revision = readCodedString();
- String qName;
+
// Not using stringbuilder as compiler optimizes string concatenation of +
- if(revision != null){
- qName = "(" + namespace+ REVISION_ARG + revision + ")" +localName;
+ String qName;
+ if(!Strings.isNullOrEmpty(revision)) {
+ qName = "(" + namespace + REVISION_ARG + revision + ")" +localName;
} else {
- qName = "(" + namespace + ")" +localName;
+ qName = "(" + namespace + ")" + localName;
}
return QNameFactory.create(qName);
private String readCodedString() throws IOException {
- boolean readFromMap = reader.readBoolean();
- if(readFromMap) {
- return codedStringMap.get(reader.readInt());
- } else {
- String value = reader.readUTF();
- if(value != null) {
- codedStringMap.put(Integer.valueOf(codedStringMap.size()), value);
- }
+ byte valueType = input.readByte();
+ if(valueType == NormalizedNodeOutputStreamWriter.IS_CODE_VALUE) {
+ return codedStringMap.get(input.readInt());
+ } else if(valueType == NormalizedNodeOutputStreamWriter.IS_STRING_VALUE) {
+ String value = input.readUTF();
+ codedStringMap.put(Integer.valueOf(codedStringMap.size()), value);
return value;
}
+
+ return null;
}
private Set<QName> readQNameSet() throws IOException{
// Read the children count
- int count = reader.readInt();
+ int count = input.readInt();
Set<QName> children = new HashSet<>(count);
- for(int i = 0; i<count; i++) {
+ for(int i = 0; i < count; i++) {
children.add(readQName());
}
return children;
}
private Map<QName, Object> readKeyValueMap() throws IOException {
- int count = reader.readInt();
+ int count = input.readInt();
Map<QName, Object> keyValueMap = new HashMap<>(count);
- for(int i = 0; i<count; i++) {
+ for(int i = 0; i < count; i++) {
keyValueMap.put(readQName(), readObject());
}
return keyValueMap;
}
private Object readObject() throws IOException {
- byte objectType = reader.readByte();
+ byte objectType = input.readByte();
switch(objectType) {
case ValueTypes.BITS_TYPE:
return readObjSet();
case ValueTypes.BOOL_TYPE :
- return reader.readBoolean();
+ return input.readBoolean();
case ValueTypes.BYTE_TYPE :
- return reader.readByte();
+ return input.readByte();
case ValueTypes.INT_TYPE :
- return reader.readInt();
+ return input.readInt();
case ValueTypes.LONG_TYPE :
- return reader.readLong();
+ return input.readLong();
case ValueTypes.QNAME_TYPE :
return readQName();
case ValueTypes.SHORT_TYPE :
- return reader.readShort();
+ return input.readShort();
case ValueTypes.STRING_TYPE :
- return reader.readUTF();
+ return input.readUTF();
case ValueTypes.BIG_DECIMAL_TYPE :
- return new BigDecimal(reader.readUTF());
+ return new BigDecimal(input.readUTF());
case ValueTypes.BIG_INTEGER_TYPE :
- return new BigInteger(reader.readUTF());
+ return new BigInteger(input.readUTF());
case ValueTypes.YANG_IDENTIFIER_TYPE :
- int size = reader.readInt();
-
- List<YangInstanceIdentifier.PathArgument> pathArguments = new ArrayList<>(size);
-
- for(int i=0; i<size; i++) {
- pathArguments.add(readPathArgument());
- }
- return YangInstanceIdentifier.create(pathArguments);
+ return readYangInstanceIdentifier();
default :
return null;
}
}
+ public YangInstanceIdentifier readYangInstanceIdentifier() throws IOException {
+ int size = input.readInt();
+
+ List<PathArgument> pathArguments = new ArrayList<>(size);
+
+ for(int i = 0; i < size; i++) {
+ pathArguments.add(readPathArgument());
+ }
+ return YangInstanceIdentifier.create(pathArguments);
+ }
+
private Set<String> readObjSet() throws IOException {
- int count = reader.readInt();
+ int count = input.readInt();
Set<String> children = new HashSet<>(count);
- for(int i = 0; i<count; i++) {
+ for(int i = 0; i < count; i++) {
children.add(readCodedString());
}
return children;
}
- private YangInstanceIdentifier.PathArgument readPathArgument() throws IOException {
+ private PathArgument readPathArgument() throws IOException {
// read Type
- int type = reader.readByte();
+ int type = input.readByte();
switch(type) {
return new YangInstanceIdentifier.AugmentationIdentifier(readQNameSet());
case PathArgumentTypes.NODE_IDENTIFIER :
- return new YangInstanceIdentifier.NodeIdentifier(readQName());
+ return new NodeIdentifier(readQName());
case PathArgumentTypes.NODE_IDENTIFIER_WITH_PREDICATES :
- return new YangInstanceIdentifier.NodeIdentifierWithPredicates(readQName(), readKeyValueMap());
+ return new NodeIdentifierWithPredicates(readQName(), readKeyValueMap());
case PathArgumentTypes.NODE_IDENTIFIER_WITH_VALUE :
- return new YangInstanceIdentifier.NodeWithValue(readQName(), readObject());
+ return new NodeWithValue(readQName(), readObject());
default :
return null;
}
}
+ @SuppressWarnings("unchecked")
private ListNodeBuilder<Object, LeafSetEntryNode<Object>> addLeafSetChildren(QName nodeType,
- ListNodeBuilder<Object, LeafSetEntryNode<Object>> builder)
- throws IOException {
+ ListNodeBuilder<Object, LeafSetEntryNode<Object>> builder) throws IOException {
LOG.debug("Reading children of leaf set");
return builder;
}
- private CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> addUnkeyedListChildren(
- CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> builder)
- throws IOException{
-
- LOG.debug("Reading children of unkeyed list");
- UnkeyedListEntryNode child = (UnkeyedListEntryNode)readNormalizedNode();
-
- while(child != null) {
- builder.withChild(child);
- child = (UnkeyedListEntryNode)readNormalizedNode();
- }
- return builder;
- }
-
- private DataContainerNodeBuilder addDataContainerChildren(DataContainerNodeBuilder builder)
- throws IOException {
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ private NormalizedNodeContainerBuilder addDataContainerChildren(
+ NormalizedNodeContainerBuilder builder) throws IOException {
LOG.debug("Reading data container (leaf nodes) nodes");
- DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?> child =
- (DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?>) readNormalizedNode();
+ NormalizedNode<?, ?> child = readNormalizedNode();
while(child != null) {
- builder.withChild(child);
- child =
- (DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?>) readNormalizedNode();
+ builder.addChild(child);
+ child = readNormalizedNode();
}
return builder;
}
-
-
- private CollectionNodeBuilder addMapNodeChildren(CollectionNodeBuilder builder)
- throws IOException {
- LOG.debug("Reading map node children");
- MapEntryNode child = (MapEntryNode)readNormalizedNode();
-
- while(child != null){
- builder.withChild(child);
- child = (MapEntryNode)readNormalizedNode();
- }
-
- return builder;
- }
-
-
- @Override
- public void close() throws IOException {
- reader.close();
- }
-
}
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
+import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
*
*/
-public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWriter{
+public class NormalizedNodeOutputStreamWriter implements NormalizedNodeStreamWriter {
private static final Logger LOG = LoggerFactory.getLogger(NormalizedNodeOutputStreamWriter.class);
- private final DataOutputStream writer;
+ static final byte IS_CODE_VALUE = 1;
+ static final byte IS_STRING_VALUE = 2;
+ static final byte IS_NULL_VALUE = 3;
+
+ private final DataOutput output;
private final Map<String, Integer> stringCodeMap = new HashMap<>();
public NormalizedNodeOutputStreamWriter(OutputStream stream) throws IOException {
Preconditions.checkNotNull(stream);
- writer = new DataOutputStream(stream);
+ output = new DataOutputStream(stream);
+ }
+
+ public NormalizedNodeOutputStreamWriter(DataOutput output) throws IOException {
+ this.output = Preconditions.checkNotNull(output);
}
@Override
public void leafSetEntryNode(Object value) throws IOException, IllegalArgumentException {
LOG.debug("Writing a new leaf set entry node");
- writer.writeByte(NodeTypes.LEAF_SET_ENTRY_NODE);
+ output.writeByte(NodeTypes.LEAF_SET_ENTRY_NODE);
writeObject(value);
}
Preconditions.checkNotNull(identifier, "Node identifier should not be null");
LOG.debug("Starting a new augmentation node");
- writer.writeByte(NodeTypes.AUGMENTATION_NODE);
+ output.writeByte(NodeTypes.AUGMENTATION_NODE);
writeQNameSet(identifier.getPossibleChildNames());
}
public void endNode() throws IOException, IllegalStateException {
LOG.debug("Ending the node");
- writer.writeByte(NodeTypes.END_NODE);
+ output.writeByte(NodeTypes.END_NODE);
}
@Override
public void close() throws IOException {
- writer.close();
}
@Override
public void flush() throws IOException {
- writer.flush();
}
private void startNode(final QName qName, byte nodeType) throws IOException {
Preconditions.checkNotNull(qName, "QName of node identifier should not be null.");
// First write the type of node
- writer.writeByte(nodeType);
+ output.writeByte(nodeType);
// Write Start Tag
writeQName(qName);
}
private void writeCodedString(String key) throws IOException {
Integer value = stringCodeMap.get(key);
-
if(value != null) {
- writer.writeBoolean(true);
- writer.writeInt(value);
+ output.writeByte(IS_CODE_VALUE);
+ output.writeInt(value);
} else {
if(key != null) {
+ output.writeByte(IS_STRING_VALUE);
stringCodeMap.put(key, Integer.valueOf(stringCodeMap.size()));
+ output.writeUTF(key);
+ } else {
+ output.writeByte(IS_NULL_VALUE);
}
- writer.writeBoolean(false);
- writer.writeUTF(key);
}
}
private void writeObjSet(Set<?> set) throws IOException {
if(!set.isEmpty()){
- writer.writeInt(set.size());
+ output.writeInt(set.size());
for(Object o : set){
if(o instanceof String){
writeCodedString(o.toString());
}
}
} else {
- writer.writeInt(0);
+ output.writeInt(0);
}
}
- private void writeYangInstanceIdentifier(YangInstanceIdentifier identifier) throws IOException {
+ public void writeYangInstanceIdentifier(YangInstanceIdentifier identifier) throws IOException {
Iterable<YangInstanceIdentifier.PathArgument> pathArguments = identifier.getPathArguments();
int size = Iterables.size(pathArguments);
- writer.writeInt(size);
+ output.writeInt(size);
for(YangInstanceIdentifier.PathArgument pathArgument : pathArguments) {
writePathArgument(pathArgument);
byte type = PathArgumentTypes.getSerializablePathArgumentType(pathArgument);
- writer.writeByte(type);
+ output.writeByte(type);
switch(type) {
case PathArgumentTypes.NODE_IDENTIFIER :
private void writeKeyValueMap(Map<QName, Object> keyValueMap) throws IOException {
if(keyValueMap != null && !keyValueMap.isEmpty()) {
- writer.writeInt(keyValueMap.size());
+ output.writeInt(keyValueMap.size());
Set<QName> qNameSet = keyValueMap.keySet();
for(QName qName : qNameSet) {
writeObject(keyValueMap.get(qName));
}
} else {
- writer.writeInt(0);
+ output.writeInt(0);
}
}
private void writeQNameSet(Set<QName> children) throws IOException {
// Write each child's qname separately, if list is empty send count as 0
if(children != null && !children.isEmpty()) {
- writer.writeInt(children.size());
+ output.writeInt(children.size());
for(QName qName : children) {
writeQName(qName);
}
} else {
LOG.debug("augmentation node does not have any child");
- writer.writeInt(0);
+ output.writeInt(0);
}
}
+ @SuppressWarnings("rawtypes")
private void writeObject(Object value) throws IOException {
byte type = ValueTypes.getSerializableType(value);
// Write object type first
- writer.writeByte(type);
+ output.writeByte(type);
switch(type) {
case ValueTypes.BOOL_TYPE:
- writer.writeBoolean((Boolean) value);
+ output.writeBoolean((Boolean) value);
break;
case ValueTypes.QNAME_TYPE:
writeQName((QName) value);
break;
case ValueTypes.INT_TYPE:
- writer.writeInt((Integer) value);
+ output.writeInt((Integer) value);
break;
case ValueTypes.BYTE_TYPE:
- writer.writeByte((Byte) value);
+ output.writeByte((Byte) value);
break;
case ValueTypes.LONG_TYPE:
- writer.writeLong((Long) value);
+ output.writeLong((Long) value);
break;
case ValueTypes.SHORT_TYPE:
- writer.writeShort((Short) value);
+ output.writeShort((Short) value);
break;
case ValueTypes.BITS_TYPE:
writeObjSet((Set<?>) value);
writeYangInstanceIdentifier((YangInstanceIdentifier) value);
break;
default:
- writer.writeUTF(value.toString());
+ output.writeUTF(value.toString());
break;
}
}
package org.opendaylight.controller.cluster.datastore.node.utils.stream;
-
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-
import java.io.IOException;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-
-public interface NormalizedNodeStreamReader extends AutoCloseable {
+/**
+ * Interface for a class that can read serialized NormalizedNode instances from a stream.
+ */
+public interface NormalizedNodeStreamReader {
NormalizedNode<?, ?> readNormalizedNode() throws IOException;
}
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
*/
-
package org.opendaylight.controller.cluster.datastore.node.utils.stream;
-
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
import org.apache.commons.lang.SerializationUtils;
import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.util.TestModel;
+import org.opendaylight.yangtools.yang.common.QName;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.NodeIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
+import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
public class NormalizedNodeStreamReaderWriterTest {
- final NormalizedNode<?, ?> input = TestModel.createTestContainer();
-
@Test
public void testNormalizedNodeStreamReaderWriter() throws IOException {
+ testNormalizedNodeStreamReaderWriter(TestModel.createTestContainer());
+
+ QName toaster = QName.create("http://netconfcentral.org/ns/toaster","2009-11-20","toaster");
+ QName darknessFactor = QName.create("http://netconfcentral.org/ns/toaster","2009-11-20","darknessFactor");
+ ContainerNode toasterNode = Builders.containerBuilder().
+ withNodeIdentifier(new NodeIdentifier(toaster)).
+ withChild(ImmutableNodes.leafNode(darknessFactor, "1000")).build();
+
+ testNormalizedNodeStreamReaderWriter(Builders.containerBuilder().
+ withNodeIdentifier(new NodeIdentifier(SchemaContext.NAME)).
+ withChild(toasterNode).build());
+ }
+
+ private void testNormalizedNodeStreamReaderWriter(NormalizedNode<?, ?> input) throws IOException {
+
byte[] byteData = null;
try(ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
}
- try(NormalizedNodeInputStreamReader reader = new NormalizedNodeInputStreamReader(
- new ByteArrayInputStream(byteData))) {
-
- NormalizedNode<?,?> node = reader.readNormalizedNode();
- Assert.assertEquals(input, node);
+ NormalizedNodeInputStreamReader reader = new NormalizedNodeInputStreamReader(
+ new ByteArrayInputStream(byteData));
- }
+ NormalizedNode<?,?> node = reader.readNormalizedNode();
+ Assert.assertEquals(input, node);
}
@Test
public void testWithSerializable() {
- SampleNormalizedNodeSerializable serializable = new SampleNormalizedNodeSerializable(input);
+ NormalizedNode<?, ?> input = TestModel.createTestContainer();
+ SampleNormalizedNodeSerializable serializable = new SampleNormalizedNodeSerializable(input );
SampleNormalizedNodeSerializable clone = (SampleNormalizedNodeSerializable)SerializationUtils.clone(serializable);
Assert.assertEquals(input, clone.getInput());
}
-
}
package org.opendaylight.controller.cluster.datastore.node.utils.stream;
-
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
-import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
-
+import java.io.DataInput;
+import java.io.DataOutput;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.URISyntaxException;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
+import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
public class SampleNormalizedNodeSerializable implements Serializable {
private static final long serialVersionUID = 1L;
}
private void readObject(final ObjectInputStream stream) throws IOException, ClassNotFoundException, URISyntaxException {
- NormalizedNodeStreamReader reader = new NormalizedNodeInputStreamReader(stream);
+ NormalizedNodeStreamReader reader = new NormalizedNodeInputStreamReader((DataInput)stream);
this.input = reader.readNormalizedNode();
}
private void writeObject(final ObjectOutputStream stream) throws IOException {
- NormalizedNodeStreamWriter writer = new NormalizedNodeOutputStreamWriter(stream);
+ NormalizedNodeStreamWriter writer = new NormalizedNodeOutputStreamWriter((DataOutput)stream);
NormalizedNodeWriter normalizedNodeWriter = NormalizedNodeWriter.forStreamWriter(writer);
normalizedNodeWriter.write(this.input);
<artifactId>yang-parser-impl</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-maven-plugin-spi</artifactId>
+ </dependency>
+
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.core</artifactId>
<instructions>
<Bundle-Name>MD SAL Rest Api Doc Generator</Bundle-Name>
- <Import-Package>*,
- com.sun.jersey.spi.container.servlet, org.eclipse.jetty.servlets</Import-Package>
+ <Import-Package>!org.apache.maven.plugin.logging,
+ !org.apache.maven.project,
+ !org.opendaylight.yangtools.yang2sources.spi,
+ *,
+ com.sun.jersey.spi.container.servlet, org.eclipse.jetty.servlets
+ </Import-Package>
<Bundle-Activator>org.opendaylight.controller.sal.rest.doc.DocProvider</Bundle-Activator>
<Web-ContextPath>/apidoc</Web-ContextPath>
</instructions>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.sal.rest.doc.impl;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+
+import org.apache.maven.plugin.logging.Log;
+import org.apache.maven.project.MavenProject;
+import org.opendaylight.controller.sal.rest.doc.swagger.ApiDeclaration;
+import org.opendaylight.controller.sal.rest.doc.swagger.Resource;
+import org.opendaylight.controller.sal.rest.doc.swagger.ResourceList;
+import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.opendaylight.yangtools.yang2sources.spi.CodeGenerator;
+
+import javax.ws.rs.core.UriInfo;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * This class gathers all yang defined {@link Module}s and generates Swagger compliant documentation.
+ */
+public class StaticDocGenerator extends ApiDocGenerator implements CodeGenerator {
+
+ private static final String DEFAULT_OUTPUT_BASE_DIR_PATH = "target" + File.separator + "generated-resources"
+ + File.separator + "swagger-api-documentation";
+
+ private static Logger _logger = LoggerFactory.getLogger(ApiDocGenerator.class);
+
+ private MavenProject mavenProject;
+ private File projectBaseDir;
+ private Map<String, String> additionalConfig;
+ private File resourceBaseDir;
+ private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+
+ /**
+ * Generates static Swagger documentation files for the given YANG modules.
+ * @param context schema context containing the resolved YANG modules
+ * @param outputDir base output directory (a default path is used if null)
+ * @param yangModules the set of modules to document
+ * @return the list of generated documentation files
+ * @throws IOException if an output file cannot be created or written
+ */
+ @Override
+ public Collection<File> generateSources(SchemaContext context, File outputDir, Set<Module> yangModules) throws IOException {
+ List<File> result = new ArrayList<>();
+
+ // Create Base Directory
+ final File outputBaseDir;
+ if (outputDir == null) {
+ outputBaseDir = new File(DEFAULT_OUTPUT_BASE_DIR_PATH);
+ }
+ else outputBaseDir = outputDir;
+ outputBaseDir.mkdirs();
+
+ // Create Resources directory
+ File resourcesDir = new File(outputBaseDir, "resources");
+ resourcesDir.mkdirs();
+
+ // Create JS file
+ File resourcesJsFile = new File(outputBaseDir, "resources.js");
+ resourcesJsFile.createNewFile();
+ BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(resourcesJsFile));
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+
+ // Write resource listing to JS file
+ ResourceList resourceList = super.getResourceListing(null, context, "");
+ String resourceListJson = mapper.writeValueAsString(resourceList);
+ resourceListJson = resourceListJson.replace("\'", "\\\'").replace("\\n", "\\\\n");
+ bufferedWriter.write("function getSpec() {\n\treturn \'" + resourceListJson + "\';\n}\n\n");
+
+ // Write resources/APIs to JS file and to disk
+ bufferedWriter.write("function jsonFor(resource) {\n\tswitch(resource) {\n");
+ for (Resource resource : resourceList.getApis()) {
+ int revisionIndex = resource.getPath().indexOf('(');
+ String name = resource.getPath().substring(0, revisionIndex);
+ String revision = resource.getPath().substring(revisionIndex + 1, resource.getPath().length() - 1);
+ ApiDeclaration apiDeclaration = super.getApiDeclaration(name, revision, null, context, "");
+ String json = mapper.writeValueAsString(apiDeclaration);
+ // Manually insert models because org.json.JSONObject cannot be serialized by ObjectMapper
+ json = json.replace("\"models\":{}", "\"models\":" + apiDeclaration.getModels().toString().replace("\\\"", "\""));
+ // Escape single quotes and new lines
+ json = json.replace("\'", "\\\'").replace("\\n", "\\\\n");
+ bufferedWriter.write("\t\tcase \"" + name + "(" + revision + ")\": return \'" + json + "\';\n");
+
+ File resourceFile = new File(resourcesDir, name + "(" + revision + ").json");
+ BufferedWriter resourceFileWriter = new BufferedWriter(new FileWriter(resourceFile));
+ resourceFileWriter.write(json);
+ resourceFileWriter.close();
+ result.add(resourceFile);
+ }
+ bufferedWriter.write("\t}\n\treturn \"\";\n}");
+ bufferedWriter.close();
+
+ result.add(resourcesJsFile);
+ return result;
+ }
+
+ @Override
+ protected String generatePath(UriInfo uriInfo, String name, String revision) {
+ if (uriInfo == null) {
+ return name + "(" + revision + ")";
+ }
+ return super.generatePath(uriInfo, name, revision);
+ }
+
+ @Override
+ protected String createBasePathFromUriInfo(UriInfo uriInfo) {
+ if (uriInfo == null) {
+ return RESTCONF_CONTEXT_ROOT;
+ }
+ return super.createBasePathFromUriInfo(uriInfo);
+ }
+
+ @Override
+ public void setLog(Log log) {
+ }
+
+ @Override
+ public void setAdditionalConfig(Map<String, String> additionalConfig) {
+ this.additionalConfig = additionalConfig;
+ }
+
+ @Override
+ public void setResourceBaseDir(File resourceBaseDir) {
+ this.resourceBaseDir = resourceBaseDir;
+ }
+
+ @Override
+ public void setMavenProject(MavenProject mavenProject) {
+ this.mavenProject = mavenProject;
+ this.projectBaseDir = mavenProject.getBasedir();
+ }
+}
--- /dev/null
+This component offers Swagger documentation of the RestConf APIs.
+
+This Swagger documentation can be accessed in two ways:
+I. Running server
+Open a browser and go to http://<host>:8181/apidoc/explorer/index.html
+
+II. Static documentation generation
+By adding a reference to the StaticDocGenerator class in any pom.xml,
+static documentation will be generated. This static documentation will
+document all the RestConf APIs for the YANG files in that artifact and
+all the YANG files in that artifact's dependencies.
+
+In order to generate static documentation for all resources,
+this should be placed in a downstream project.
+
+Below is what you would add to the <plugins> section under <build>.
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.8</version>
+ <executions>
+ <execution>
+ <id>unpack-static-documentation</id>
+ <goals>
+ <goal>unpack-dependencies</goal>
+ </goals>
+ <phase>generate-resources</phase>
+ <configuration>
+ <outputDirectory>${project.build.directory}/generated-resources/swagger-api-documentation</outputDirectory>
+ <includeArtifactIds>sal-rest-docgen</includeArtifactIds>
+ <includes>**/explorer/css/**/*, **/explorer/images/**/*, **/explorer/lib/**/*, **/explorer/static/**/*,</includes>
+ <excludeTransitive>true</excludeTransitive>
+ <ignorePermissions>false</ignorePermissions>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-maven-plugin</artifactId>
+ <version>${yangtools.version}</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>maven-sal-api-gen-plugin</artifactId>
+ <version>${yangtools.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools</groupId>
+ <artifactId>yang-binding</artifactId>
+ <version>${yangtools.version}</version>
+ <type>jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-rest-docgen</artifactId>
+ <version>${mdsal.version}</version>
+ <type>jar</type>
+ </dependency>
+ </dependencies>
+ <executions>
+ <execution>
+ <goals>
+ <goal>generate-sources</goal>
+ </goals>
+ <configuration>
+ <yangFilesRootDir>src</yangFilesRootDir>
+ <codeGenerators>
+ <generator>
+ <codeGeneratorClass>org.opendaylight.controller.sal.rest.doc.impl.StaticDocGenerator</codeGeneratorClass>
+ <outputBaseDir>${project.build.directory}/generated-resources/swagger-api-documentation/explorer/static</outputBaseDir>
+ </generator>
+ </codeGenerators>
+ <inspectDependencies>true</inspectDependencies>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
--- /dev/null
+/* latin */
+@font-face {
+ font-family: 'Droid Sans';
+ font-style: normal;
+ font-weight: 400;
+ src: local('Droid Sans'), local('DroidSans'), url(http://fonts.gstatic.com/s/droidsans/v6/s-BiyweUPV0v-yRb-cjciAsYbbCjybiHxArTLjt7FRU.woff2) format('woff2');
+ unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2212, U+2215, U+E0FF, U+EFFD, U+F000;
+}
+/* latin */
+@font-face {
+ font-family: 'Droid Sans';
+ font-style: normal;
+ font-weight: 700;
+ src: local('Droid Sans Bold'), local('DroidSans-Bold'), url(http://fonts.gstatic.com/s/droidsans/v6/EFpQQyG9GqCrobXxL-KRMX9tREKe1lLHLCwOC24WjMs.woff2) format('woff2');
+ unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2212, U+2215, U+E0FF, U+EFFD, U+F000;
+}
\ No newline at end of file
--- /dev/null
+<!DOCTYPE html>
+<html>
+<head>
+ <title>RestConf Documentation</title>
+ <link href='fonts.css'
+ rel='stylesheet' type='text/css' /> <!--original location: //fonts.googleapis.com/css?family=Droid+Sans:400,700 -->
+ <link href='../css/highlight.default.css' media='screen' rel='stylesheet'
+ type='text/css' />
+ <link href='../css/screen.css' media='screen' rel='stylesheet'
+ type='text/css' />
+ <link rel="stylesheet" type="text/css" href="opendaylight.css">
+ <link rel="stylesheet" type="text/css"
+ href="../css/ui-lightness/jquery-ui-1.10.4.custom.min.css">
+ <script type="text/javascript" src="../lib/shred.bundle.js"></script>
+ <script src='../lib/jquery-1.8.0.min.js' type='text/javascript'></script>
+ <script src='../lib/jquery-ui-1.11.0.min.js' type="text/javascript"></script>
+ <script src='../lib/jquery.slideto.min.js' type='text/javascript'></script>
+ <script src='../lib/jquery.wiggle.min.js' type='text/javascript'></script>
+ <script src='../lib/jquery.ba-bbq.min.js' type='text/javascript'></script>
+ <script src='../lib/handlebars-1.0.0.js' type='text/javascript'></script>
+ <script src='../lib/underscore-min.js' type='text/javascript'></script>
+ <script src='../lib/backbone-min.js' type='text/javascript'></script>
+ <script src='swagger.js' type='text/javascript'></script>
+ <script src='swagger-ui.js' type='text/javascript'></script>
+ <script src='../lib/odl/list_mounts.js' type='text/javascript'></script>
+ <script src='../lib/highlight.7.3.pack.js' type='text/javascript'></script>
+ <script src='../lib/odl/swagger.js' type='text/javascript'></script>
+ <script src='resources.js' type='text/javascript'></script>
+
+ <script type="text/javascript">
+
+ //reloads the swagger UI documentation for the specified mount.
+ var loadMount = function(mountIndex, mountPath) {
+ $("#message").empty();
+ $("#message").append( "<p>Loading...</p>" );
+ loadSwagger("/apidoc/apis/mounts/" + mountIndex,
+ "swagger-ui-container");
+ $("#message").empty();
+ $("#message").append( "<h2><b>Showing mount points for " + mountPath + "</b></h2>");
+ }
+
+ //clears the swagger UI and adds text prompting the user to select a mount point.
+ var selectAMount = function(string) {
+ $("#swagger-ui-container").empty();
+ $("#message").empty();
+ $("#message").append("<p>Select a mount point.</p>");
+ }
+
+ //loads the root swagger documentation (which comes from RestConf)
+ var loadRootSwagger = function() {
+ $("#message").empty();
+ loadSwagger("/apidoc/apis", "swagger-ui-container");
+ }
+
+ //main method to initialize the mount list / swagger docs / tabs on page load
+ $(function() {
+ $("#tabs").tabs();
+
+ loadMountList($("#mountlist"));
+
+ loadRootSwagger();
+ });
+ </script>
+</head>
+
+<body>
+<div>
+ <!-- style="background-color: #FCA000;" -->
+ <div class="swagger-ui-wrap ui-tabs">
+ <table>
+ <tr>
+ <td><img src="../images/logo_small.png" /></td>
+ <td><h1 width="100%">OpenDaylight RestConf API
+ Documentation</h1></td>
+ </tr>
+ </table>
+ </div>
+</div>
+
+<div class="navbar-inner">
+ <div class="brand"></div>
+</div>
+
+<!-- defines the div shells which represent the jquery tabs -->
+<div id="tabs" class="swagger-ui-wrap">
+ <ul>
+ <li><a href="#Controller" onclick="loadRootSwagger()">Controller
+ Resources</a></li>
+ </ul>
+
+ <div id="Controller">
+ <div>
+ <h3>Below are the list of APIs supported by the Controller.</h3>
+ </div>
+ </div>
+</div>
+
+<div class="swagger-ui-wrap"><hr/></div>
+
+<!-- messages -->
+<div id="message" class="swagger-ui-wrap"></div>
+
+<!-- the swagger is always loaded in this div -->
+<div id="swagger-ui-container" class="swagger-ui-wrap"></div>
+
+<div id="message-bar" class="swagger-ui-wrap"> </div>
+</body>
+
+</html>
--- /dev/null
+.fuelux [class^="icon-"],
+.fuelux [class*=" icon-"] {
+ display: inline-block;
+ width: 14px;
+ height: 14px;
+ margin-top: 1px;
+ *margin-right: .3em;
+ line-height: 14px;
+ vertical-align: text-top;
+ background-position: 14px 14px;
+ background-repeat: no-repeat;
+}
+
+.fuelux .icon-white,
+.fuelux .nav-pills > .active > a > [class^="icon-"],
+.fuelux .nav-pills > .active > a > [class*=" icon-"],
+.fuelux .nav-list > .active > a > [class^="icon-"],
+.fuelux .nav-list > .active > a > [class*=" icon-"],
+.fuelux .navbar-inverse .nav > .active > a > [class^="icon-"],
+.fuelux .navbar-inverse .nav > .active > a > [class*=" icon-"],
+.fuelux .dropdown-menu > li > a:hover > [class^="icon-"],
+.fuelux .dropdown-menu > li > a:focus > [class^="icon-"],
+.fuelux .dropdown-menu > li > a:hover > [class*=" icon-"],
+.fuelux .dropdown-menu > li > a:focus > [class*=" icon-"],
+.fuelux .dropdown-menu > .active > a > [class^="icon-"],
+.fuelux .dropdown-menu > .active > a > [class*=" icon-"],
+.fuelux .dropdown-submenu:hover > a > [class^="icon-"],
+.fuelux .dropdown-submenu:focus > a > [class^="icon-"],
+.fuelux .dropdown-submenu:hover > a > [class*=" icon-"],
+.fuelux .dropdown-submenu:focus > a > [class*=" icon-"] {
+}
+
+.fuelux .icon-search {
+ background:url('../img/search.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+.fuelux .icon-remove {
+ background:url('../img/searchremove.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.fuelux .icon-chevron-up {
+ background:url('../img/sort_up.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.fuelux .icon-chevron-down {
+ background:url('../img/sort_down.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.fuelux .icon-chevron-left {
+ background:url('../img/nextpageleft.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.fuelux .icon-chevron-right {
+ background:url('../img/nextpageright.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.fuelux .icon-chevron-right {
+ background:url('../img/nextpageright.png') 0 0 no-repeat;
+ height: 16px;
+ line-height: 16px;
+ width: 16px;
+}
+
+.tip {
+ background-color: white;
+ border: 1px solid #CCCCCC;
+ box-shadow: 2px 2px 8px #555555;
+ color: #111111;
+ font-family: Verdana,Geneva,Arial,Helvetica,sans-serif;
+ font-size: 14px;
+ opacity: 0.9;
+ padding: 7px;
+ width: 260px;
+ border-radius: 8px;
+}
--- /dev/null
+$(function() {
+
+ // Helper function for vertically aligning DOM elements
+ // http://www.seodenver.com/simple-vertical-align-plugin-for-jquery/
+ $.fn.vAlign = function() {
+ return this.each(function(i){
+ var ah = $(this).height();
+ var ph = $(this).parent().height();
+ var mh = (ph - ah) / 2;
+ $(this).css('margin-top', mh);
+ });
+ };
+
+ $.fn.stretchFormtasticInputWidthToParent = function() {
+ return this.each(function(i){
+ var p_width = $(this).closest("form").innerWidth();
+ var p_padding = parseInt($(this).closest("form").css('padding-left') ,10) + parseInt($(this).closest("form").css('padding-right'), 10);
+ var this_padding = parseInt($(this).css('padding-left'), 10) + parseInt($(this).css('padding-right'), 10);
+ $(this).css('width', p_width - p_padding - this_padding);
+ });
+ };
+
+ $('form.formtastic li.string input, form.formtastic textarea').stretchFormtasticInputWidthToParent();
+
+ // Vertically center these paragraphs
+ // Parent may need a min-height for this to work..
+ $('ul.downplayed li div.content p').vAlign();
+
+ // When a sandbox form is submitted..
+ $("form.sandbox").submit(function(){
+
+ var error_free = true;
+
+ // Cycle through the form's required inputs
+ $(this).find("input.required").each(function() {
+
+ // Remove any existing error styles from the input
+ $(this).removeClass('error');
+
+ // Tack the error style on if the input is empty..
+ if ($(this).val() == '') {
+ $(this).addClass('error');
+ $(this).wiggle();
+ error_free = false;
+ }
+
+ });
+
+ return error_free;
+ });
+
+});
+
+function clippyCopiedCallback(a) {
+ $('#api_key_copied').fadeIn().delay(1000).fadeOut();
+
+ // var b = $("#clippy_tooltip_" + a);
+ // b.length != 0 && (b.attr("title", "copied!").trigger("tipsy.reload"), setTimeout(function() {
+ // b.attr("title", "copy to clipboard")
+ // },
+ // 500))
+}
+
+// Logging function that accounts for browsers that don't have window.console
+function log() {
+ if (window.console) console.log.apply(console,arguments);
+}
+// Handle browsers that do console incorrectly (IE9 and below, see http://stackoverflow.com/a/5539378/7913)
+if (Function.prototype.bind && console && typeof console.log == "object") {
+ [
+ "log","info","warn","error","assert","dir","clear","profile","profileEnd"
+ ].forEach(function (method) {
+ console[method] = this.bind(console[method], console);
+ }, Function.prototype.call);
+}
+
+var Docs = {
+
+ shebang: function() {
+
+ // If shebang has an operation nickname in it..
+ // e.g. /docs/#!/words/get_search
+ var fragments = $.param.fragment().split('/');
+ fragments.shift(); // get rid of the bang
+
+ switch (fragments.length) {
+ case 1:
+ // Expand all operations for the resource and scroll to it
+// log('shebang resource:' + fragments[0]);
+ var dom_id = 'resource_' + fragments[0];
+
+ Docs.expandEndpointListForResource(fragments[0]);
+ $("#"+dom_id).slideto({highlight: false});
+ break;
+ case 2:
+ // Refer to the endpoint DOM element, e.g. #words_get_search
+// log('shebang endpoint: ' + fragments.join('_'));
+
+ // Expand Resource
+ Docs.expandEndpointListForResource(fragments[0]);
+ $("#"+dom_id).slideto({highlight: false});
+
+ // Expand operation
+ var li_dom_id = fragments.join('_');
+ var li_content_dom_id = li_dom_id + "_content";
+
+// log("li_dom_id " + li_dom_id);
+// log("li_content_dom_id " + li_content_dom_id);
+
+ Docs.expandOperation($('#'+li_content_dom_id));
+ $('#'+li_dom_id).slideto({highlight: false});
+ break;
+ }
+
+ },
+
+ toggleEndpointListForResource: function(resource) {
+ var elem = $('li#resource_' + Docs.escapeResourceName(resource) + ' ul.endpoints');
+ if (elem.is(':visible')) {
+ Docs.collapseEndpointListForResource(resource);
+ } else {
+ Docs.expandEndpointListForResource(resource);
+ }
+ },
+
+ // Expand resource
+ expandEndpointListForResource: function(resource) {
+ var resource = Docs.escapeResourceName(resource);
+ if (resource == '') {
+ $('.resource ul.endpoints').slideDown();
+ return;
+ }
+
+ $('li#resource_' + resource).addClass('active');
+
+ var elem = $('li#resource_' + resource + ' ul.endpoints');
+ elem.slideDown();
+ },
+
+ // Collapse resource and mark as explicitly closed
+ collapseEndpointListForResource: function(resource) {
+ var resource = Docs.escapeResourceName(resource);
+ $('li#resource_' + resource).removeClass('active');
+
+ var elem = $('li#resource_' + resource + ' ul.endpoints');
+ elem.slideUp();
+ },
+
+ expandOperationsForResource: function(resource) {
+ // Make sure the resource container is open..
+ Docs.expandEndpointListForResource(resource);
+
+ if (resource == '') {
+ $('.resource ul.endpoints li.operation div.content').slideDown();
+ return;
+ }
+
+ $('li#resource_' + Docs.escapeResourceName(resource) + ' li.operation div.content').each(function() {
+ Docs.expandOperation($(this));
+ });
+ },
+
+ collapseOperationsForResource: function(resource) {
+ // Make sure the resource container is open..
+ Docs.expandEndpointListForResource(resource);
+
+ $('li#resource_' + Docs.escapeResourceName(resource) + ' li.operation div.content').each(function() {
+ Docs.collapseOperation($(this));
+ });
+ },
+
+ escapeResourceName: function(resource) {
+ return resource.replace(/[!"#$%&'()*+,.\/:;<=>?@\[\\\]\^`{|}~]/g, "\\$&");
+ },
+
+ expandOperation: function(elem) {
+ elem.slideDown();
+ },
+
+ collapseOperation: function(elem) {
+ elem.slideUp();
+ }
+
+};
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", self=this;
+
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers.each.call(depth0, depth0.produces, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n";
+ return buffer;
+ }
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <option value=\"";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\">";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</option>\n ";
+ return buffer;
+ }
+
+ function program4(depth0,data) {
+
+
+ return "\n <option value=\"application/json\">application/json</option>\n";
+ }
+
+ buffer += "<label for=\"contentType\"></label>\n<select name=\"contentType\">\n";
+ stack1 = helpers['if'].call(depth0, depth0.produces, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</select>\n";
+ return buffer;
+ });
+})();
+
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['main'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ function program1(depth0,data) {
+
+ var buffer = "", stack1, stack2;
+ buffer += "\n <div class=\"info_title\">"
+ + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.title)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+ + "</div>\n <div class=\"info_description\">";
+ stack2 = ((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.description)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1);
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "</div>\n ";
+ stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.termsOfServiceUrl), {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "\n ";
+ stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.contact), {hash:{},inverse:self.noop,fn:self.program(4, program4, data),data:data});
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "\n ";
+ stack2 = helpers['if'].call(depth0, ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.license), {hash:{},inverse:self.noop,fn:self.program(6, program6, data),data:data});
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "<div class=\"info_tos\"><a href=\""
+ + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.termsOfServiceUrl)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+ + "\">Terms of service</a></div>";
+ return buffer;
+ }
+
+ function program4(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "<div class='info_contact'><a href=\"mailto:"
+ + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.contact)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+ + "\">Contact the developer</a></div>";
+ return buffer;
+ }
+
+ function program6(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "<div class='info_license'><a href='"
+ + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.licenseUrl)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+ + "'>"
+ + escapeExpression(((stack1 = ((stack1 = depth0.info),stack1 == null || stack1 === false ? stack1 : stack1.license)),typeof stack1 === functionType ? stack1.apply(depth0) : stack1))
+ + "</a></div>";
+ return buffer;
+ }
+
+ function program8(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n , <span style=\"font-variant: small-caps\">api version</span>: ";
+ if (stack1 = helpers.apiVersion) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.apiVersion; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "\n ";
+ return buffer;
+ }
+
+ buffer += "<div class='info' id='api_info'>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.info, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</div>\n<div class='container' id='resources_container'>\n <ul id='resources'>\n </ul>\n\n <div class=\"footer\">\n <br>\n <br>\n <h4 style=\"color: #999\">[ <span style=\"font-variant: small-caps\">base url</span>: ";
+ if (stack1 = helpers.basePath) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.basePath; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.apiVersion, {hash:{},inverse:self.noop,fn:self.program(8, program8, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "]</h4>\n </div>\n</div>\n";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for the Swagger UI 'operation' panel.
+ // NOTE(review): machine-generated by the Handlebars compiler -- regenerate from the
+ // .handlebars source rather than hand-editing. The `if (stack1 = helpers.X)`
+ // assignment-in-condition pattern below is the compiler's intentional
+ // "helper, else context property" lookup, not a typo.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['operation'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ // {{#if notes}}: "Implementation Notes" section; `notes` is emitted unescaped (may contain HTML).
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <h4>Implementation Notes</h4>\n <p>";
+ if (stack1 = helpers.notes) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.notes; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</p>\n ";
+ return buffer;
+ }
+
+ // {{#if type}}: static "Response Class" placeholder markup (filled in later by the UI code).
+ function program3(depth0,data) {
+
+
+ return "\n <h4>Response Class</h4>\n <p><span class=\"model-signature\" /></p>\n <br/>\n <div class=\"response-content-type\" />\n ";
+ }
+
+ // {{#if parameters}}: static "Parameters" table skeleton; rows are injected into tbody.operation-params.
+ function program5(depth0,data) {
+
+
+ return "\n <h4>Parameters</h4>\n <table class='fullwidth'>\n <thead>\n <tr>\n <th style=\"width: 100px; max-width: 100px\">Parameter</th>\n <th style=\"width: 310px; max-width: 310px\">Value</th>\n <th style=\"width: 200px; max-width: 200px\">Description</th>\n <th style=\"width: 100px; max-width: 100px\">Parameter Type</th>\n <th style=\"width: 220px; max-width: 230px\">Data Type</th>\n </tr>\n </thead>\n <tbody class=\"operation-params\">\n\n </tbody>\n </table>\n ";
+ }
+
+ // {{#if responseMessages}}: static "Error Status Codes" table skeleton.
+ function program7(depth0,data) {
+
+
+ return "\n <div style='margin:0;padding:0;display:inline'></div>\n <h4>Error Status Codes</h4>\n <table class='fullwidth'>\n <thead>\n <tr>\n <th>HTTP Status Code</th>\n <th>Reason</th>\n </tr>\n </thead>\n <tbody class=\"operation-status\">\n \n </tbody>\n </table>\n ";
+ }
+
+ // {{#if isReadOnly}} true-branch: no sandbox controls for read-only operations.
+ function program9(depth0,data) {
+
+
+ return "\n ";
+ }
+
+ // {{#if isReadOnly}} else-branch: "Hide Response" link + throbber for the try-it-out sandbox.
+ function program11(depth0,data) {
+
+
+ return "\n <div class='sandbox_header'>\n <a href='#' class='response_hider' style='display:none'>Hide Response</a>\n <img alt='Throbber' class='response_throbber' src='../images/throbber.gif' style='display:none' />\n </div>\n ";
+ }
+
+ // Main body: one <li class='{method} operation'> per operation. Element ids and
+ // '#!/...' anchors are keyed on resourceName/nickname/method/number.
+ buffer += "\n <ul class='operations' >\n <li class='";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + " operation' id='";
+ if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>\n <div class='heading'>\n <h3>\n <span class='http_method'>\n <a href='#!/";
+ if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "/";
+ if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' class=\"toggleOperation\">";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</a>\n </span>\n <span class='path'>\n <a href='#!/";
+ if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "/";
+ if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' class=\"toggleOperation\">";
+ if (stack1 = helpers.path) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.path; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</a>\n </span>\n </h3>\n <ul class='options'>\n <li>\n <a href='#!/";
+ if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "/";
+ if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' class=\"toggleOperation\">";
+ if (stack1 = helpers.summary) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.summary; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</a>\n </li>\n </ul>\n </div>\n <div class='content' id='";
+ if (stack1 = helpers.resourceName) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.resourceName; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.nickname) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.nickname; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.method) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.method; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_";
+ if (stack1 = helpers.number) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.number; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_content' style='display:none'>\n ";
+ // Conditional sections: notes, response class, parameters, error codes, sandbox controls.
+ stack1 = helpers['if'].call(depth0, depth0.notes, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.type, {hash:{},inverse:self.noop,fn:self.program(3, program3, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n <form accept-charset='UTF-8' class='sandbox'>\n <div style='margin:0;padding:0;display:inline'></div>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.parameters, {hash:{},inverse:self.noop,fn:self.program(5, program5, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.responseMessages, {hash:{},inverse:self.noop,fn:self.program(7, program7, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isReadOnly, {hash:{},inverse:self.program(11, program11, data),fn:self.program(9, program9, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n </form>\n <div class='response' style='display:none'>\n <h4>Request URL</h4>\n <div class='block request_url'></div>\n <h4>Response Body</h4>\n <div class='block response_body'></div>\n <h4>Response Code</h4>\n <div class='block response_code'></div>\n <h4>Response Headers</h4>\n <div class='block response_headers'></div>\n </div>\n </div>\n </li>\n </ul>\n";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for one editable parameter row (<td> cells only;
+ // the enclosing <tr> comes from the caller). Machine-generated -- do not hand-edit.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['param'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ // {{#if isBody}} true-branch: body parameter -- file upload vs textarea.
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(4, program4, data),fn:self.program(2, program2, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ // isFile: render an <input type="file"> plus a content-type selector placeholder.
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input type=\"file\" name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'/>\n <div class=\"parameter-content-type\" />\n ";
+ return buffer;
+ }
+
+ // Non-file body parameter: branch on whether a defaultValue exists.
+ function program4(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(7, program7, data),fn:self.program(5, program5, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ // Textarea pre-filled with the (HTML-escaped) defaultValue.
+ function program5(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</textarea>\n ";
+ return buffer;
+ }
+
+ // Empty textarea plus content-type selector placeholder (no defaultValue).
+ function program7(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'></textarea>\n <br />\n <div class=\"parameter-content-type\" />\n ";
+ return buffer;
+ }
+
+ // {{#if isBody}} else-branch: non-body parameter rendered as a text input.
+ function program9(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(12, program12, data),fn:self.program(10, program10, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ // Text input pre-filled with the defaultValue.
+ function program10(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input class='parameter' minlength='0' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' placeholder='' type='text' value='";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'/>\n ";
+ return buffer;
+ }
+
+ // Empty text input (no defaultValue).
+ function program12(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input class='parameter' minlength='0' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' placeholder='' type='text' value=''/>\n ";
+ return buffer;
+ }
+
+ // Main body: name | input widget | description | paramType | model signature cells.
+ // description and paramType are emitted unescaped (triple-stash in the source template).
+ buffer += "<td class='code'>";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>\n\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(9, program9, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n\n</td>\n<td>";
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td>";
+ if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td>\n <span class=\"model-signature\"></span>\n</td>\n";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for a parameter with an enumerated value list,
+ // rendered as a <select>. Machine-generated -- do not hand-edit.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['param_list'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, stack2, self=this, functionType="function", escapeExpression=this.escapeExpression;
+
+ // {{#if allowMultiple}}: add the multi-select attribute.
+ function program1(depth0,data) {
+
+
+ return " multiple='multiple'";
+ }
+
+ // required param: no blank option is inserted.
+ function program3(depth0,data) {
+
+
+ return "\n ";
+ }
+
+ // optional param: decide whether to insert a blank option.
+ function program5(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(8, program8, data),fn:self.program(6, program6, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ // has a defaultValue: no blank option needed.
+ function program6(depth0,data) {
+
+
+ return "\n ";
+ }
+
+ // no defaultValue: blank option only for single-select params.
+ function program8(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.allowMultiple, {hash:{},inverse:self.program(11, program11, data),fn:self.program(9, program9, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program9(depth0,data) {
+
+
+ return "\n ";
+ }
+
+ // single-select, optional, no default: pre-selected empty option.
+ function program11(depth0,data) {
+
+
+ return "\n <option selected=\"\" value=''></option>\n ";
+ }
+
+ // {{#each allowableValues.descriptiveValues}}: one <option> per value.
+ function program13(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isDefault, {hash:{},inverse:self.program(16, program16, data),fn:self.program(14, program14, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ // default value: pre-selected and labelled "(default)".
+ function program14(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <option selected=\"\" value='";
+ if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + " (default)</option>\n ";
+ return buffer;
+ }
+
+ // non-default value: plain option.
+ function program16(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <option value='";
+ if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.value) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.value; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</option>\n ";
+ return buffer;
+ }
+
+ // Main body: name | <select> | description | paramType | model signature cells.
+ buffer += "<td class='code'>";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>\n <select ";
+ stack1 = helpers['if'].call(depth0, depth0.allowMultiple, {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += " class='parameter' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.required, {hash:{},inverse:self.program(5, program5, data),fn:self.program(3, program3, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ // Guarded lookup: allowableValues may be null/absent, so descriptiveValues is only
+ // dereferenced when allowableValues is truthy.
+ stack2 = helpers.each.call(depth0, ((stack1 = depth0.allowableValues),stack1 == null || stack1 === false ? stack1 : stack1.descriptiveValues), {hash:{},inverse:self.noop,fn:self.program(13, program13, data),data:data});
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "\n </select>\n</td>\n<td>";
+ if (stack2 = helpers.description) { stack2 = stack2.call(depth0, {hash:{},data:data}); }
+ else { stack2 = depth0.description; stack2 = typeof stack2 === functionType ? stack2.apply(depth0) : stack2; }
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "</td>\n<td>";
+ if (stack2 = helpers.paramType) { stack2 = stack2.call(depth0, {hash:{},data:data}); }
+ else { stack2 = depth0.paramType; stack2 = typeof stack2 === functionType ? stack2.apply(depth0) : stack2; }
+ if(stack2 || stack2 === 0) { buffer += stack2; }
+ buffer += "</td>\n<td><span class=\"model-signature\"></span></td>";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for a read-only (non-editable) parameter row.
+ // Machine-generated -- do not hand-edit.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['param_readonly'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ // body param: read-only textarea holding the defaultValue.
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' readonly='readonly' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</textarea>\n ";
+ return buffer;
+ }
+
+ // non-body param: show defaultValue as text, or "(empty)".
+ function program3(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(6, program6, data),fn:self.program(4, program4, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program4(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "\n ";
+ return buffer;
+ }
+
+ // no defaultValue: literal "(empty)" placeholder text.
+ function program6(depth0,data) {
+
+
+ return "\n (empty)\n ";
+ }
+
+ // Main body: name | value | description | paramType | model signature cells.
+ buffer += "<td class='code'>";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(3, program3, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</td>\n<td>";
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td>";
+ if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for a read-only REQUIRED parameter row.
+ // Identical layout to 'param_readonly' except for the 'required' cell class and
+ // the '(required)' textarea placeholder. Machine-generated -- do not hand-edit.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['param_readonly_required'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ // body param: read-only textarea with '(required)' placeholder, pre-filled with defaultValue.
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' readonly='readonly' placeholder='(required)' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</textarea>\n ";
+ return buffer;
+ }
+
+ // non-body param: show defaultValue as text, or "(empty)".
+ function program3(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(6, program6, data),fn:self.program(4, program4, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program4(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "\n ";
+ return buffer;
+ }
+
+ // no defaultValue: literal "(empty)" placeholder text.
+ function program6(depth0,data) {
+
+
+ return "\n (empty)\n ";
+ }
+
+ // Main body: name (styled required) | value | description | paramType | model signature.
+ buffer += "<td class='code required'>";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(3, program3, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</td>\n<td>";
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td>";
+ if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+ return buffer;
+ });
+})();
+
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['param_required'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression, self=this;
+
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(4, program4, data),fn:self.program(2, program2, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input type=\"file\" name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'/>\n ";
+ return buffer;
+ }
+
+ function program4(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(7, program7, data),fn:self.program(5, program5, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program5(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' placeholder='(required)' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'>";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</textarea>\n ";
+ return buffer;
+ }
+
+ function program7(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <textarea class='body-textarea' placeholder='(required)' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'></textarea>\n <br />\n <div class=\"parameter-content-type\" />\n ";
+ return buffer;
+ }
+
+ function program9(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isFile, {hash:{},inverse:self.program(12, program12, data),fn:self.program(10, program10, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program10(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input class='parameter' class='required' type='file' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'/>\n ";
+ return buffer;
+ }
+
+ function program12(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers['if'].call(depth0, depth0.defaultValue, {hash:{},inverse:self.program(15, program15, data),fn:self.program(13, program13, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n ";
+ return buffer;
+ }
+ function program13(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input class='parameter required' minlength='1' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' placeholder='(required)' type='text' value='";
+ if (stack1 = helpers.defaultValue) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.defaultValue; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'/>\n ";
+ return buffer;
+ }
+
+ function program15(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <input class='parameter required' minlength='1' name='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' placeholder='(required)' type='text' value=''/>\n ";
+ return buffer;
+ }
+
+ buffer += "<td class='code required'>";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>\n ";
+ stack1 = helpers['if'].call(depth0, depth0.isBody, {hash:{},inverse:self.program(9, program9, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</td>\n<td>\n <strong>";
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</strong>\n</td>\n<td>";
+ if (stack1 = helpers.paramType) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.paramType; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n<td><span class=\"model-signature\"></span></td>\n";
+ return buffer;
+ });
+})();
+
+ (function() {
+ // Precompiled Handlebars template for the content-type <select> options of a parameter.
+ // Emits <option> elements from `consumes` (each entry rendered unescaped), falling back
+ // to a single application/json option; the opening <select> tag comes from the caller.
+ // Machine-generated -- do not hand-edit.
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['parameter_content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", self=this;
+
+ // {{#if consumes}}: iterate the declared media types.
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers.each.call(depth0, depth0.consumes, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n";
+ return buffer;
+ }
+ // One <option> per media-type string; depth0 here is the string itself ({{this}}).
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <option value=\"";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\">";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</option>\n ";
+ return buffer;
+ }
+
+ // Fallback when `consumes` is absent: offer application/json only.
+ function program4(depth0,data) {
+
+
+ return "\n <option value=\"application/json\">application/json</option>\n";
+ }
+
+ stack1 = helpers['if'].call(depth0, depth0.consumes, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</select>\n";
+ return buffer;
+ });
+})();
+
+// Precompiled Handlebars template 'resource': renders one API resource's
+// heading — the anchored title, Show/Hide, List Operations, Expand Operations
+// and Raw links — plus the (initially hidden) <ul> that ResourceView fills
+// with operation endpoints. Machine-generated — do not hand-edit.
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['resource'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, options, functionType="function", escapeExpression=this.escapeExpression, self=this, blockHelperMissing=helpers.blockHelperMissing;
+
+ // program1: emitted only when `description` is truthy — the " : " separator
+ // between the resource name and its description text.
+ function program1(depth0,data) {
+
+
+ return " : ";
+ }
+
+ buffer += "<div class='heading'>\n <h2>\n <a href='#!/";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' onclick=\"Docs.toggleEndpointListForResource('";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "');\">";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</a> ";
+ // `description` used as a block helper for the separator, then emitted
+ // again as a plain (unescaped) expression for the text itself.
+ options = {hash:{},inverse:self.noop,fn:self.program(1, program1, data),data:data};
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, options); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if (!helpers.description) { stack1 = blockHelperMissing.call(depth0, stack1, options); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ if (stack1 = helpers.description) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.description; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n </h2>\n <ul class='options'>\n <li>\n <a href='#!/";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "' id='endpointListTogger_";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'\n onclick=\"Docs.toggleEndpointListForResource('";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "');\">Show/Hide</a>\n </li>\n <li>\n <a href='#' onclick=\"Docs.collapseOperationsForResource('";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'); return false;\">\n List Operations\n </a>\n </li>\n <li>\n <a href='#' onclick=\"Docs.expandOperationsForResource('";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "'); return false;\">\n Expand Operations\n </a>\n </li>\n <li>\n <a href='";
+ // Raw link: the per-resource JSON document under resources/<path>.json.
+ stack1 = "resources/" + depth0.path + ".json";
+ buffer += escapeExpression(stack1)
+ + "'>Raw</a>\n </li>\n </ul>\n</div>\n<ul class='endpoints' id='";
+ if (stack1 = helpers.name) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.name; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "_endpoint_list' style='display:none'>\n\n</ul>\n";
+ return buffer;
+ });
+})();
+
+// Precompiled Handlebars template 'response_content_type': the mirror image of
+// 'parameter_content_type' — renders the <option> list for the "Response
+// Content Type" <select> from the model's `produces` list, defaulting to
+// application/json when none is declared. Machine-generated — do not hand-edit.
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['response_content_type'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", self=this;
+
+ // program1: model has a truthy `produces` list — iterate it (program2).
+ function program1(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n ";
+ stack1 = helpers.each.call(depth0, depth0.produces, {hash:{},inverse:self.noop,fn:self.program(2, program2, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n";
+ return buffer;
+ }
+ // program2: depth0 is the media-type string; used as both value and label.
+ function program2(depth0,data) {
+
+ var buffer = "", stack1;
+ buffer += "\n <option value=\"";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\">";
+ stack1 = (typeof depth0 === functionType ? depth0.apply(depth0) : depth0);
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</option>\n ";
+ return buffer;
+ }
+
+ // program4: inverse branch — no `produces`; offer application/json only.
+ function program4(depth0,data) {
+
+
+ return "\n <option value=\"application/json\">application/json</option>\n";
+ }
+
+ stack1 = helpers['if'].call(depth0, depth0.produces, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data),data:data});
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n</select>\n";
+ return buffer;
+ });
+})();
+
+// Precompiled Handlebars template 'signature': renders the Model / Model
+// Schema tab pair used by SignatureView — the (unescaped) HTML `signature`
+// description and the escaped `sampleJSON` snippet in a <pre><code> block.
+// Machine-generated — do not hand-edit.
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['signature'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression;
+
+
+ buffer += "<div>\n<ul class=\"signature-nav\">\n <li><a class=\"description-link\" href=\"#\">Model</a></li>\n <li><a class=\"snippet-link\" href=\"#\">Model Schema</a></li>\n</ul>\n<div>\n\n<div class=\"signature-container\">\n <div class=\"description\">\n ";
+ // `signature` is emitted unescaped (triple-stash) — it is trusted HTML
+ // produced by the swagger client library.
+ if (stack1 = helpers.signature) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.signature; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\n </div>\n\n <div class=\"snippet\">\n <pre><code>";
+ // `sampleJSON` is HTML-escaped so it displays literally inside <code>.
+ if (stack1 = helpers.sampleJSON) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.sampleJSON; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</code></pre>\n <small class=\"notice\"></small>\n </div>\n</div>\n\n";
+ return buffer;
+ });
+})();
+
+// Precompiled Handlebars template 'status_code': one row of the response-
+// messages table — escaped HTTP `code` cell plus an unescaped `message` cell.
+// Machine-generated — do not hand-edit.
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+ templates['status_code'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ this.compilerInfo = [4,'>= 1.0.0'];
+ helpers = this.merge(helpers, Handlebars.helpers); data = data || {};
+ var buffer = "", stack1, functionType="function", escapeExpression=this.escapeExpression;
+
+
+ buffer += "<td width='15%' class='code'>";
+ if (stack1 = helpers.code) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.code; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ buffer += escapeExpression(stack1)
+ + "</td>\n<td>";
+ // NOTE(review): `message` is inserted without HTML-escaping — presumably
+ // intentional so descriptions may carry markup; confirm source is trusted.
+ if (stack1 = helpers.message) { stack1 = stack1.call(depth0, {hash:{},data:data}); }
+ else { stack1 = depth0.message; stack1 = typeof stack1 === functionType ? stack1.apply(depth0) : stack1; }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "</td>\n";
+ return buffer;
+ });
+})();
+
+
+
+// Generated by CoffeeScript 1.6.3
+(function() {
+ var ContentTypeView, HeaderView, MainView, OperationView, ParameterContentTypeView, ParameterView, ResourceView, ResponseContentTypeView, SignatureView, StatusCodeView, SwaggerUi, _ref, _ref1, _ref10, _ref2, _ref3, _ref4, _ref5, _ref6, _ref7, _ref8, _ref9,
+ __hasProp = {}.hasOwnProperty,
+ __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
+
+ // SwaggerUi: top-level controller (a Backbone.Router subclass). Owns the
+ // options, the SwaggerApi client, the HeaderView and the MainView; loads a
+ // spec URL and renders the UI into #<dom_id>. CoffeeScript 1.6.3 compiler
+ // output — edit the .coffee source, not this file.
+ SwaggerUi = (function(_super) {
+ __extends(SwaggerUi, _super);
+
+ function SwaggerUi() {
+ _ref = SwaggerUi.__super__.constructor.apply(this, arguments);
+ return _ref;
+ }
+
+ // Container element id; overridable via options.dom_id.
+ SwaggerUi.prototype.dom_id = "swagger_ui";
+
+ SwaggerUi.prototype.options = null;
+
+ SwaggerUi.prototype.api = null;
+
+ SwaggerUi.prototype.headerView = null;
+
+ SwaggerUi.prototype.mainView = null;
+
+ // Wires success/progress/failure callbacks into options and binds the
+ // header's 'update-swagger-ui' event to reload with a new URL.
+ SwaggerUi.prototype.initialize = function(options) {
+ var _this = this;
+ if (options == null) {
+ options = {};
+ }
+ if (options.dom_id != null) {
+ this.dom_id = options.dom_id;
+ delete options.dom_id;
+ }
+ // NOTE(review): $(...) returns a jQuery object and is never == null, so
+ // this fallback <div> is never appended; presumably the intent was
+ // $('#' + this.dom_id).length === 0 — confirm against upstream swagger-ui.
+ if ($('#' + this.dom_id) == null) {
+ $('body').append('<div id="' + this.dom_id + '"></div>');
+ }
+ this.options = options;
+ this.options.success = function() {
+ return _this.render();
+ };
+ this.options.progress = function(d) {
+ return _this.showMessage(d);
+ };
+ this.options.failure = function(d) {
+ return _this.onLoadFailure(d);
+ };
+ this.headerView = new HeaderView({
+ el: $('#header')
+ });
+ return this.headerView.on('update-swagger-ui', function(data) {
+ return _this.updateSwaggerUi(data);
+ });
+ };
+
+ // Swaps in a new spec URL (from the header form) and reloads.
+ SwaggerUi.prototype.updateSwaggerUi = function(data) {
+ this.options.url = data.url;
+ return this.load();
+ };
+
+ // Clears any prior rendering, absolutizes a relative URL against the
+ // current page, and kicks off the async SwaggerApi build (which later
+ // calls back into render() via options.success).
+ SwaggerUi.prototype.load = function() {
+ var url, _ref1;
+ if ((_ref1 = this.mainView) != null) {
+ _ref1.clear();
+ }
+ url = this.options.url;
+ if (url.indexOf("http") !== 0) {
+ url = this.buildUrl(window.location.href.toString(), url);
+ }
+ this.options.url = url;
+ this.headerView.update(url);
+ this.api = new SwaggerApi(this.options);
+ this.api.build();
+ return this.api;
+ };
+
+ // Renders the MainView, applies the docExpansion option ("full" expands
+ // everything, "list" collapses to the operation list), fires onComplete,
+ // then processes the #!/... hash after a short delay so the DOM exists.
+ SwaggerUi.prototype.render = function() {
+ var _this = this;
+ this.showMessage('Finished Loading Resource Information. Rendering Swagger UI...');
+ this.mainView = new MainView({
+ model: this.api,
+ el: $('#' + this.dom_id)
+ }).render();
+ this.showMessage();
+ switch (this.options.docExpansion) {
+ case "full":
+ Docs.expandOperationsForResource('');
+ break;
+ case "list":
+ Docs.collapseOperationsForResource('');
+ }
+ if (this.options.onComplete) {
+ this.options.onComplete(this.api, this);
+ }
+ return setTimeout(function() {
+ return Docs.shebang();
+ }, 400);
+ };
+
+ // Resolves a relative `url` against `base`'s scheme://host[:port].
+ SwaggerUi.prototype.buildUrl = function(base, url) {
+ var parts;
+ console.log("base is " + base);
+ parts = base.split("/");
+ base = parts[0] + "//" + parts[2];
+ if (url.indexOf("/") === 0) {
+ return base + url;
+ } else {
+ return base + "/" + url;
+ }
+ };
+
+ // Shows `data` (may be HTML) in the green success message bar.
+ SwaggerUi.prototype.showMessage = function(data) {
+ if (data == null) {
+ data = '';
+ }
+ $('#message-bar').removeClass('message-fail');
+ $('#message-bar').addClass('message-success');
+ return $('#message-bar').html(data);
+ };
+
+ // Shows `data` in the red failure bar and forwards it to options.onFailure.
+ SwaggerUi.prototype.onLoadFailure = function(data) {
+ var val;
+ if (data == null) {
+ data = '';
+ }
+ $('#message-bar').removeClass('message-success');
+ $('#message-bar').addClass('message-fail');
+ val = $('#message-bar').html(data);
+ if (this.options.onFailure != null) {
+ this.options.onFailure(data);
+ }
+ return val;
+ };
+
+ return SwaggerUi;
+
+ })(Backbone.Router);
+
+ // Public entry point for page scripts: new window.SwaggerUi({...}).
+ window.SwaggerUi = SwaggerUi;
+
+ // HeaderView: Backbone.View over the #header toolbar (spec-URL input, api-key
+ // input, Explore button, demo shortcuts). Emits 'update-swagger-ui' with the
+ // chosen URL; SwaggerUi listens and reloads. CoffeeScript compiler output.
+ HeaderView = (function(_super) {
+ __extends(HeaderView, _super);
+
+ function HeaderView() {
+ _ref1 = HeaderView.__super__.constructor.apply(this, arguments);
+ return _ref1;
+ }
+
+ HeaderView.prototype.events = {
+ 'click #show-pet-store-icon': 'showPetStore',
+ 'click #show-wordnik-dev-icon': 'showWordnikDev',
+ 'click #explore': 'showCustom',
+ 'keyup #input_baseUrl': 'showCustomOnKeyup',
+ 'keyup #input_apiKey': 'showCustomOnKeyup'
+ };
+
+ HeaderView.prototype.initialize = function() {};
+
+ // Demo shortcut: load the public petstore sample spec.
+ HeaderView.prototype.showPetStore = function(e) {
+ return this.trigger('update-swagger-ui', {
+ url: "http://petstore.swagger.wordnik.com/api/api-docs"
+ });
+ };
+
+ // Demo shortcut: load the Wordnik v4 resource listing.
+ HeaderView.prototype.showWordnikDev = function(e) {
+ return this.trigger('update-swagger-ui', {
+ url: "http://api.wordnik.com/v4/resources.json"
+ });
+ };
+
+ // Enter key (13) in either input behaves like clicking Explore.
+ HeaderView.prototype.showCustomOnKeyup = function(e) {
+ if (e.keyCode === 13) {
+ return this.showCustom();
+ }
+ };
+
+ // Explore: load whatever URL/api-key the user typed.
+ HeaderView.prototype.showCustom = function(e) {
+ if (e != null) {
+ e.preventDefault();
+ }
+ return this.trigger('update-swagger-ui', {
+ url: $('#input_baseUrl').val(),
+ apiKey: $('#input_apiKey').val()
+ });
+ };
+
+ // Reflects `url` into the input; when `trigger` is true also fires a
+ // reload. `apiKey` is accepted but unused here.
+ HeaderView.prototype.update = function(url, apiKey, trigger) {
+ if (trigger == null) {
+ trigger = false;
+ }
+ $('#input_baseUrl').val(url);
+ if (trigger) {
+ return this.trigger('update-swagger-ui', {
+ url: url
+ });
+ }
+ };
+
+ return HeaderView;
+
+ })(Backbone.View);
+
+ // MainView: renders the 'main' template for the whole API model, then
+ // appends one ResourceView per entry in model.apisArray into #resources.
+ // CoffeeScript compiler output.
+ MainView = (function(_super) {
+ __extends(MainView, _super);
+
+ function MainView() {
+ _ref2 = MainView.__super__.constructor.apply(this, arguments);
+ return _ref2;
+ }
+
+ MainView.prototype.initialize = function() {};
+
+ MainView.prototype.render = function() {
+ var resource, _i, _len, _ref3;
+ $(this.el).html(Handlebars.templates.main(this.model));
+ _ref3 = this.model.apisArray;
+ for (_i = 0, _len = _ref3.length; _i < _len; _i++) {
+ resource = _ref3[_i];
+ this.addResource(resource);
+ }
+ return this;
+ };
+
+ // Builds a ResourceView <li id="resource_<name>"> and appends it.
+ MainView.prototype.addResource = function(resource) {
+ var resourceView;
+ resourceView = new ResourceView({
+ model: resource,
+ tagName: 'li',
+ id: 'resource_' + resource.name,
+ className: 'resource'
+ });
+ return $('#resources').append(resourceView.render().el);
+ };
+
+ MainView.prototype.clear = function() {
+ return $(this.el).html('');
+ };
+
+ return MainView;
+
+ })(Backbone.View);
+
+ // ResourceView: renders the 'resource' template (heading + empty endpoint
+ // list) and fills it with one OperationView per model.operationsArray entry,
+ // numbering them sequentially for anchor ids. CoffeeScript compiler output.
+ ResourceView = (function(_super) {
+ __extends(ResourceView, _super);
+
+ function ResourceView() {
+ _ref3 = ResourceView.__super__.constructor.apply(this, arguments);
+ return _ref3;
+ }
+
+ ResourceView.prototype.initialize = function() {};
+
+ ResourceView.prototype.render = function() {
+ var operation, _i, _len, _ref4;
+ console.log(this.model.description);
+ $(this.el).html(Handlebars.templates.resource(this.model));
+ this.number = 0;
+ _ref4 = this.model.operationsArray;
+ for (_i = 0, _len = _ref4.length; _i < _len; _i++) {
+ operation = _ref4[_i];
+ this.addOperation(operation);
+ }
+ return this;
+ };
+
+ // Stamps the operation with its ordinal (used in DOM ids) before rendering.
+ ResourceView.prototype.addOperation = function(operation) {
+ var operationView;
+ operation.number = this.number;
+ operationView = new OperationView({
+ model: operation,
+ tagName: 'li',
+ className: 'endpoint'
+ });
+ $('.endpoints', $(this.el)).append(operationView.render().el);
+ return this.number++;
+ };
+
+ return ResourceView;
+
+ })(Backbone.View);
+
+ // OperationView: the largest view — renders a single API operation (its
+ // sandbox form, parameters, response messages, content-type selectors),
+ // submits try-it-out requests (including multipart file uploads), and
+ // pretty-prints the response. CoffeeScript compiler output — edit the
+ // .coffee source, not this file.
+ OperationView = (function(_super) {
+ __extends(OperationView, _super);
+
+ function OperationView() {
+ _ref4 = OperationView.__super__.constructor.apply(this, arguments);
+ return _ref4;
+ }
+
+ // URL of the last submitted request; shown in .request_url and reused by
+ // wrap() when reporting errors.
+ OperationView.prototype.invocationUrl = null;
+
+ OperationView.prototype.events = {
+ 'submit .sandbox': 'submitOperation',
+ 'click .submit': 'submitOperation',
+ 'click .response_hider': 'hideResponse',
+ 'click .toggleOperation': 'toggleOperationContent'
+ };
+
+ OperationView.prototype.initialize = function() {};
+
+ // Renders the operation template, the response-class signature (or plain
+ // type), response content-type selector, one ParameterView per parameter
+ // and one StatusCodeView per response message.
+ OperationView.prototype.render = function() {
+ var contentTypeModel, isMethodSubmissionSupported, param, responseContentTypeView, responseSignatureView, signatureModel, statusCode, type, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7;
+ isMethodSubmissionSupported = true;
+ if (!isMethodSubmissionSupported) {
+ this.model.isReadOnly = true;
+ }
+ $(this.el).html(Handlebars.templates.operation(this.model));
+ if (this.model.responseClassSignature && this.model.responseClassSignature !== 'string') {
+ signatureModel = {
+ sampleJSON: this.model.responseSampleJSON,
+ isParam: false,
+ signature: this.model.responseClassSignature
+ };
+ responseSignatureView = new SignatureView({
+ model: signatureModel,
+ tagName: 'div'
+ });
+ $('.model-signature', $(this.el)).append(responseSignatureView.render().el);
+ } else {
+ $('.model-signature', $(this.el)).html(this.model.type);
+ }
+ contentTypeModel = {
+ isParam: false
+ };
+ contentTypeModel.consumes = this.model.consumes;
+ contentTypeModel.produces = this.model.produces;
+ _ref5 = this.model.parameters;
+ for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+ param = _ref5[_i];
+ // NOTE(review): if a parameter has neither `type` nor `dataType`,
+ // `type` is undefined and toLowerCase() throws — confirm the spec
+ // parser guarantees one of the two is always set.
+ type = param.type || param.dataType;
+ if (type.toLowerCase() === 'file') {
+ // A file parameter with no declared consumes forces multipart.
+ if (!contentTypeModel.consumes) {
+ console.log("set content type ");
+ contentTypeModel.consumes = 'multipart/form-data';
+ }
+ }
+ }
+ responseContentTypeView = new ResponseContentTypeView({
+ model: contentTypeModel
+ });
+ $('.response-content-type', $(this.el)).append(responseContentTypeView.render().el);
+ _ref6 = this.model.parameters;
+ for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+ param = _ref6[_j];
+ this.addParameter(param, contentTypeModel.consumes);
+ }
+ _ref7 = this.model.responseMessages;
+ for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+ statusCode = _ref7[_k];
+ this.addStatusCode(statusCode);
+ }
+ return this;
+ };
+
+ // Appends a ParameterView row; `consumes` is attached to the param so the
+ // row can render its content-type selector.
+ OperationView.prototype.addParameter = function(param, consumes) {
+ var paramView;
+ param.consumes = consumes;
+ paramView = new ParameterView({
+ model: param,
+ tagName: 'tr',
+ readOnly: this.model.isReadOnly
+ });
+ return $('.operation-params', $(this.el)).append(paramView.render().el);
+ };
+
+ OperationView.prototype.addStatusCode = function(statusCode) {
+ var statusCodeView;
+ statusCodeView = new StatusCodeView({
+ model: statusCode,
+ tagName: 'tr'
+ });
+ return $('.operation-status', $(this.el)).append(statusCodeView.render().el);
+ };
+
+ // "Try it out" submit handler: validates required inputs (wiggle + focus
+ // on the first empty one), gathers input/textarea/select values into a
+ // name->value map, then either hands off to handleFileUpload() or invokes
+ // the swagger client's model.do().
+ OperationView.prototype.submitOperation = function(e) {
+ var error_free, form, isFileUpload, map, o, opts, val, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7;
+ if (e != null) {
+ e.preventDefault();
+ }
+ form = $('.sandbox', $(this.el));
+ error_free = true;
+ form.find("input.required").each(function() {
+ var _this = this;
+ $(this).removeClass("error");
+ if (jQuery.trim($(this).val()) === "") {
+ $(this).addClass("error");
+ $(this).wiggle({
+ callback: function() {
+ return $(_this).focus();
+ }
+ });
+ return error_free = false;
+ }
+ });
+ if (error_free) {
+ map = {};
+ opts = {
+ parent: this
+ };
+ isFileUpload = false;
+ _ref5 = form.find("input");
+ for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+ o = _ref5[_i];
+ if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+ map[o.name] = o.value;
+ }
+ if (o.type === "file") {
+ isFileUpload = true;
+ }
+ }
+ _ref6 = form.find("textarea");
+ for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+ o = _ref6[_j];
+ if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+ // Any textarea is treated as the request body.
+ map["body"] = o.value;
+ }
+ }
+ _ref7 = form.find("select");
+ for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+ o = _ref7[_k];
+ val = this.getSelectedValue(o);
+ if ((val != null) && jQuery.trim(val).length > 0) {
+ map[o.name] = val;
+ }
+ }
+ opts.responseContentType = $("div select[name=responseContentType]", $(this.el)).val();
+ opts.requestContentType = $("div select[name=parameterContentType]", $(this.el)).val();
+ $(".response_throbber", $(this.el)).show();
+ if (isFileUpload) {
+ return this.handleFileUpload(map, form);
+ } else {
+ return this.model["do"](map, opts, this.showCompleteStatus, this.showErrorStatus, this);
+ }
+ }
+ };
+
+ OperationView.prototype.success = function(response, parent) {
+ return parent.showCompleteStatus(response);
+ };
+
+ // Multipart path: builds a FormData from form params + selected files,
+ // collects header params, and issues the request via jQuery.ajax directly
+ // (bypassing model.do()). Always returns false to stop form submission.
+ OperationView.prototype.handleFileUpload = function(map, form) {
+ var bodyParam, headerParams, o, obj, param, _i, _j, _k, _len, _len1, _len2, _ref5, _ref6, _ref7,
+ _this = this;
+ console.log("it's a file upload");
+ _ref5 = form.serializeArray();
+ for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+ o = _ref5[_i];
+ if ((o.value != null) && jQuery.trim(o.value).length > 0) {
+ map[o.name] = o.value;
+ }
+ }
+ bodyParam = new FormData();
+ _ref6 = this.model.parameters;
+ for (_j = 0, _len1 = _ref6.length; _j < _len1; _j++) {
+ param = _ref6[_j];
+ if (param.paramType === 'form') {
+ bodyParam.append(param.name, map[param.name]);
+ }
+ }
+ headerParams = {};
+ _ref7 = this.model.parameters;
+ for (_k = 0, _len2 = _ref7.length; _k < _len2; _k++) {
+ param = _ref7[_k];
+ if (param.paramType === 'header') {
+ headerParams[param.name] = map[param.name];
+ }
+ }
+ console.log(headerParams);
+ $.each($('input[type~="file"]'), function(i, el) {
+ return bodyParam.append($(el).attr('name'), el.files[0]);
+ });
+ console.log(bodyParam);
+ this.invocationUrl = this.model.supportHeaderParams() ? (headerParams = this.model.getHeaderParams(map), this.model.urlify(map, false)) : this.model.urlify(map, true);
+ $(".request_url", $(this.el)).html("<pre>" + this.invocationUrl + "</pre>");
+ obj = {
+ type: this.model.method,
+ url: this.invocationUrl,
+ headers: headerParams,
+ data: bodyParam,
+ dataType: 'json',
+ // contentType/processData false: let the browser set the multipart
+ // boundary and pass FormData through untouched.
+ contentType: false,
+ processData: false,
+ error: function(data, textStatus, error) {
+ return _this.showErrorStatus(_this.wrap(data), _this);
+ },
+ success: function(data) {
+ return _this.showResponse(data, _this);
+ },
+ complete: function(data) {
+ return _this.showCompleteStatus(_this.wrap(data), _this);
+ }
+ };
+ if (window.authorizations) {
+ window.authorizations.apply(obj);
+ }
+ jQuery.ajax(obj);
+ return false;
+ };
+
+ // Adapts a raw jqXHR into the response shape showStatus() expects
+ // (content.data / getHeaders() / request.url / status).
+ OperationView.prototype.wrap = function(data) {
+ var o,
+ _this = this;
+ o = {};
+ o.content = {};
+ o.content.data = data.responseText;
+ o.getHeaders = function() {
+ return {
+ "Content-Type": data.getResponseHeader("Content-Type")
+ };
+ };
+ o.request = {};
+ o.request.url = this.invocationUrl;
+ o.status = data.status;
+ return o;
+ };
+
+ // Single-select: the value; multi-select: comma-joined selected values,
+ // or null when nothing is selected.
+ OperationView.prototype.getSelectedValue = function(select) {
+ var opt, options, _i, _len, _ref5;
+ if (!select.multiple) {
+ return select.value;
+ } else {
+ options = [];
+ _ref5 = select.options;
+ for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
+ opt = _ref5[_i];
+ if (opt.selected) {
+ options.push(opt.value);
+ }
+ }
+ if (options.length > 0) {
+ return options.join(",");
+ } else {
+ return null;
+ }
+ }
+ };
+
+ OperationView.prototype.hideResponse = function(e) {
+ if (e != null) {
+ e.preventDefault();
+ }
+ $(".response", $(this.el)).slideUp();
+ return $(".response_hider", $(this.el)).fadeOut();
+ };
+
+ // Pretty-prints a JSON response into .response_body.
+ // NOTE(review): global escape() is URL percent-encoding, not HTML
+ // escaping — "<" survives and "+" etc. get mangled; presumably an
+ // HTML-escape was intended. Confirm against upstream swagger-ui.
+ OperationView.prototype.showResponse = function(response) {
+ var prettyJson;
+ prettyJson = JSON.stringify(response, null, "\t").replace(/\n/g, "<br>");
+ return $(".response_body", $(this.el)).html(escape(prettyJson));
+ };
+
+ OperationView.prototype.showErrorStatus = function(data, parent) {
+ return parent.showStatus(data);
+ };
+
+ OperationView.prototype.showCompleteStatus = function(data, parent) {
+ return parent.showStatus(data);
+ };
+
+ // Re-indents an XML string: splits tags onto their own lines, classifies
+ // each line as single/closing/opening/other, and adjusts indent via the
+ // `transitions` delta table keyed by "lastType->type".
+ OperationView.prototype.formatXml = function(xml) {
+ var contexp, formatted, indent, lastType, lines, ln, pad, reg, transitions, wsexp, _fn, _i, _len;
+ reg = /(>)(<)(\/*)/g;
+ wsexp = /[ ]*(.*)[ ]+\n/g;
+ contexp = /(<.+>)(.+\n)/g;
+ xml = xml.replace(reg, '$1\n$2$3').replace(wsexp, '$1\n').replace(contexp, '$1\n$2');
+ pad = 0;
+ formatted = '';
+ lines = xml.split('\n');
+ indent = 0;
+ lastType = 'other';
+ transitions = {
+ 'single->single': 0,
+ 'single->closing': -1,
+ 'single->opening': 0,
+ 'single->other': 0,
+ 'closing->single': 0,
+ 'closing->closing': -1,
+ 'closing->opening': 0,
+ 'closing->other': 0,
+ 'opening->single': 1,
+ 'opening->closing': 0,
+ 'opening->opening': 1,
+ 'opening->other': 1,
+ 'other->single': 0,
+ 'other->closing': -1,
+ 'other->opening': 0,
+ 'other->other': 0
+ };
+ _fn = function(ln) {
+ var fromTo, j, key, padding, type, types, value;
+ types = {
+ single: Boolean(ln.match(/<.+\/>/)),
+ closing: Boolean(ln.match(/<\/.+>/)),
+ opening: Boolean(ln.match(/<[^!?].*>/))
+ };
+ // First matching classification wins (single beats closing beats
+ // opening); unmatched lines are 'other'.
+ type = ((function() {
+ var _results;
+ _results = [];
+ for (key in types) {
+ value = types[key];
+ if (value) {
+ _results.push(key);
+ }
+ }
+ return _results;
+ })())[0];
+ type = type === void 0 ? 'other' : type;
+ fromTo = lastType + '->' + type;
+ lastType = type;
+ padding = '';
+ indent += transitions[fromTo];
+ padding = ((function() {
+ var _j, _ref5, _results;
+ _results = [];
+ for (j = _j = 0, _ref5 = indent; 0 <= _ref5 ? _j < _ref5 : _j > _ref5; j = 0 <= _ref5 ? ++_j : --_j) {
+ _results.push(' ');
+ }
+ return _results;
+ })()).join('');
+ if (fromTo === 'opening->closing') {
+ // Collapse <a>text</a> back onto one line (drop trailing newline).
+ return formatted = formatted.substr(0, formatted.length - 1) + ln + '\n';
+ } else {
+ return formatted += padding + ln + '\n';
+ }
+ };
+ for (_i = 0, _len = lines.length; _i < _len; _i++) {
+ ln = lines[_i];
+ _fn(ln);
+ }
+ return formatted;
+ };
+
+ // Renders the response panel: picks a display mode from Content-Type
+ // (json / xml / html / image / plain), fills request URL, status code,
+ // body and headers, then syntax-highlights the body with highlight.js.
+ OperationView.prototype.showStatus = function(data) {
+ var code, content, contentType, headers, pre, response_body;
+ content = data.content.data;
+ headers = data.getHeaders();
+ contentType = headers["Content-Type"];
+ if (content === void 0) {
+ code = $('<code />').text("no content");
+ pre = $('<pre class="json" />').append(code);
+ } else if (contentType.indexOf("application/json") === 0 || contentType.indexOf("application/hal+json") === 0) {
+ code = $('<code />').text(JSON.stringify(JSON.parse(content), null, 2));
+ pre = $('<pre class="json" />').append(code);
+ } else if (contentType.indexOf("application/xml") === 0) {
+ code = $('<code />').text(this.formatXml(content));
+ pre = $('<pre class="xml" />').append(code);
+ } else if (contentType.indexOf("text/html") === 0) {
+ code = $('<code />').html(content);
+ pre = $('<pre class="xml" />').append(code);
+ } else if (contentType.indexOf("image/") === 0) {
+ pre = $('<img>').attr('src', data.request.url);
+ } else {
+ code = $('<code />').text(content);
+ pre = $('<pre class="json" />').append(code);
+ }
+ response_body = pre;
+ $(".request_url", $(this.el)).html("<pre>" + data.request.url + "</pre>");
+ $(".response_code", $(this.el)).html("<pre>" + data.status + "</pre>");
+ $(".response_body", $(this.el)).html(response_body);
+ $(".response_headers", $(this.el)).html("<pre>" + JSON.stringify(data.getHeaders(), null, " ").replace(/\n/g, "<br>") + "</pre>");
+ $(".response", $(this.el)).slideDown();
+ $(".response_hider", $(this.el)).show();
+ $(".response_throbber", $(this.el)).hide();
+ return hljs.highlightBlock($('.response_body', $(this.el))[0]);
+ };
+
+ // Expands/collapses this operation's detail panel, addressed by the
+ // <resource>_<nickname>_<method>_<number>_content id convention.
+ OperationView.prototype.toggleOperationContent = function() {
+ var elem;
+ elem = $('#' + Docs.escapeResourceName(this.model.resourceName) + "_" + this.model.nickname + "_" + this.model.method + "_" + this.model.number + "_content");
+ if (elem.is(':visible')) {
+ return Docs.collapseOperation(elem);
+ } else {
+ return Docs.expandOperation(elem);
+ }
+ };
+
+ return OperationView;
+
+ })(Backbone.View);
+
+ // StatusCodeView: one <tr> in the response-messages table, rendered from
+ // the 'status_code' Handlebars template. CoffeeScript compiler output.
+ StatusCodeView = (function(_super) {
+ __extends(StatusCodeView, _super);
+
+ function StatusCodeView() {
+ _ref5 = StatusCodeView.__super__.constructor.apply(this, arguments);
+ return _ref5;
+ }
+
+ StatusCodeView.prototype.initialize = function() {};
+
+ StatusCodeView.prototype.render = function() {
+ var template;
+ template = this.template();
+ $(this.el).html(template(this.model));
+ return this;
+ };
+
+ StatusCodeView.prototype.template = function() {
+ return Handlebars.templates.status_code;
+ };
+
+ return StatusCodeView;
+
+ })(Backbone.View);
+
+ // ParameterView: one <tr> per operation parameter. Chooses among five
+ // param templates (list / readonly±required / required / plain), renders
+ // the model signature, and attaches the appropriate content-type selector
+ // (parameter-side for body params, response-side otherwise). CoffeeScript
+ // compiler output.
+ ParameterView = (function(_super) {
+ __extends(ParameterView, _super);
+
+ function ParameterView() {
+ _ref6 = ParameterView.__super__.constructor.apply(this, arguments);
+ return _ref6;
+ }
+
+ ParameterView.prototype.initialize = function() {};
+
+ ParameterView.prototype.render = function() {
+ var contentTypeModel, isParam, parameterContentTypeView, responseContentTypeView, signatureModel, signatureView, template, type;
+ type = this.model.type || this.model.dataType;
+ if (this.model.paramType === 'body') {
+ this.model.isBody = true;
+ }
+ if (type.toLowerCase() === 'file') {
+ this.model.isFile = true;
+ }
+ template = this.template();
+ $(this.el).html(template(this.model));
+ signatureModel = {
+ sampleJSON: this.model.sampleJSON,
+ isParam: true,
+ signature: this.model.signature
+ };
+ // With a sample payload, show the interactive Model/Model Schema tabs;
+ // otherwise fall back to the plain signature text.
+ if (this.model.sampleJSON) {
+ signatureView = new SignatureView({
+ model: signatureModel,
+ tagName: 'div'
+ });
+ $('.model-signature', $(this.el)).append(signatureView.render().el);
+ } else {
+ $('.model-signature', $(this.el)).html(this.model.signature);
+ }
+ isParam = false;
+ if (this.model.isBody) {
+ isParam = true;
+ }
+ contentTypeModel = {
+ isParam: isParam
+ };
+ contentTypeModel.consumes = this.model.consumes;
+ if (isParam) {
+ parameterContentTypeView = new ParameterContentTypeView({
+ model: contentTypeModel
+ });
+ $('.parameter-content-type', $(this.el)).append(parameterContentTypeView.render().el);
+ } else {
+ responseContentTypeView = new ResponseContentTypeView({
+ model: contentTypeModel
+ });
+ $('.response-content-type', $(this.el)).append(responseContentTypeView.render().el);
+ }
+ return this;
+ };
+
+ // Template selection: list > (readOnly x required) > plain param.
+ ParameterView.prototype.template = function() {
+ if (this.model.isList) {
+ return Handlebars.templates.param_list;
+ } else {
+ if (this.options.readOnly) {
+ if (this.model.required) {
+ return Handlebars.templates.param_readonly_required;
+ } else {
+ return Handlebars.templates.param_readonly;
+ }
+ } else {
+ if (this.model.required) {
+ return Handlebars.templates.param_required;
+ } else {
+ return Handlebars.templates.param;
+ }
+ }
+ }
+ };
+
+ return ParameterView;
+
+ })(Backbone.View);
+
+ // SignatureView: the Model / Model Schema tab widget rendered from the
+ // 'signature' template. For parameter signatures, clicking the snippet
+ // copies the sample JSON into the operation's body textarea. CoffeeScript
+ // compiler output.
+ SignatureView = (function(_super) {
+ __extends(SignatureView, _super);
+
+ function SignatureView() {
+ _ref7 = SignatureView.__super__.constructor.apply(this, arguments);
+ return _ref7;
+ }
+
+ SignatureView.prototype.events = {
+ 'click a.description-link': 'switchToDescription',
+ 'click a.snippet-link': 'switchToSnippet',
+ 'mousedown .snippet': 'snippetToTextArea'
+ };
+
+ SignatureView.prototype.initialize = function() {};
+
+ SignatureView.prototype.render = function() {
+ var template;
+ template = this.template();
+ $(this.el).html(template(this.model));
+ this.switchToDescription();
+ this.isParam = this.model.isParam;
+ if (this.isParam) {
+ $('.notice', $(this.el)).text('Click to set as parameter value');
+ }
+ return this;
+ };
+
+ SignatureView.prototype.template = function() {
+ return Handlebars.templates.signature;
+ };
+
+ // Show the Model tab, hide the Model Schema tab.
+ SignatureView.prototype.switchToDescription = function(e) {
+ if (e != null) {
+ e.preventDefault();
+ }
+ $(".snippet", $(this.el)).hide();
+ $(".description", $(this.el)).show();
+ $('.description-link', $(this.el)).addClass('selected');
+ return $('.snippet-link', $(this.el)).removeClass('selected');
+ };
+
+ // Show the Model Schema tab, hide the Model tab.
+ SignatureView.prototype.switchToSnippet = function(e) {
+ if (e != null) {
+ e.preventDefault();
+ }
+ $(".description", $(this.el)).hide();
+ $(".snippet", $(this.el)).show();
+ $('.snippet-link', $(this.el)).addClass('selected');
+ return $('.description-link', $(this.el)).removeClass('selected');
+ };
+
+ // On snippet click: if this is a parameter signature and the body
+ // textarea (three DOM levels up) is empty, fill it with the sample JSON.
+ SignatureView.prototype.snippetToTextArea = function(e) {
+ var textArea;
+ if (this.isParam) {
+ if (e != null) {
+ e.preventDefault();
+ }
+ textArea = $('textarea', $(this.el.parentNode.parentNode.parentNode));
+ if ($.trim(textArea.val()) === '') {
+ return textArea.val(this.model.sampleJSON);
+ }
+ }
+ };
+
+ return SignatureView;
+
+ })(Backbone.View);
+
+ ContentTypeView = (function(_super) {
+ __extends(ContentTypeView, _super);
+
+ function ContentTypeView() {
+ _ref8 = ContentTypeView.__super__.constructor.apply(this, arguments);
+ return _ref8;
+ }
+
+ ContentTypeView.prototype.initialize = function() {};
+
+ ContentTypeView.prototype.render = function() {
+ var template;
+ template = this.template();
+ $(this.el).html(template(this.model));
+ $('label[for=contentType]', $(this.el)).text('Response Content Type');
+ return this;
+ };
+
+ ContentTypeView.prototype.template = function() {
+ return Handlebars.templates.content_type;
+ };
+
+ return ContentTypeView;
+
+ })(Backbone.View);
+
+ ResponseContentTypeView = (function(_super) {
+ __extends(ResponseContentTypeView, _super);
+
+ function ResponseContentTypeView() {
+ _ref9 = ResponseContentTypeView.__super__.constructor.apply(this, arguments);
+ return _ref9;
+ }
+
+ ResponseContentTypeView.prototype.initialize = function() {};
+
+ ResponseContentTypeView.prototype.render = function() {
+ var template;
+ template = this.template();
+ $(this.el).html(template(this.model));
+ $('label[for=responseContentType]', $(this.el)).text('Response Content Type');
+ return this;
+ };
+
+ ResponseContentTypeView.prototype.template = function() {
+ return Handlebars.templates.response_content_type;
+ };
+
+ return ResponseContentTypeView;
+
+ })(Backbone.View);
+
+ ParameterContentTypeView = (function(_super) {
+ __extends(ParameterContentTypeView, _super);
+
+ function ParameterContentTypeView() {
+ _ref10 = ParameterContentTypeView.__super__.constructor.apply(this, arguments);
+ return _ref10;
+ }
+
+ ParameterContentTypeView.prototype.initialize = function() {};
+
+ ParameterContentTypeView.prototype.render = function() {
+ var template;
+ template = this.template();
+ $(this.el).html(template(this.model));
+ $('label[for=parameterContentType]', $(this.el)).text('Parameter content type:');
+ return this;
+ };
+
+ ParameterContentTypeView.prototype.template = function() {
+ return Handlebars.templates.parameter_content_type;
+ };
+
+ return ParameterContentTypeView;
+
+ })(Backbone.View);
+
+}).call(this);
--- /dev/null
+// Generated by CoffeeScript 1.6.3
+(function() {
+ var ApiKeyAuthorization, PasswordAuthorization, SwaggerApi, SwaggerAuthorizations, SwaggerHttp, SwaggerModel, SwaggerModelProperty, SwaggerOperation, SwaggerRequest, SwaggerResource,
+ __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
+
+ // Top-level swagger.js client. Loads a resource listing (here via the
+ // patched-in global getSpec() instead of an HTTP fetch), then builds
+ // SwaggerResource/SwaggerOperation wrappers from it.
+ SwaggerApi = (function() {
+ SwaggerApi.prototype.url = "http://api.wordnik.com/v4/resources.json";
+
+ SwaggerApi.prototype.debug = false;
+
+ SwaggerApi.prototype.basePath = null;
+
+ SwaggerApi.prototype.authorizations = null;
+
+ SwaggerApi.prototype.authorizationScheme = null;
+
+ SwaggerApi.prototype.info = null;
+
+ // Accepts either (url, options) or a single options object carrying a
+ // `url` property. build() runs immediately when a success callback is
+ // supplied.
+ function SwaggerApi(url, options) {
+ if (options == null) {
+ options = {};
+ }
+ if (url) {
+ if (url.url) {
+ options = url;
+ } else {
+ this.url = url;
+ }
+ } else {
+ // NOTE(review): when url is falsy this overwrites the (possibly
+ // caller-supplied or defaulted) options with that falsy value, so
+ // the options.url access below would throw. Present in the
+ // upstream generated code; confirm before relying on no-arg use.
+ options = url;
+ }
+ if (options.url != null) {
+ this.url = options.url;
+ }
+ if (options.success != null) {
+ this.success = options.success;
+ }
+ this.failure = options.failure != null ? options.failure : function() {};
+ this.progress = options.progress != null ? options.progress : function() {};
+ if (options.success != null) {
+ this.build();
+ }
+ }
+
+ // Parses the embedded spec and dispatches on swaggerVersion. The
+ // 500ms setTimeout defers building; presumably to let the progress UI
+ // paint first -- TODO confirm. Locals e/obj are declared but unused
+ // (leftovers from the removed XHR path).
+ SwaggerApi.prototype.build = function() {
+ var e, obj,
+ _this = this;
+ this.progress('fetching resource list: ' + this.url);
+ var response;
+ response = JSON.parse(getSpec());
+
+ _this.swaggerVersion = response.swaggerVersion;
+ if (_this.swaggerVersion === "1.2") {
+ setTimeout(function(){return _this.buildFromSpec(response);}, 500);
+ } else {
+ return setTimeout(function(){return _this.buildFrom1_1Spec(response);}, 500);
+ }
+ return this;
+ };
+
+ // Builds resources from a Swagger 1.2 resource listing. If any api
+ // entry has inline operations the whole response is treated as a
+ // single api declaration; otherwise each api entry becomes a
+ // SwaggerResource fetched separately.
+ SwaggerApi.prototype.buildFromSpec = function(response) {
+ var api, isApi, newName, operation, res, resource, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2;
+ if (response.apiVersion != null) {
+ this.apiVersion = response.apiVersion;
+ }
+ this.apis = {};
+ this.apisArray = [];
+ this.produces = response.produces;
+ this.authSchemes = response.authorizations;
+ if (response.info != null) {
+ this.info = response.info;
+ }
+ isApi = false;
+ _ref = response.apis;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ api = _ref[_i];
+ if (api.operations) {
+ _ref1 = api.operations;
+ for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+ operation = _ref1[_j];
+ isApi = true;
+ }
+ }
+ }
+ if (response.basePath) {
+ this.basePath = response.basePath;
+ } else if (this.url.indexOf('?') > 0) {
+ this.basePath = this.url.substring(0, this.url.lastIndexOf('?'));
+ } else {
+ this.basePath = this.url;
+ }
+ if (isApi) {
+ newName = response.resourcePath.replace(/\//g, '');
+ this.resourcePath = response.resourcePath;
+ res = new SwaggerResource(response, this);
+ this.apis[newName] = res;
+ this.apisArray.push(res);
+ } else {
+ _ref2 = response.apis;
+ for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
+ resource = _ref2[_k];
+ res = new SwaggerResource(resource, this);
+ this.apis[res.name] = res;
+ this.apisArray.push(res);
+ }
+ }
+ if (this.success) {
+ this.success();
+ }
+ return this;
+ };
+
+ // Legacy (pre-1.2) spec path. NOTE(review): near-duplicate of
+ // buildFromSpec (only authSchemes handling differs); keep the two in
+ // sync when patching either.
+ SwaggerApi.prototype.buildFrom1_1Spec = function(response) {
+ var api, isApi, newName, operation, res, resource, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2;
+ console.log("This API is using a deprecated version of Swagger! Please see http://github.com/wordnik/swagger-core/wiki for more info");
+ if (response.apiVersion != null) {
+ this.apiVersion = response.apiVersion;
+ }
+ this.apis = {};
+ this.apisArray = [];
+ this.produces = response.produces;
+ if (response.info != null) {
+ this.info = response.info;
+ }
+ isApi = false;
+ _ref = response.apis;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ api = _ref[_i];
+ if (api.operations) {
+ _ref1 = api.operations;
+ for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+ operation = _ref1[_j];
+ isApi = true;
+ }
+ }
+ }
+ if (response.basePath) {
+ this.basePath = response.basePath;
+ } else if (this.url.indexOf('?') > 0) {
+ this.basePath = this.url.substring(0, this.url.lastIndexOf('?'));
+ } else {
+ this.basePath = this.url;
+ }
+ if (isApi) {
+ newName = response.resourcePath.replace(/\//g, '');
+ this.resourcePath = response.resourcePath;
+ res = new SwaggerResource(response, this);
+ this.apis[newName] = res;
+ this.apisArray.push(res);
+ } else {
+ _ref2 = response.apis;
+ for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
+ resource = _ref2[_k];
+ res = new SwaggerResource(resource, this);
+ this.apis[res.name] = res;
+ this.apisArray.push(res);
+ }
+ }
+ if (this.success) {
+ this.success();
+ }
+ return this;
+ };
+
+ // Fires the user's success callback once every resource has finished
+ // loading (each resource calls back into this as it becomes ready).
+ SwaggerApi.prototype.selfReflect = function() {
+ var resource, resource_name, _ref;
+ if (this.apis == null) {
+ return false;
+ }
+ _ref = this.apis;
+ for (resource_name in _ref) {
+ resource = _ref[resource_name];
+ if (resource.ready == null) {
+ return false;
+ }
+ }
+ this.setConsolidatedModels();
+ this.ready = true;
+ if (this.success != null) {
+ return this.success();
+ }
+ };
+
+ // Reports via the failure callback and then rethrows the message.
+ SwaggerApi.prototype.fail = function(message) {
+ this.failure(message);
+ throw message;
+ };
+
+ // Merges each resource's models into a single api-wide map (first
+ // resource wins on name clashes) and resolves cross-model references.
+ SwaggerApi.prototype.setConsolidatedModels = function() {
+ var model, modelName, resource, resource_name, _i, _len, _ref, _ref1, _results;
+ this.modelsArray = [];
+ this.models = {};
+ _ref = this.apis;
+ for (resource_name in _ref) {
+ resource = _ref[resource_name];
+ for (modelName in resource.models) {
+ if (this.models[modelName] == null) {
+ this.models[modelName] = resource.models[modelName];
+ this.modelsArray.push(resource.models[modelName]);
+ }
+ }
+ }
+ _ref1 = this.modelsArray;
+ _results = [];
+ for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+ model = _ref1[_i];
+ _results.push(model.setReferencedModels(this.models));
+ }
+ return _results;
+ };
+
+ // Console-dumps every resource, operation, and parameter.
+ SwaggerApi.prototype.help = function() {
+ var operation, operation_name, parameter, resource, resource_name, _i, _len, _ref, _ref1, _ref2;
+ _ref = this.apis;
+ for (resource_name in _ref) {
+ resource = _ref[resource_name];
+ console.log(resource_name);
+ _ref1 = resource.operations;
+ for (operation_name in _ref1) {
+ operation = _ref1[operation_name];
+ console.log(" " + operation.nickname);
+ _ref2 = operation.parameters;
+ for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
+ parameter = _ref2[_i];
+ console.log(" " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description);
+ }
+ }
+ }
+ return this;
+ };
+
+ return SwaggerApi;
+
+ })();
+
+ // One entry from the resource listing: loads its api declaration (here
+ // via the patched-in global jsonFor() instead of an HTTP fetch) and
+ // builds operations and models from it.
+ SwaggerResource = (function() {
+ SwaggerResource.prototype.api = null;
+
+ SwaggerResource.prototype.produces = null;
+
+ SwaggerResource.prototype.consumes = null;
+
+ function SwaggerResource(resourceObj, api) {
+ // Locals e/obj and the produces/consumes arrays below are declared
+ // but never used (generated-code leftovers).
+ var consumes, e, obj, parts, produces,
+ _this = this;
+ this.api = api;
+ // NOTE(review): self-assignment is a no-op artifact of the
+ // CoffeeScript compiler; kept byte-identical here.
+ this.api = this.api;
+ produces = [];
+ consumes = [];
+ this.path = this.api.resourcePath != null ? this.api.resourcePath : resourceObj.path;
+ this.description = resourceObj.description;
+ parts = this.path.split("/");
+ this.name = parts[parts.length - 1].replace('.{format}', '');
+ this.basePath = this.api.basePath;
+ this.operations = {};
+ this.operationsArray = [];
+ this.modelsArray = [];
+ this.models = {};
+ if ((resourceObj.apis != null) && (this.api.resourcePath != null)) {
+ // Inline api declaration: no separate fetch needed.
+ this.addApiDeclaration(resourceObj);
+ } else {
+ if (this.path == null) {
+ this.api.fail("SwaggerResources must have a path.");
+ }
+ if (this.path.substring(0, 4) === 'http') {
+ this.url = this.path.replace('{format}', 'json');
+ } else {
+ this.url = this.api.basePath + this.path.replace('{format}', 'json');
+ }
+ this.api.progress('fetching resource ' + this.name + ': ' + this.url);
+ // Resolve the declaration from the embedded static spec by the
+ // last path segment, then defer processing (mirrors build()).
+ var response;
+ var split = resourceObj.path.split("/");
+ var resource = split[split.length-1];
+ response = JSON.parse(jsonFor(resource));
+ setTimeout(function(){return _this.addApiDeclaration(response);}, 500);
+ }
+ }
+
+ // Absorbs an api declaration: content types, models, operations;
+ // marks this resource ready and pokes the api to check completion.
+ SwaggerResource.prototype.addApiDeclaration = function(response) {
+ var endpoint, _i, _len, _ref;
+ if (response.produces != null) {
+ this.produces = response.produces;
+ }
+ if (response.consumes != null) {
+ this.consumes = response.consumes;
+ }
+ if ((response.basePath != null) && response.basePath.replace(/\s/g, '').length > 0) {
+ this.basePath = response.basePath;
+ }
+ this.addModels(response.models);
+ if (response.apis) {
+ _ref = response.apis;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ endpoint = _ref[_i];
+ this.addOperations(endpoint.path, endpoint.operations, response.consumes, response.produces);
+ }
+ }
+ this.api[this.name] = this;
+ this.ready = true;
+ return this.api.selfReflect();
+ };
+
+ // Registers each model once, then resolves references between them.
+ SwaggerResource.prototype.addModels = function(models) {
+ var model, modelName, swaggerModel, _i, _len, _ref, _results;
+ if (models != null) {
+ for (modelName in models) {
+ if (this.models[modelName] == null) {
+ swaggerModel = new SwaggerModel(modelName, models[modelName]);
+ this.modelsArray.push(swaggerModel);
+ this.models[modelName] = swaggerModel;
+ }
+ }
+ _ref = this.modelsArray;
+ _results = [];
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ model = _ref[_i];
+ _results.push(model.setReferencedModels(this.models));
+ }
+ return _results;
+ }
+ };
+
+ // Normalizes each raw operation object (1.1 and 1.2 field spellings)
+ // into a SwaggerOperation and indexes it by sanitized nickname.
+ SwaggerResource.prototype.addOperations = function(resource_path, ops, consumes, produces) {
+ var method, o, op, r, ref, responseMessages, type, _i, _j, _len, _len1, _results;
+ if (ops) {
+ _results = [];
+ for (_i = 0, _len = ops.length; _i < _len; _i++) {
+ o = ops[_i];
+ // Per-operation consumes/produces override the resource-level
+ // defaults when present.
+ consumes = this.consumes;
+ produces = this.produces;
+ if (o.consumes != null) {
+ consumes = o.consumes;
+ } else {
+ consumes = this.consumes;
+ }
+ if (o.produces != null) {
+ produces = o.produces;
+ } else {
+ produces = this.produces;
+ }
+ type = o.type || o.responseClass;
+ if (type === "array") {
+ ref = null;
+ if (o.items) {
+ ref = o.items["type"] || o.items["$ref"];
+ }
+ type = "array[" + ref + "]";
+ }
+ responseMessages = o.responseMessages;
+ method = o.method;
+ if (o.httpMethod) {
+ method = o.httpMethod;
+ }
+ if (o.supportedContentTypes) {
+ consumes = o.supportedContentTypes;
+ }
+ if (o.errorResponses) {
+ // Legacy errorResponses use `reason`; migrate to `message`.
+ responseMessages = o.errorResponses;
+ for (_j = 0, _len1 = responseMessages.length; _j < _len1; _j++) {
+ r = responseMessages[_j];
+ r.message = r.reason;
+ r.reason = null;
+ }
+ }
+ o.nickname = this.sanitize(o.nickname);
+ op = new SwaggerOperation(o.nickname, resource_path, method, o.parameters, o.summary, o.notes, type, responseMessages, this, consumes, produces);
+ this.operations[op.nickname] = op;
+ _results.push(this.operationsArray.push(op));
+ }
+ return _results;
+ }
+ };
+
+ // Turns a nickname into a safe identifier: punctuation/whitespace to
+ // '_', collapse runs, strip leading/trailing underscores.
+ SwaggerResource.prototype.sanitize = function(nickname) {
+ var op;
+ op = nickname.replace(/[\s!@#$%^&*()_+=\[{\]};:<>|./?,\\'""-]/g, '_');
+ op = op.replace(/((_){2,})/g, '_');
+ op = op.replace(/^(_)*/g, '');
+ op = op.replace(/([_])*$/g, '');
+ return op;
+ };
+
+ // Returns one summary string per operation.
+ // NOTE(review): String.concat returns a new string and the result is
+ // discarded, so the parameter details are never appended to msg --
+ // looks like a bug (should be msg = msg.concat(...)); confirm intent.
+ SwaggerResource.prototype.help = function() {
+ var msg, operation, operation_name, parameter, _i, _len, _ref, _ref1, _results;
+ _ref = this.operations;
+ _results = [];
+ for (operation_name in _ref) {
+ operation = _ref[operation_name];
+ msg = " " + operation.nickname;
+ _ref1 = operation.parameters;
+ for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+ parameter = _ref1[_i];
+ msg.concat(" " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description);
+ }
+ _results.push(msg);
+ }
+ return _results;
+ };
+
+ return SwaggerResource;
+
+ })();
+
+ // Wraps one model definition from the spec; builds property wrappers,
+ // resolves references to sibling models, and produces HTML signatures
+ // and JSON samples for the UI.
+ SwaggerModel = (function() {
+ function SwaggerModel(modelName, obj) {
+ var prop, propertyName, value;
+ this.name = obj.id != null ? obj.id : modelName;
+ this.properties = [];
+ for (propertyName in obj.properties) {
+ // Swagger 1.2 lists required property names in obj.required;
+ // flag the matching property objects before wrapping them.
+ if (obj.required != null) {
+ for (value in obj.required) {
+ if (propertyName === obj.required[value]) {
+ obj.properties[propertyName].required = true;
+ }
+ }
+ }
+ prop = new SwaggerModelProperty(propertyName, obj.properties[propertyName]);
+ this.properties.push(prop);
+ }
+ }
+
+ // Links each property whose (item) type names another model to that
+ // model object, for recursive signature/sample generation.
+ SwaggerModel.prototype.setReferencedModels = function(allModels) {
+ var prop, type, _i, _len, _ref, _results;
+ _ref = this.properties;
+ _results = [];
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ prop = _ref[_i];
+ type = prop.type || prop.dataType;
+ if (allModels[type] != null) {
+ _results.push(prop.refModel = allModels[type]);
+ } else if ((prop.refDataType != null) && (allModels[prop.refDataType] != null)) {
+ _results.push(prop.refModel = allModels[prop.refDataType]);
+ } else {
+ _results.push(void 0);
+ }
+ }
+ return _results;
+ };
+
+ // Builds the HTML signature for this model plus any referenced
+ // models; modelsToIgnore prevents infinite recursion on cycles.
+ SwaggerModel.prototype.getMockSignature = function(modelsToIgnore) {
+ var classClose, classOpen, prop, propertiesStr, returnVal, strong, strongClose, stronger, _i, _j, _len, _len1, _ref, _ref1;
+ propertiesStr = [];
+ _ref = this.properties;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ prop = _ref[_i];
+ propertiesStr.push(prop.toString());
+ }
+ strong = '<span class="strong">';
+ stronger = '<span class="stronger">';
+ strongClose = '</span>';
+ classOpen = strong + this.name + ' {' + strongClose;
+ classClose = strong + '}' + strongClose;
+ returnVal = classOpen + '<div>' + propertiesStr.join(',</div><div>') + '</div>' + classClose;
+ if (!modelsToIgnore) {
+ modelsToIgnore = [];
+ }
+ modelsToIgnore.push(this);
+ _ref1 = this.properties;
+ for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+ prop = _ref1[_j];
+ if ((prop.refModel != null) && (modelsToIgnore.indexOf(prop.refModel)) === -1) {
+ returnVal = returnVal + ('<br>' + prop.refModel.getMockSignature(modelsToIgnore));
+ }
+ }
+ return returnVal;
+ };
+
+ // Builds a sample JSON object for this model; modelsToIgnore (names)
+ // guards against cyclic references during recursion.
+ SwaggerModel.prototype.createJSONSample = function(modelsToIgnore) {
+ var prop, result, _i, _len, _ref;
+ result = {};
+ modelsToIgnore = modelsToIgnore || [];
+ modelsToIgnore.push(this.name);
+ _ref = this.properties;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ prop = _ref[_i];
+ result[prop.name] = prop.getSampleValue(modelsToIgnore);
+ }
+ // NOTE(review): Array.pop ignores its argument -- this always pops
+ // the last entry, which happens to be this.name only when recursion
+ // unwinds in order; works here but the argument is misleading.
+ modelsToIgnore.pop(this.name);
+ return result;
+ };
+
+ return SwaggerModel;
+
+ })();
+
+ // Wraps one property of a model: normalizes type/dataType/$ref across
+ // spec versions, tracks collection item types, and renders the HTML
+ // fragment used in model signatures.
+ SwaggerModelProperty = (function() {
+ function SwaggerModelProperty(name, obj) {
+ this.name = name;
+ this.dataType = obj.type || obj.dataType || obj["$ref"];
+ this.isCollection = this.dataType && (this.dataType.toLowerCase() === 'array' || this.dataType.toLowerCase() === 'list' || this.dataType.toLowerCase() === 'set');
+ this.descr = obj.description;
+ this.required = obj.required;
+ if (obj.items != null) {
+ if (obj.items.type != null) {
+ this.refDataType = obj.items.type;
+ }
+ if (obj.items.$ref != null) {
+ this.refDataType = obj.items.$ref;
+ }
+ }
+ this.dataTypeWithRef = this.refDataType != null ? this.dataType + '[' + this.refDataType + ']' : this.dataType;
+ if (obj.allowableValues != null) {
+ this.valueType = obj.allowableValues.valueType;
+ this.values = obj.allowableValues.values;
+ if (this.values != null) {
+ this.valuesString = "'" + this.values.join("' or '") + "'";
+ }
+ }
+ // NOTE(review): the enum branch assigns `valueString` while the
+ // allowableValues branch assigns `valuesString` -- inconsistent
+ // spelling; benign here since toString() joins this.values itself,
+ // but verify nothing else reads either field.
+ if (obj["enum"] != null) {
+ this.valueType = "string";
+ this.values = obj["enum"];
+ if (this.values != null) {
+ this.valueString = "'" + this.values.join("' or '") + "'";
+ }
+ }
+ }
+
+ // Sample value: recurse into the referenced model unless it is being
+ // ignored (cycle guard); otherwise fall back to the type name. Wrap
+ // in an array for collection properties.
+ SwaggerModelProperty.prototype.getSampleValue = function(modelsToIgnore) {
+ var result;
+ if ((this.refModel != null) && (modelsToIgnore.indexOf(this.refModel.name) === -1)) {
+ result = this.refModel.createJSONSample(modelsToIgnore);
+ } else {
+ if (this.isCollection) {
+ result = this.refDataType;
+ } else {
+ result = this.dataType;
+ }
+ }
+ if (this.isCollection) {
+ return [result];
+ } else {
+ return result;
+ }
+ };
+
+ // HTML fragment: name, type, optional marker, allowed values, and
+ // description, styled for the signature panel.
+ SwaggerModelProperty.prototype.toString = function() {
+ var req, str;
+ req = this.required ? 'propReq' : 'propOpt';
+ str = '<span class="propName ' + req + '">' + this.name + '</span> (<span class="propType">' + this.dataTypeWithRef + '</span>';
+ if (!this.required) {
+ str += ', <span class="propOptKey">optional</span>';
+ }
+ str += ')';
+ if (this.values != null) {
+ str += " = <span class='propVals'>['" + this.values.join("' or '") + "']</span>";
+ }
+ if (this.descr != null) {
+ str += ': <span class="propDesc">' + this.descr + '</span>';
+ }
+ return str;
+ };
+
+ return SwaggerModelProperty;
+
+ })();
+
+ // One callable operation. The constructor normalizes parameters
+ // (enums, allowable values, signatures, sample JSON) and installs an
+ // invoker on the resource under the operation's nickname.
+ SwaggerOperation = (function() {
+ function SwaggerOperation(nickname, path, method, parameters, summary, notes, type, responseMessages, resource, consumes, produces) {
+ var parameter, v, _i, _j, _k, _len, _len1, _len2, _ref, _ref1, _ref2, _ref3,
+ _this = this;
+ this.nickname = nickname;
+ this.path = path;
+ this.method = method;
+ this.parameters = parameters != null ? parameters : [];
+ this.summary = summary;
+ this.notes = notes;
+ this.type = type;
+ this.responseMessages = responseMessages;
+ this.resource = resource;
+ this.consumes = consumes;
+ this.produces = produces;
+ this["do"] = __bind(this["do"], this);
+ if (this.nickname == null) {
+ this.resource.api.fail("SwaggerOperations must have a nickname.");
+ }
+ if (this.path == null) {
+ this.resource.api.fail("SwaggerOperation " + nickname + " is missing path.");
+ }
+ if (this.method == null) {
+ this.resource.api.fail("SwaggerOperation " + nickname + " is missing method.");
+ }
+ this.path = this.path.replace('{format}', 'json');
+ this.method = this.method.toLowerCase();
+ this.isGetMethod = this.method === "get";
+ this.resourceName = this.resource.name;
+ // A declared 'void' return type is erased.
+ if (((_ref = this.type) != null ? _ref.toLowerCase() : void 0) === 'void') {
+ this.type = void 0;
+ }
+ if (this.type != null) {
+ this.responseClassSignature = this.getSignature(this.type, this.resource.models);
+ this.responseSampleJSON = this.getSampleJSON(this.type, this.resource.models);
+ }
+ this.responseMessages = this.responseMessages || [];
+ _ref1 = this.parameters;
+ for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+ parameter = _ref1[_i];
+ parameter.name = parameter.name || parameter.type || parameter.dataType;
+ type = parameter.type || parameter.dataType;
+ if (type.toLowerCase() === 'boolean') {
+ parameter.allowableValues = {};
+ parameter.allowableValues.values = ["true", "false"];
+ }
+ parameter.signature = this.getSignature(type, this.resource.models);
+ parameter.sampleJSON = this.getSampleJSON(type, this.resource.models);
+ // 1.2-style enum: expand into descriptiveValues with the default
+ // flagged so the UI can preselect it.
+ if (parameter["enum"] != null) {
+ parameter.isList = true;
+ parameter.allowableValues = {};
+ parameter.allowableValues.descriptiveValues = [];
+ _ref2 = parameter["enum"];
+ for (_j = 0, _len1 = _ref2.length; _j < _len1; _j++) {
+ v = _ref2[_j];
+ if ((parameter.defaultValue != null) && parameter.defaultValue === v) {
+ parameter.allowableValues.descriptiveValues.push({
+ value: v,
+ isDefault: true
+ });
+ } else {
+ parameter.allowableValues.descriptiveValues.push({
+ value: v,
+ isDefault: false
+ });
+ }
+ }
+ }
+ // 1.1-style allowableValues: same expansion, plus RANGE support.
+ if (parameter.allowableValues != null) {
+ if (parameter.allowableValues.valueType === "RANGE") {
+ parameter.isRange = true;
+ } else {
+ parameter.isList = true;
+ }
+ if (parameter.allowableValues.values != null) {
+ parameter.allowableValues.descriptiveValues = [];
+ _ref3 = parameter.allowableValues.values;
+ for (_k = 0, _len2 = _ref3.length; _k < _len2; _k++) {
+ v = _ref3[_k];
+ if ((parameter.defaultValue != null) && parameter.defaultValue === v) {
+ parameter.allowableValues.descriptiveValues.push({
+ value: v,
+ isDefault: true
+ });
+ } else {
+ parameter.allowableValues.descriptiveValues.push({
+ value: v,
+ isDefault: false
+ });
+ }
+ }
+ }
+ }
+ }
+ // Install resource.<nickname>(args, callback, error).
+ // NOTE(review): this wrapper forwards three arguments while "do"
+ // declares (args, opts, callback, error); "do"'s typeof checks
+ // reshuffle them, so callers here never pass opts -- confirm that
+ // is intended before changing either side.
+ this.resource[this.nickname] = function(args, callback, error) {
+ return _this["do"](args, callback, error);
+ };
+ this.resource[this.nickname].help = function() {
+ return _this.help();
+ };
+ }
+
+ // "array[Foo]" -> "Foo"; undefined for non-list types.
+ SwaggerOperation.prototype.isListType = function(type) {
+ if (type.indexOf('[') >= 0) {
+ return type.substring(type.indexOf('[') + 1, type.indexOf(']'));
+ } else {
+ return void 0;
+ }
+ };
+
+ // HTML signature: the bare type name for primitives, otherwise the
+ // referenced model's mock signature.
+ SwaggerOperation.prototype.getSignature = function(type, models) {
+ var isPrimitive, listType;
+ listType = this.isListType(type);
+ isPrimitive = ((listType != null) && models[listType]) || (models[type] != null) ? false : true;
+ if (isPrimitive) {
+ return type;
+ } else {
+ if (listType != null) {
+ return models[listType].getMockSignature();
+ } else {
+ return models[type].getMockSignature();
+ }
+ }
+ };
+
+ // Pretty-printed sample JSON for model types (arrays wrapped);
+ // undefined for primitives.
+ SwaggerOperation.prototype.getSampleJSON = function(type, models) {
+ var isPrimitive, listType, val;
+ listType = this.isListType(type);
+ isPrimitive = ((listType != null) && models[listType]) || (models[type] != null) ? false : true;
+ val = isPrimitive ? void 0 : (listType != null ? models[listType].createJSONSample() : models[type].createJSONSample());
+ if (val) {
+ val = listType ? [val] : val;
+ return JSON.stringify(val, null, 2);
+ }
+ };
+
+ // Executes the operation: sorts out the flexible (args, opts,
+ // callback, error) calling conventions, collects header/body/form
+ // params, and issues a SwaggerRequest.
+ SwaggerOperation.prototype["do"] = function(args, opts, callback, error) {
+ var key, param, params, possibleParams, req, requestContentType, responseContentType, value, _i, _len, _ref;
+ if (args == null) {
+ args = {};
+ }
+ if (opts == null) {
+ opts = {};
+ }
+ requestContentType = null;
+ responseContentType = null;
+ // Shift arguments left when args/opts were omitted by the caller.
+ if ((typeof args) === "function") {
+ error = opts;
+ callback = args;
+ args = {};
+ }
+ if ((typeof opts) === "function") {
+ error = callback;
+ callback = opts;
+ }
+ if (error == null) {
+ error = function(xhr, textStatus, error) {
+ return console.log(xhr, textStatus, error);
+ };
+ }
+ if (callback == null) {
+ callback = function(data) {
+ var content;
+ content = null;
+ if (data.content != null) {
+ content = data.content.data;
+ } else {
+ content = "no data";
+ }
+ return console.log("default callback: " + content);
+ };
+ }
+ params = {};
+ params.headers = [];
+ if (args.headers != null) {
+ params.headers = args.headers;
+ delete args.headers;
+ }
+ _ref = this.parameters;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (param.paramType === "header") {
+ if (args[param.name]) {
+ params.headers[param.name] = args[param.name];
+ }
+ }
+ }
+ if (args.body != null) {
+ params.body = args.body;
+ delete args.body;
+ }
+ // Form and file params are copied through to the request params.
+ possibleParams = (function() {
+ var _j, _len1, _ref1, _results;
+ _ref1 = this.parameters;
+ _results = [];
+ for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+ param = _ref1[_j];
+ if (param.paramType === "form" || param.paramType.toLowerCase() === "file") {
+ _results.push(param);
+ }
+ }
+ return _results;
+ }).call(this);
+ if (possibleParams) {
+ for (key in possibleParams) {
+ value = possibleParams[key];
+ if (args[value.name]) {
+ params[value.name] = args[value.name];
+ }
+ }
+ }
+ req = new SwaggerRequest(this.method, this.urlify(args), params, opts, callback, error, this);
+ if (opts.mock != null) {
+ return req;
+ } else {
+ return true;
+ }
+ };
+
+ SwaggerOperation.prototype.pathJson = function() {
+ return this.path.replace("{format}", "json");
+ };
+
+ SwaggerOperation.prototype.pathXml = function() {
+ return this.path.replace("{format}", "xml");
+ };
+
+ // Builds the request URL: substitutes required path params (throwing
+ // if one is missing) and appends URL-encoded query params.
+ SwaggerOperation.prototype.urlify = function(args) {
+ var param, queryParams, reg, url, _i, _j, _len, _len1, _ref, _ref1;
+ url = this.resource.basePath + this.pathJson();
+ _ref = this.parameters;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (param.paramType === 'path') {
+ if (args[param.name]) {
+ reg = new RegExp('\{' + param.name + '[^\}]*\}', 'gi');
+ url = url.replace(reg, encodeURIComponent(args[param.name]));
+ delete args[param.name];
+ } else {
+ throw "" + param.name + " is a required path param.";
+ }
+ }
+ }
+ queryParams = "";
+ _ref1 = this.parameters;
+ for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
+ param = _ref1[_j];
+ if (param.paramType === 'query') {
+ if (args[param.name]) {
+ if (queryParams !== "") {
+ queryParams += "&";
+ }
+ queryParams += encodeURIComponent(param.name) + '=' + encodeURIComponent(args[param.name]);
+ }
+ }
+ }
+ if ((queryParams != null) && queryParams.length > 0) {
+ url += "?" + queryParams;
+ }
+ return url;
+ };
+
+ SwaggerOperation.prototype.supportHeaderParams = function() {
+ return this.resource.api.supportHeaderParams;
+ };
+
+ SwaggerOperation.prototype.supportedSubmitMethods = function() {
+ return this.resource.api.supportedSubmitMethods;
+ };
+
+ SwaggerOperation.prototype.getQueryParams = function(args) {
+ return this.getMatchingParams(['query'], args);
+ };
+
+ SwaggerOperation.prototype.getHeaderParams = function(args) {
+ return this.getMatchingParams(['header'], args);
+ };
+
+ // NOTE(review): paramTypes is accepted but never consulted -- every
+ // parameter present in args is returned regardless of its paramType,
+ // so getQueryParams/getHeaderParams currently behave identically.
+ // Confirm whether filtering by paramType was intended.
+ SwaggerOperation.prototype.getMatchingParams = function(paramTypes, args) {
+ var matchingParams, name, param, value, _i, _len, _ref, _ref1;
+ matchingParams = {};
+ _ref = this.parameters;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (args && args[param.name]) {
+ matchingParams[param.name] = args[param.name];
+ }
+ }
+ _ref1 = this.resource.api.headers;
+ for (name in _ref1) {
+ value = _ref1[name];
+ matchingParams[name] = value;
+ }
+ return matchingParams;
+ };
+
+ // Multi-line textual summary of this operation's parameters.
+ SwaggerOperation.prototype.help = function() {
+ var msg, parameter, _i, _len, _ref;
+ msg = "";
+ _ref = this.parameters;
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ parameter = _ref[_i];
+ if (msg !== "") {
+ msg += "\n";
+ }
+ msg += "* " + parameter.name + (parameter.required ? ' (required)' : '') + " - " + parameter.description;
+ }
+ return msg;
+ };
+
+ return SwaggerOperation;
+
+ })();
+
+ SwaggerRequest = (function() {
+ function SwaggerRequest(type, url, params, opts, successCallback, errorCallback, operation, execution) {
+ var body, e, fields, headers, key, myHeaders, name, obj, param, parent, possibleParams, requestContentType, responseContentType, urlEncoded, value, values,
+ _this = this;
+ this.type = type;
+ this.url = url;
+ this.params = params;
+ this.opts = opts;
+ this.successCallback = successCallback;
+ this.errorCallback = errorCallback;
+ this.operation = operation;
+ this.execution = execution;
+ if (this.type == null) {
+ throw "SwaggerRequest type is required (get/post/put/delete).";
+ }
+ if (this.url == null) {
+ throw "SwaggerRequest url is required.";
+ }
+ if (this.successCallback == null) {
+ throw "SwaggerRequest successCallback is required.";
+ }
+ if (this.errorCallback == null) {
+ throw "SwaggerRequest error callback is required.";
+ }
+ if (this.operation == null) {
+ throw "SwaggerRequest operation is required.";
+ }
+ this.type = this.type.toUpperCase();
+ headers = params.headers;
+ myHeaders = {};
+ body = params.body;
+ parent = params["parent"];
+ requestContentType = "application/json";
+ if (body && (this.type === "POST" || this.type === "PUT" || this.type === "PATCH")) {
+ if (this.opts.requestContentType) {
+ requestContentType = this.opts.requestContentType;
+ }
+ } else {
+ if (((function() {
+ var _i, _len, _ref, _results;
+ _ref = this.operation.parameters;
+ _results = [];
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (param.paramType === "form") {
+ _results.push(param);
+ }
+ }
+ return _results;
+ }).call(this)).length > 0) {
+ type = param.type || param.dataType;
+ if (((function() {
+ var _i, _len, _ref, _results;
+ _ref = this.operation.parameters;
+ _results = [];
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (type.toLowerCase() === "file") {
+ _results.push(param);
+ }
+ }
+ return _results;
+ }).call(this)).length > 0) {
+ requestContentType = "multipart/form-data";
+ } else {
+ requestContentType = "application/x-www-form-urlencoded";
+ }
+ } else if (this.type !== "DELETE") {
+ requestContentType = null;
+ }
+ }
+ if (requestContentType && this.operation.consumes) {
+ if (this.operation.consumes.indexOf(requestContentType) === -1) {
+ console.log("server doesn't consume " + requestContentType + ", try " + JSON.stringify(this.operation.consumes));
+ if (this.requestContentType === null) {
+ requestContentType = this.operation.consumes[0];
+ }
+ }
+ }
+ responseContentType = null;
+ if (this.type === "POST" || this.type === "GET" || this.type === "PATCH") {
+ if (this.opts.responseContentType) {
+ responseContentType = this.opts.responseContentType;
+ } else {
+ responseContentType = "application/json";
+ }
+ } else {
+ responseContentType = null;
+ }
+ if (responseContentType && this.operation.produces) {
+ if (this.operation.produces.indexOf(responseContentType) === -1) {
+ console.log("server can't produce " + responseContentType);
+ }
+ }
+ if (requestContentType && requestContentType.indexOf("application/x-www-form-urlencoded") === 0) {
+ fields = {};
+ possibleParams = (function() {
+ var _i, _len, _ref, _results;
+ _ref = this.operation.parameters;
+ _results = [];
+ for (_i = 0, _len = _ref.length; _i < _len; _i++) {
+ param = _ref[_i];
+ if (param.paramType === "form") {
+ _results.push(param);
+ }
+ }
+ return _results;
+ }).call(this);
+ values = {};
+ for (key in possibleParams) {
+ value = possibleParams[key];
+ if (this.params[value.name]) {
+ values[value.name] = this.params[value.name];
+ }
+ }
+ urlEncoded = "";
+ for (key in values) {
+ value = values[key];
+ if (urlEncoded !== "") {
+ urlEncoded += "&";
+ }
+ urlEncoded += encodeURIComponent(key) + '=' + encodeURIComponent(value);
+ }
+ body = urlEncoded;
+ }
+ for (name in headers) {
+ myHeaders[name] = headers[name];
+ }
+ if (requestContentType) {
+ myHeaders["Content-Type"] = requestContentType;
+ }
+ if (responseContentType) {
+ myHeaders["Accept"] = responseContentType;
+ }
+ if (!((headers != null) && (headers.mock != null))) {
+ obj = {
+ url: this.url,
+ method: this.type,
+ headers: myHeaders,
+ body: body,
+ on: {
+ error: function(response) {
+ return _this.errorCallback(response, _this.opts.parent);
+ },
+ redirect: function(response) {
+ return _this.successCallback(response, _this.opts.parent);
+ },
+ 307: function(response) {
+ return _this.successCallback(response, _this.opts.parent);
+ },
+ response: function(response) {
+ return _this.successCallback(response, _this.opts.parent);
+ }
+ }
+ };
+ e = {};
+ if (typeof window !== 'undefined') {
+ e = window;
+ } else {
+ e = exports;
+ }
+ e.authorizations.apply(obj);
+ if (opts.mock == null) {
+ new SwaggerHttp().execute(obj);
+ } else {
+ console.log(obj);
+ return obj;
+ }
+ }
+ }
+
+ // Render this request as an equivalent curl command line:
+ // one --header "Name: value" flag per entry in this.headers,
+ // followed by the request URL. Note: the body is not included.
+ SwaggerRequest.prototype.asCurl = function() {
+ var header_args, k, v;
+ // Collect the quoted --header fragments for every header.
+ header_args = (function() {
+ var _ref, _results;
+ _ref = this.headers;
+ _results = [];
+ for (k in _ref) {
+ v = _ref[k];
+ _results.push("--header \"" + k + ": " + v + "\"");
+ }
+ return _results;
+ }).call(this);
+ return "curl " + (header_args.join(" ")) + " " + this.url;
+ };
+
+ return SwaggerRequest;
+
+ })();
+
+ // Thin HTTP transport wrapper around the Shred client. Loads Shred
+ // from a bundled relative path in the browser, or from the "shred"
+ // package under Node, and registers pass-through JSON processors so
+ // response bodies are handed back unparsed (parser = identity).
+ SwaggerHttp = (function() {
+ // Shred constructor (module), instance, and content-processor module.
+ SwaggerHttp.prototype.Shred = null;
+
+ SwaggerHttp.prototype.shred = null;
+
+ SwaggerHttp.prototype.content = null;
+
+ function SwaggerHttp() {
+ var identity, toString,
+ _this = this;
+ // Browser bundles expose the library at "./shred"; Node resolves
+ // the installed "shred" package. NOTE(review): assumes a require
+ // shim exists in the browser build — confirm against the bundler.
+ if (typeof window !== 'undefined') {
+ this.Shred = require("./shred");
+ } else {
+ this.Shred = require("shred");
+ }
+ this.shred = new this.Shred();
+ // Pass-through codecs: do not auto-parse/stringify JSON bodies.
+ identity = function(x) {
+ return x;
+ };
+ toString = function(x) {
+ return x.toString();
+ };
+ // Processor registration lives on the content module in the
+ // browser build, but on the Shred module itself under Node.
+ if (typeof window !== 'undefined') {
+ this.content = require("./shred/content");
+ this.content.registerProcessor(["application/json; charset=utf-8", "application/json", "json"], {
+ parser: identity,
+ stringify: toString
+ });
+ } else {
+ this.Shred.registerProcessor(["application/json; charset=utf-8", "application/json", "json"], {
+ parser: identity,
+ stringify: toString
+ });
+ }
+ }
+
+ // Fire the request described by obj (url/method/headers/body/on-callbacks).
+ SwaggerHttp.prototype.execute = function(obj) {
+ return this.shred.request(obj);
+ };
+
+ return SwaggerHttp;
+
+ })();
+
+ // Registry of named authorization strategies. Each registered value
+ // must expose apply(obj); apply() runs every strategy against a
+ // pending request object (mutating its url/headers as needed).
+ SwaggerAuthorizations = (function() {
+ SwaggerAuthorizations.prototype.authz = null;
+
+ function SwaggerAuthorizations() {
+ this.authz = {};
+ }
+
+ // Register (or replace) an authorization under the given name.
+ SwaggerAuthorizations.prototype.add = function(name, auth) {
+ this.authz[name] = auth;
+ return auth;
+ };
+
+ // Apply every registered authorization to the request object.
+ SwaggerAuthorizations.prototype.apply = function(obj) {
+ var key, value, _ref, _results;
+ _ref = this.authz;
+ _results = [];
+ for (key in _ref) {
+ value = _ref[key];
+ _results.push(value.apply(obj));
+ }
+ return _results;
+ };
+
+ return SwaggerAuthorizations;
+
+ })();
+
+ // API-key authorization: injects a name/value pair into the request,
+ // either as a query-string parameter (type === "query") or as an
+ // HTTP header (type === "header").
+ ApiKeyAuthorization = (function() {
+ ApiKeyAuthorization.prototype.type = null;
+
+ ApiKeyAuthorization.prototype.name = null;
+
+ ApiKeyAuthorization.prototype.value = null;
+
+ function ApiKeyAuthorization(name, value, type) {
+ this.name = name;
+ this.value = value;
+ this.type = type;
+ }
+
+ // Mutates obj in place; returns true for the query case, or the
+ // assigned header value for the header case.
+ ApiKeyAuthorization.prototype.apply = function(obj) {
+ if (this.type === "query") {
+ // Append with '&' if a query string already exists, else start one.
+ if (obj.url.indexOf('?') > 0) {
+ obj.url = obj.url + "&" + this.name + "=" + this.value;
+ } else {
+ obj.url = obj.url + "?" + this.name + "=" + this.value;
+ }
+ return true;
+ } else if (this.type === "header") {
+ return obj.headers[this.name] = this.value;
+ }
+ };
+
+ return ApiKeyAuthorization;
+
+ })();
+
+ // HTTP Basic authorization: sets the Authorization header to
+ // "Basic base64(username:password)".
+ PasswordAuthorization = (function() {
+ PasswordAuthorization.prototype.name = null;
+
+ PasswordAuthorization.prototype.username = null;
+
+ PasswordAuthorization.prototype.password = null;
+
+ function PasswordAuthorization(name, username, password) {
+ this.name = name;
+ this.username = username;
+ this.password = password;
+ }
+
+ // NOTE(review): btoa is a browser global — running this under plain
+ // Node would need a shim; confirm against the intended environments.
+ PasswordAuthorization.prototype.apply = function(obj) {
+ return obj.headers["Authorization"] = "Basic " + btoa(this.username + ":" + this.password);
+ };
+
+ return PasswordAuthorization;
+
+ })();
+
+ // Publish the public API onto the enclosing scope (window in a
+ // browser, exports under Node) and create the single shared
+ // authorizations registry used when sending requests.
+ this.SwaggerApi = SwaggerApi;
+
+ this.SwaggerResource = SwaggerResource;
+
+ this.SwaggerOperation = SwaggerOperation;
+
+ this.SwaggerRequest = SwaggerRequest;
+
+ this.SwaggerModelProperty = SwaggerModelProperty;
+
+ this.ApiKeyAuthorization = ApiKeyAuthorization;
+
+ this.PasswordAuthorization = PasswordAuthorization;
+
+ this.authorizations = new SwaggerAuthorizations();
+
+}).call(this);
<dependency>
<groupId>org.opendaylight.controller.samples</groupId>
<artifactId>clustering-it-model</artifactId>
- <version>${version}</version>
+ <version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
/**
* All disconnected Nodes need be removed from stat list Nodes
+ *
* @param flowNode
* @return true/false if the {@link Node} removed successful
*/
boolean disconnectedNodeUnregistration(InstanceIdentifier<Node> nodeIdent);
+ /**
+ * Adds a new feature {@link StatCapabTypes} to the Node identified by
+ * nodeIdent -> InstanceIdentifier<Node>
+ *
+ * @param nodeIdent identifier of the Node to extend
+ * @param statCapab the statistics capability to register
+ * @return true/false depending on whether the {@link StatCapabTypes} was added successfully
+ */
+ boolean registerAdditionalNodeFeature(InstanceIdentifier<Node> nodeIdent, StatCapabTypes statCapab);
+
/**
* Method return true only and only if {@link StatPermCollector} contain
* valid node registration in its internal {@link Node} map.
* Otherwise return false.
*
- * @param InstanceIdentifier<FlowCapableNode> flowNode
+ * @param nodeIdent
* @return
*/
boolean isProvidedFlowNodeActive(InstanceIdentifier<Node> nodeIdent);
*/
void disconnectedNodeUnregistration(InstanceIdentifier<Node> nodeIdent);
+ /**
+ * Method wraps {@link StatPermCollector}.registerAdditionalNodeFeature to provide
+ * possibility to register additional Node Feature {@link StatCapabTypes} for
+ * statistics collecting.
+ *
+ * @param nodeIdent
+ * @param statCapab
+ */
+ void registerAdditionalNodeFeature(InstanceIdentifier<Node> nodeIdent, StatCapabTypes statCapab);
+
/**
* Method provides access to Device RPC methods by wrapped
* internal method. In next {@link StatRpcMsgManager} is registered all
private ListenerRegistration<DataChangeListener> listenerRegistration;
protected final Map<InstanceIdentifier<Node>, Map<InstanceIdentifier<T>, Integer>> mapNodesForDelete = new ConcurrentHashMap<>();
+ protected final Map<InstanceIdentifier<Node>, Integer> mapNodeFeautureRepeater = new ConcurrentHashMap<>();
private final Class<T> clazz;
super.close();
}
+ /**
+ * Returns the actual DataObject identified by the given InstanceIdentifier, read from the Config DataStore.
+ * @param path path to the requested DataObject
+ * @return Optional containing the data if present, otherwise absent
+ */
protected final <K extends DataObject> Optional<K> readLatestConfiguration(final InstanceIdentifier<K> path) {
if(currentReadTx == null) {
currentReadTx = dataBroker.newReadOnlyTransaction();
return txContainer;
}
+ /**
+ * Validates a TransactionCacheContainer. Must be called before any txCacheContainer processing.
+ *
+ * @param txCacheContainer container to validate
+ * @return true if the container is present and has both a Node ID and notifications, false otherwise
+ */
+ protected boolean isTransactionCacheContainerValid(final Optional<TransactionCacheContainer<?>> txCacheContainer) {
+ if ( ! txCacheContainer.isPresent()) {
+ LOG.debug("Transaction Cache Container is not presented!");
+ return false;
+ }
+ if (txCacheContainer.get().getNodeId() == null) {
+ LOG.debug("Transaction Cache Container {} don't have Node ID!", txCacheContainer.get().getId());
+ return false;
+ }
+ if (txCacheContainer.get().getNotifications() == null) {
+ LOG.debug("Transaction Cache Container {} for {} node don't have Notifications!",
+ txCacheContainer.get().getId(), txCacheContainer.get().getNodeId());
+ return false;
+ }
+ return true;
+ }
+
/**
* Wrapping Future object call to {@link org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager}
* isExpectedStatistics with 10sec TimeOut.
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
import org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager.TransactionCacheContainer;
import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
import org.opendaylight.controller.md.statistics.manager.StatisticsManager.StatDataStoreOperation;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupDescStats;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupDescStatsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupFeatures;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupFeaturesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupStatistics;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.NodeGroupStatisticsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.OpendaylightGroupStatisticsListener;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.group.desc.GroupDescBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.group.statistics.rev131111.group.features.GroupFeatures;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
/**
* statistics-manager
final TransactionId transId = notification.getTransactionId();
final NodeId nodeId = notification.getId();
if ( ! isExpectedStatistics(transId, nodeId)) {
- LOG.debug("STAT-MANAGER - GroupDescStatsUpdated: unregistred notification detect TransactionId {}", transId);
+ LOG.debug("Unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
return;
}
- final List<GroupDescStats> groupStats = notification.getGroupDescStats() != null
- ? new ArrayList<>(notification.getGroupDescStats()) : new ArrayList<GroupDescStats>(10);
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if (txContainer.isPresent()) {
- final List<? extends TransactionAware> cacheNotifs =
- txContainer.get().getNotifications();
- for (final TransactionAware notif : cacheNotifs) {
- if (notif instanceof GroupDescStatsUpdated) {
- groupStats.addAll(((GroupDescStatsUpdated) notif).getGroupDescStats());
- }
- }
- }
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
- .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- statGroupDescCommit(groupStats, nodeIdent, tx);
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+ /* Validate exist FlowCapableNode */
+ final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+ Optional<FlowCapableNode> fNode = Optional.absent();
+ try {
+ fNode = tx.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+ }
+ if ( ! fNode.isPresent()) {
+ return;
+ }
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
+ }
+ /* Prepare List actual Groups and not updated Groups will be removed */
+ final List<Group> existGroups = fNode.get().getGroup() != null
+ ? fNode.get().getGroup() : Collections.<Group> emptyList();
+ final List<GroupKey> existGroupKeys = new ArrayList<>();
+ for (final Group group : existGroups) {
+ existGroupKeys.add(group.getKey());
+ }
+ /* GroupDesc processing */
+ statGroupDescCommit(txContainer, tx, fNodeIdent, existGroupKeys);
+ /* Delete all not presented Group Nodes */
+ deleteAllNotPresentNode(fNodeIdent, tx, Collections.unmodifiableList(existGroupKeys));
/* Notification for continue collecting statistics */
notifyToCollectNextStatistics(nodeIdent);
}
@Override
public void onGroupFeaturesUpdated(final GroupFeaturesUpdated notification) {
+ Preconditions.checkNotNull(notification);
final TransactionId transId = notification.getTransactionId();
final NodeId nodeId = notification.getId();
if ( ! isExpectedStatistics(transId, nodeId)) {
- LOG.debug("STAT-MANAGER - MeterFeaturesUpdated: unregistred notification detect TransactionId {}", transId);
+ LOG.debug("Unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
- return;
- }
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if ( ! txContainer.isPresent()) {
return;
}
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
- .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- notifyToCollectNextStatistics(nodeIdent);
- final GroupFeatures stats = new GroupFeaturesBuilder(notification).build();
- final InstanceIdentifier<GroupFeatures> groupFeatureIdent = nodeIdent
- .augmentation(NodeGroupFeatures.class).child(GroupFeatures.class);
- Optional<Node> node = Optional.absent();
- try {
- node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
}
- if (node.isPresent()) {
- tx.put(LogicalDatastoreType.OPERATIONAL, groupFeatureIdent, stats);
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof GroupFeaturesUpdated)) {
+ break;
+ }
+ final GroupFeatures stats = new GroupFeaturesBuilder((GroupFeaturesUpdated)notif).build();
+ final InstanceIdentifier<NodeGroupFeatures> nodeGroupFeatureIdent =
+ nodeIdent.augmentation(NodeGroupFeatures.class);
+ final InstanceIdentifier<GroupFeatures> groupFeatureIdent = nodeGroupFeatureIdent
+ .child(GroupFeatures.class);
+ Optional<Node> node = Optional.absent();
+ try {
+ node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ }
+ if (node.isPresent()) {
+ tx.merge(LogicalDatastoreType.OPERATIONAL, nodeGroupFeatureIdent, new NodeGroupFeaturesBuilder().build(), true);
+ tx.put(LogicalDatastoreType.OPERATIONAL, groupFeatureIdent, stats);
+ manager.registerAdditionalNodeFeature(nodeIdent, StatCapabTypes.GROUP_STATS);
+ }
}
}
});
@Override
public void onGroupStatisticsUpdated(final GroupStatisticsUpdated notification) {
+ Preconditions.checkNotNull(notification);
final TransactionId transId = notification.getTransactionId();
final NodeId nodeId = notification.getId();
if ( ! isExpectedStatistics(transId, nodeId)) {
LOG.debug("STAT-MANAGER - GroupStatisticsUpdated: unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
return;
}
- final List<GroupStats> groupStats = notification.getGroupStats() != null
- ? new ArrayList<>(notification.getGroupStats()) : new ArrayList<GroupStats>(10);
- Optional<Group> notifGroup = Optional.absent();
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if (txContainer.isPresent()) {
- final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
- if (inputObj.isPresent() && inputObj.get() instanceof Group) {
- notifGroup = Optional.<Group> of((Group)inputObj.get());
- }
- final List<? extends TransactionAware> cacheNotifs =
- txContainer.get().getNotifications();
- for (final TransactionAware notif : cacheNotifs) {
- if (notif instanceof GroupStatisticsUpdated) {
- groupStats.addAll(((GroupStatisticsUpdated) notif).getGroupStats());
- }
- }
- }
- final Optional<Group> group = notifGroup;
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
- .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- /* Notification for continue collecting statistics */
- if ( ! group.isPresent()) {
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+ /* Node exist check */
+ Optional<Node> node = Optional.absent();
+ try {
+ node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ }
+ if ( ! node.isPresent()) {
+ return;
+ }
+
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
+ }
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+
+ Optional<Group> notifGroup = Optional.absent();
+ final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
+ if (inputObj.isPresent() && inputObj.get() instanceof Group) {
+ notifGroup = Optional.<Group> of((Group)inputObj.get());
+ }
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof GroupStatisticsUpdated)) {
+ break;
+ }
+ statGroupCommit(((GroupStatisticsUpdated) notif).getGroupStats(), nodeIdent, tx);
+ }
+ if (notifGroup.isPresent()) {
notifyToCollectNextStatistics(nodeIdent);
}
- statGroupCommit(groupStats, nodeIdent, group, tx);
}
});
}
private void statGroupCommit(final List<GroupStats> groupStats, final InstanceIdentifier<Node> nodeIdent,
- final Optional<Group> group, final ReadWriteTransaction trans) {
+ final ReadWriteTransaction tx) {
+
+ Preconditions.checkNotNull(groupStats);
+ Preconditions.checkNotNull(nodeIdent);
+ Preconditions.checkNotNull(tx);
+
final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
- for (final GroupStats groupStat : groupStats) {
- final GroupStatistics stats = new GroupStatisticsBuilder(groupStat).build();
+ for (final GroupStats gStat : groupStats) {
+ final GroupStatistics stats = new GroupStatisticsBuilder(gStat).build();
- final GroupKey groupKey = new GroupKey(groupStat.getGroupId());
- final InstanceIdentifier<GroupStatistics> gsIdent = fNodeIdent
- .child(Group.class,groupKey).augmentation(NodeGroupStatistics.class)
- .child(GroupStatistics.class);
+ final InstanceIdentifier<Group> groupIdent = fNodeIdent.child(Group.class, new GroupKey(gStat.getGroupId()));
+ final InstanceIdentifier<NodeGroupStatistics> nGroupStatIdent =groupIdent
+ .augmentation(NodeGroupStatistics.class);
+ final InstanceIdentifier<GroupStatistics> gsIdent = nGroupStatIdent.child(GroupStatistics.class);
/* Statistics Writing */
- Optional<FlowCapableNode> fNode = Optional.absent();
+ Optional<Group> group = Optional.absent();
try {
- fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
+ group = tx.read(LogicalDatastoreType.OPERATIONAL, groupIdent).checkedGet();
}
catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+ LOG.debug("Read Operational/DS for Group node fail! {}", groupIdent, e);
}
- if (fNode.isPresent()) {
- trans.put(LogicalDatastoreType.OPERATIONAL, gsIdent, stats);
+ if (group.isPresent()) {
+ tx.merge(LogicalDatastoreType.OPERATIONAL, nGroupStatIdent, new NodeGroupStatisticsBuilder().build(), true);
+ tx.put(LogicalDatastoreType.OPERATIONAL, gsIdent, stats);
}
}
}
- private void statGroupDescCommit(final List<GroupDescStats> groupStats, final InstanceIdentifier<Node> nodeIdent,
- final ReadWriteTransaction trans) {
- final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+ private void statGroupDescCommit(final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+ final InstanceIdentifier<FlowCapableNode> fNodeIdent, final List<GroupKey> existGroupKeys) {
- final List<GroupKey> deviceGroupKeys = new ArrayList<>();
+ Preconditions.checkNotNull(existGroupKeys);
+ Preconditions.checkNotNull(txContainer);
+ Preconditions.checkNotNull(fNodeIdent);
+ Preconditions.checkNotNull(tx);
- for (final GroupDescStats group : groupStats) {
- if (group.getGroupId() != null) {
- final GroupBuilder groupBuilder = new GroupBuilder(group);
- final GroupKey groupKey = new GroupKey(group.getGroupId());
- final InstanceIdentifier<Group> groupRef = fNodeIdent.child(Group.class,groupKey);
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof GroupDescStatsUpdated)) {
+ break;
+ }
+ final List<GroupDescStats> groupStats = ((GroupDescStatsUpdated) notif).getGroupDescStats();
+ if (groupStats == null) {
+ break;
+ }
+ for (final GroupDescStats group : groupStats) {
+ if (group.getGroupId() != null) {
+ final GroupBuilder groupBuilder = new GroupBuilder(group);
+ final GroupKey groupKey = new GroupKey(group.getGroupId());
+ final InstanceIdentifier<Group> groupRef = fNodeIdent.child(Group.class,groupKey);
- final NodeGroupDescStatsBuilder groupDesc= new NodeGroupDescStatsBuilder();
- groupDesc.setGroupDesc(new GroupDescBuilder(group).build());
- //Update augmented data
- groupBuilder.addAugmentation(NodeGroupDescStats.class, groupDesc.build());
- deviceGroupKeys.add(groupKey);
- Optional<FlowCapableNode> hashIdUpd = Optional.absent();
- try {
- hashIdUpd = trans.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
- }
- if (hashIdUpd.isPresent()) {
- trans.put(LogicalDatastoreType.OPERATIONAL, groupRef, groupBuilder.build());
+ final NodeGroupDescStatsBuilder groupDesc= new NodeGroupDescStatsBuilder();
+ groupDesc.setGroupDesc(new GroupDescBuilder(group).build());
+ //Update augmented data
+ groupBuilder.addAugmentation(NodeGroupDescStats.class, groupDesc.build());
+ existGroupKeys.remove(groupKey);
+ tx.put(LogicalDatastoreType.OPERATIONAL, groupRef, groupBuilder.build());
}
}
}
- /* Delete all not presented Group Nodes */
- deleteAllNotPresentNode(fNodeIdent, trans, deviceGroupKeys);
}
private void deleteAllNotPresentNode(final InstanceIdentifier<FlowCapableNode> fNodeIdent,
final ReadWriteTransaction trans, final List<GroupKey> deviceGroupKeys) {
- final Optional<FlowCapableNode> fNode = readLatestConfiguration(fNodeIdent);
- if ( ! fNode.isPresent()) {
- LOG.trace("Read Operational/DS for FlowCapableNode fail! Node {} doesn't exist.", fNodeIdent);
+ Preconditions.checkNotNull(fNodeIdent);
+ Preconditions.checkNotNull(trans);
+
+ if (deviceGroupKeys == null) {
return;
}
- final List<Group> existGroups = fNode.get().getGroup() != null
- ? fNode.get().getGroup() : Collections.<Group> emptyList();
- /* Add all existed groups paths - no updated paths has to be removed */
- for (final Group group : existGroups) {
- if (deviceGroupKeys.remove(group.getKey())) {
- break; // group still exist on device
- }
- LOG.trace("Group {} has to removed.", group);
- final InstanceIdentifier<Group> delGroupIdent = fNodeIdent.child(Group.class, group.getKey());
+
+ for (final GroupKey key : deviceGroupKeys) {
+ final InstanceIdentifier<Group> delGroupIdent = fNodeIdent.child(Group.class, key);
+ LOG.trace("Group {} has to removed.", key);
Optional<Group> delGroup = Optional.absent();
try {
delGroup = trans.read(LogicalDatastoreType.OPERATIONAL, delGroupIdent).checkedGet();
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
+import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
import org.opendaylight.controller.md.statistics.manager.StatRpcMsgManager.TransactionCacheContainer;
import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
import org.opendaylight.controller.md.statistics.manager.StatisticsManager.StatDataStoreOperation;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterConfigStats;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterConfigStatsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeatures;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeaturesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterStatistics;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterStatisticsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.OpendaylightMeterStatisticsListener;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.MeterFeatures;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.MeterFeaturesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.nodes.node.meter.MeterStatisticsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.meter.config.stats.reply.MeterConfigStats;
import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.types.rev130918.meter.statistics.reply.MeterStats;
+import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
/**
* statistics-manager
LOG.debug("STAT-MANAGER - MeterConfigStatsUpdated: unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
return;
}
- final List<MeterConfigStats> meterConfStat = notification.getMeterConfigStats() != null
- ? new ArrayList<>(notification.getMeterConfigStats()) : new ArrayList<MeterConfigStats>(10);
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if (txContainer.isPresent()) {
- final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
- for (final TransactionAware notif : cacheNotifs) {
- if (notif instanceof MeterConfigStatsUpdated) {
- meterConfStat.addAll(((MeterConfigStatsUpdated) notif).getMeterConfigStats());
- }
- }
- }
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ /* Validate exist FlowCapableNode */
+ final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+ Optional<FlowCapableNode> fNode = Optional.absent();
+ try {
+ fNode = tx.read(LogicalDatastoreType.OPERATIONAL,fNodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
+ }
+ if ( ! fNode.isPresent()) {
+ return;
+ }
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
+ }
+ /* Prepare List actual Meters and not updated Meters will be removed */
+ final List<Meter> existMeters = fNode.get().getMeter() != null
+ ? fNode.get().getMeter() : Collections.<Meter> emptyList();
+ final List<MeterKey> existMeterKeys = new ArrayList<>();
+ for (final Meter meter : existMeters) {
+ existMeterKeys.add(meter.getKey());
+ }
+ /* MeterConfig processing */
+ comitConfMeterStats(txContainer, tx, fNodeIdent, existMeterKeys);
+ /* Delete all not presented Meter Nodes */
+ deleteAllNotPresentedNodes(fNodeIdent, tx, Collections.unmodifiableList(existMeterKeys));
/* Notification for continue collecting statistics */
notifyToCollectNextStatistics(nodeIdent);
- comitConfMeterStats(meterConfStat, nodeIdent, tx);
}
});
}
@Override
public void onMeterFeaturesUpdated(final MeterFeaturesUpdated notification) {
+ Preconditions.checkNotNull(notification);
final TransactionId transId = notification.getTransactionId();
final NodeId nodeId = notification.getId();
if ( ! isExpectedStatistics(transId, nodeId)) {
LOG.debug("STAT-MANAGER - MeterFeaturesUpdated: unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
- return;
- }
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if ( ! txContainer.isPresent()) {
return;
}
- final MeterFeatures stats = new MeterFeaturesBuilder(notification).build();
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
- .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
- final InstanceIdentifier<MeterFeatures> meterFeatureIdent = nodeIdent
- .augmentation(NodeMeterFeatures.class).child(MeterFeatures.class);
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- /* Notification for continue collecting statistics */
- notifyToCollectNextStatistics(nodeIdent);
- Optional<Node> node = Optional.absent();
- try {
- node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
}
- if (node.isPresent()) {
- tx.put(LogicalDatastoreType.OPERATIONAL, meterFeatureIdent, stats);
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof MeterFeaturesUpdated)) {
+ break;
+ }
+ final MeterFeatures stats = new MeterFeaturesBuilder((MeterFeaturesUpdated)notif).build();
+ final InstanceIdentifier<NodeMeterFeatures> nodeMeterFeatureIdent =
+ nodeIdent.augmentation(NodeMeterFeatures.class);
+ final InstanceIdentifier<MeterFeatures> meterFeatureIdent = nodeMeterFeatureIdent
+ .child(MeterFeatures.class);
+ Optional<Node> node = Optional.absent();
+ try {
+ node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ }
+ if (node.isPresent()) {
+ tx.merge(LogicalDatastoreType.OPERATIONAL, nodeMeterFeatureIdent, new NodeMeterFeaturesBuilder().build(), true);
+ tx.put(LogicalDatastoreType.OPERATIONAL, meterFeatureIdent, stats);
+ manager.registerAdditionalNodeFeature(nodeIdent, StatCapabTypes.METER_STATS);
+ }
}
}
});
@Override
public void onMeterStatisticsUpdated(final MeterStatisticsUpdated notification) {
+ Preconditions.checkNotNull(notification);
final TransactionId transId = notification.getTransactionId();
final NodeId nodeId = notification.getId();
if ( ! isExpectedStatistics(transId, nodeId)) {
LOG.debug("STAT-MANAGER - MeterStatisticsUpdated: unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
return;
}
- final List<MeterStats> meterStat = notification.getMeterStats() != null
- ? new ArrayList<>(notification.getMeterStats()) : new ArrayList<MeterStats>(10);
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if (txContainer.isPresent()) {
- final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
- for (final TransactionAware notif : cacheNotifs) {
- if (notif instanceof MeterConfigStatsUpdated) {
- meterStat.addAll(((MeterStatisticsUpdated) notif).getMeterStats());
- }
- }
- }
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
public void applyOperation(final ReadWriteTransaction tx) {
- statMeterCommit(meterStat, nodeIdent, tx);
- /* Notification for continue collecting statistics */
- notifyToCollectNextStatistics(nodeIdent);
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier
+ .create(Nodes.class).child(Node.class, new NodeKey(nodeId));
+ /* Node exist check */
+ Optional<Node> node = Optional.absent();
+ try {
+ node = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ }
+ if ( ! node.isPresent()) {
+ return;
+ }
+
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
+ }
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+
+ Optional<Meter> notifMeter = Optional.absent();
+ final Optional<? extends DataObject> inputObj = txContainer.get().getConfInput();
+ if (inputObj.isPresent() && inputObj.get() instanceof Meter) {
+ notifMeter = Optional.<Meter> of((Meter)inputObj.get());
+ }
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof MeterStatisticsUpdated)) {
+ break;
+ }
+ statMeterCommit(((MeterStatisticsUpdated) notif).getMeterStats(), nodeIdent, tx);
+ }
+ if (notifMeter.isPresent()) {
+ notifyToCollectNextStatistics(nodeIdent);
+ }
}
});
}
private void statMeterCommit(final List<MeterStats> meterStats,
- final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+ final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction tx) {
+
+ Preconditions.checkNotNull(meterStats);
+ Preconditions.checkNotNull(nodeIdent);
+ Preconditions.checkNotNull(tx);
final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
+
for (final MeterStats mStat : meterStats) {
final MeterStatistics stats = new MeterStatisticsBuilder(mStat).build();
- final MeterKey mKey = new MeterKey(mStat.getMeterId());
- final InstanceIdentifier<MeterStatistics> msIdent = fNodeIdent
- .child(Meter.class, mKey).augmentation(NodeMeterStatistics.class)
- .child(MeterStatistics.class);
+ final InstanceIdentifier<Meter> meterIdent = fNodeIdent.child(Meter.class, new MeterKey(mStat.getMeterId()));
+ final InstanceIdentifier<NodeMeterStatistics> nodeMeterStatIdent = meterIdent
+ .augmentation(NodeMeterStatistics.class);
+ final InstanceIdentifier<MeterStatistics> msIdent = nodeMeterStatIdent.child(MeterStatistics.class);
/* Meter Statistics commit */
- Optional<FlowCapableNode> fNode = Optional.absent();
+ Optional<Meter> meter = Optional.absent();
try {
- fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
+ meter = tx.read(LogicalDatastoreType.OPERATIONAL, meterIdent).checkedGet();
}
catch (final ReadFailedException e) {
LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
}
- if (fNode.isPresent()) {
- trans.put(LogicalDatastoreType.OPERATIONAL, msIdent, stats);
+ if (meter.isPresent()) {
+ tx.merge(LogicalDatastoreType.OPERATIONAL, nodeMeterStatIdent, new NodeMeterStatisticsBuilder().build(), true);
+ tx.put(LogicalDatastoreType.OPERATIONAL, msIdent, stats);
}
}
}
- private void comitConfMeterStats(final List<MeterConfigStats> meterConfStat,
- final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+ private void comitConfMeterStats(final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+ final InstanceIdentifier<FlowCapableNode> fNodeIdent, final List<MeterKey> existMeterKeys) {
- final InstanceIdentifier<FlowCapableNode> fNodeIdent = nodeIdent.augmentation(FlowCapableNode.class);
- final List<MeterKey> deviceMeterKeys = new ArrayList<>();
-
- for (final MeterConfigStats meterConf : meterConfStat) {
- final MeterBuilder meterBuilder = new MeterBuilder(meterConf);
- if (meterConf.getMeterId() != null) {
- final MeterKey meterKey = new MeterKey(meterConf.getMeterId());
- meterBuilder.setKey(meterKey);
- final InstanceIdentifier<Meter> meterRef = nodeIdent
- .augmentation(FlowCapableNode.class).child(Meter.class,meterKey);
- final NodeMeterConfigStatsBuilder meterConfig = new NodeMeterConfigStatsBuilder();
- meterConfig.setMeterConfigStats(new MeterConfigStatsBuilder(meterConf).build());
- //Update augmented data
- meterBuilder.addAugmentation(NodeMeterConfigStats.class, meterConfig.build());
- deviceMeterKeys.add(meterKey);
- Optional<FlowCapableNode> fNode = Optional.absent();
- try {
- fNode = trans.read(LogicalDatastoreType.OPERATIONAL, fNodeIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for FlowCapableNode fail! {}", fNodeIdent, e);
- }
- if (fNode.isPresent()) {
- trans.put(LogicalDatastoreType.OPERATIONAL, meterRef, meterBuilder.build());
+ Preconditions.checkNotNull(existMeterKeys);
+ Preconditions.checkNotNull(txContainer);
+ Preconditions.checkNotNull(fNodeIdent);
+ Preconditions.checkNotNull(tx);
+
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof MeterConfigStatsUpdated)) {
+ break;
+ }
+ final List<MeterConfigStats> meterStats = ((MeterConfigStatsUpdated) notif).getMeterConfigStats();
+ if (meterStats == null) {
+ break;
+ }
+ for (final MeterConfigStats meterStat : meterStats) {
+ if (meterStat.getMeterId() != null) {
+ final MeterBuilder meterBuilder = new MeterBuilder(meterStat);
+ final MeterKey meterKey = new MeterKey(meterStat.getMeterId());
+ final InstanceIdentifier<Meter> meterRef = fNodeIdent.child(Meter.class, meterKey);
+
+ final NodeMeterConfigStatsBuilder meterConfig = new NodeMeterConfigStatsBuilder();
+ meterConfig.setMeterConfigStats(new MeterConfigStatsBuilder(meterStat).build());
+ //Update augmented data
+ meterBuilder.addAugmentation(NodeMeterConfigStats.class, meterConfig.build());
+ existMeterKeys.remove(meterKey);
+ tx.put(LogicalDatastoreType.OPERATIONAL, meterRef, meterBuilder.build());
}
}
}
- /* Delete all not presented Meter Nodes */
- deleteAllNotPresentedNodes(fNodeIdent, trans, deviceMeterKeys);
}
private void deleteAllNotPresentedNodes(final InstanceIdentifier<FlowCapableNode> fNodeIdent,
- final ReadWriteTransaction trans, final List<MeterKey> deviceMeterKeys) {
- /* Delete all not presented meters */
- final Optional<FlowCapableNode> fNode = readLatestConfiguration(fNodeIdent);
+ final ReadWriteTransaction tx, final List<MeterKey> deviceMeterKeys) {
+
+ Preconditions.checkNotNull(fNodeIdent);
+ Preconditions.checkNotNull(tx);
- if ( ! fNode.isPresent()) {
- LOG.trace("Read Operational/DS for FlowCapableNode fail! Node {} doesn't exist.", fNodeIdent);
+ if (deviceMeterKeys == null) {
return;
}
- final List<Meter> existMeters = fNode.get().getMeter() != null
- ? fNode.get().getMeter() : Collections.<Meter> emptyList();
- /* Add all existed groups paths - no updated paths has to be removed */
- for (final Meter meter : existMeters) {
- if (deviceMeterKeys.remove(meter.getKey())) {
- break; // Meter still exist on device
- }
- final InstanceIdentifier<Meter> delMeterIdent = fNodeIdent.child(Meter.class, meter.getKey());
+
+ for (final MeterKey key : deviceMeterKeys) {
+ final InstanceIdentifier<Meter> delMeterIdent = fNodeIdent.child(Meter.class, key);
+ LOG.trace("Meter {} has to be removed.", key);
Optional<Meter> delMeter = Optional.absent();
try {
- delMeter = trans.read(LogicalDatastoreType.OPERATIONAL, delMeterIdent).checkedGet();
+ delMeter = tx.read(LogicalDatastoreType.OPERATIONAL, delMeterIdent).checkedGet();
}
catch (final ReadFailedException e) {
// NOOP - probably another transaction delete that node
}
if (delMeter.isPresent()) {
- trans.delete(LogicalDatastoreType.OPERATIONAL, delMeterIdent);
+ tx.delete(LogicalDatastoreType.OPERATIONAL, delMeterIdent);
}
}
}
package org.opendaylight.controller.md.statistics.manager.impl;
-import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
/**
* statistics-manager
LOG.debug("STAT-MANAGER - QueueStatisticsUpdate: unregistred notification detect TransactionId {}", transId);
return;
}
+ manager.getRpcMsgManager().addNotification(notification, nodeId);
if (notification.isMoreReplies()) {
- manager.getRpcMsgManager().addNotification(notification, nodeId);
return;
}
- final List<QueueIdAndStatisticsMap> queueStats = notification.getQueueIdAndStatisticsMap() != null
- ? new ArrayList<>(notification.getQueueIdAndStatisticsMap()) : new ArrayList<QueueIdAndStatisticsMap>(10);
- final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
- if (txContainer.isPresent()) {
- final List<? extends TransactionAware> cachedNotifs =
- txContainer.get().getNotifications();
- for (final TransactionAware notif : cachedNotifs) {
- if (notif instanceof QueueStatisticsUpdate) {
- queueStats.addAll(((QueueStatisticsUpdate) notif).getQueueIdAndStatisticsMap());
- }
- }
- }
- final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class)
- .child(Node.class, new NodeKey(nodeId));
- /* Queue statistics are small size and we are not able to change for OF cross controller
- * - don't need to make are atomic */
+
+ /* Don't block RPC Notification thread */
manager.enqueue(new StatDataStoreOperation() {
@Override
- public void applyOperation(final ReadWriteTransaction trans) {
- /* Notification for continue */
+ public void applyOperation(final ReadWriteTransaction tx) {
+
+ final InstanceIdentifier<Node> nodeIdent = InstanceIdentifier.create(Nodes.class)
+ .child(Node.class, new NodeKey(nodeId));
+
+ /* Validate exist Node */
+ Optional<Node> fNode = Optional.absent();
+ try {
+ fNode = tx.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
+ }
+ if ( ! fNode.isPresent()) {
+ LOG.trace("Read Operational/DS for Node fail! Node {} doesn't exist.", nodeIdent);
+ return;
+ }
+
+ /* Get and Validate TransactionCacheContainer */
+ final Optional<TransactionCacheContainer<?>> txContainer = getTransactionCacheContainer(transId, nodeId);
+ if ( ! isTransactionCacheContainerValid(txContainer)) {
+ return;
+ }
+ /* Prepare list of actual Queues; Queues not updated here will be removed */
+ final List<NodeConnector> existConnectors = fNode.get().getNodeConnector() != null
+ ? fNode.get().getNodeConnector() : Collections.<NodeConnector> emptyList();
+ final Map<QueueKey, NodeConnectorKey> existQueueKeys = new HashMap<>();
+ for (final NodeConnector connect : existConnectors) {
+ final List<Queue> listQueues = connect.getAugmentation(FlowCapableNodeConnector.class).getQueue();
+ if (listQueues != null) {
+ for (final Queue queue : listQueues) {
+ existQueueKeys.put(queue.getKey(), connect.getKey());
+ }
+ }
+ }
+ /* Queue processing */
+ statQueueCommit(txContainer, tx, nodeIdent, existQueueKeys);
+ /* Delete all not presented Queue Nodes */
+ deleteAllNotPresentedNodes(nodeIdent, tx, Collections.unmodifiableMap(existQueueKeys));
+ /* Notification for continue collecting statistics */
notifyToCollectNextStatistics(nodeIdent);
- statQueueCommit(queueStats, nodeIdent, trans);
}
});
}
- private void statQueueCommit(final List<QueueIdAndStatisticsMap> queueStats,
- final InstanceIdentifier<Node> nodeIdent, final ReadWriteTransaction trans) {
+ private void statQueueCommit(
+ final Optional<TransactionCacheContainer<?>> txContainer, final ReadWriteTransaction tx,
+ final InstanceIdentifier<Node> nodeIdent, final Map<QueueKey, NodeConnectorKey> existQueueKeys) {
- /* check exist FlowCapableNode and write statistics */
- Optional<Node> fNode = Optional.absent();
- try {
- fNode = trans.read(LogicalDatastoreType.OPERATIONAL, nodeIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.debug("Read Operational/DS for Node fail! {}", nodeIdent, e);
- return;
+ Preconditions.checkNotNull(existQueueKeys);
+ Preconditions.checkNotNull(txContainer);
+ Preconditions.checkNotNull(nodeIdent);
+ Preconditions.checkNotNull(tx);
+
+ final List<? extends TransactionAware> cacheNotifs = txContainer.get().getNotifications();
+ for (final TransactionAware notif : cacheNotifs) {
+ if ( ! (notif instanceof QueueStatisticsUpdate)) {
+ break;
+ }
+ final List<QueueIdAndStatisticsMap> queueStats = ((QueueStatisticsUpdate) notif).getQueueIdAndStatisticsMap();
+ if (queueStats == null) {
+ break;
+ }
+ for (final QueueIdAndStatisticsMap queueStat : queueStats) {
+ if (queueStat.getQueueId() != null) {
+ final FlowCapableNodeConnectorQueueStatistics statChild =
+ new FlowCapableNodeConnectorQueueStatisticsBuilder(queueStat).build();
+ final FlowCapableNodeConnectorQueueStatisticsDataBuilder statBuild =
+ new FlowCapableNodeConnectorQueueStatisticsDataBuilder();
+ statBuild.setFlowCapableNodeConnectorQueueStatistics(statChild);
+ final QueueKey qKey = new QueueKey(queueStat.getQueueId());
+ final InstanceIdentifier<FlowCapableNodeConnectorQueueStatisticsData> queueStatIdent = nodeIdent
+ .child(NodeConnector.class, new NodeConnectorKey(queueStat.getNodeConnectorId()))
+ .augmentation(FlowCapableNodeConnector.class)
+ .child(Queue.class, qKey).augmentation(FlowCapableNodeConnectorQueueStatisticsData.class);
+ existQueueKeys.remove(qKey);
+ tx.put(LogicalDatastoreType.OPERATIONAL, queueStatIdent, statBuild.build());
+ }
+ }
}
- if ( ! fNode.isPresent()) {
- LOG.trace("Read Operational/DS for Node fail! Node {} doesn't exist.", nodeIdent);
+ }
+
+ private void deleteAllNotPresentedNodes(final InstanceIdentifier<Node> nodeIdent,
+ final ReadWriteTransaction tx, final Map<QueueKey, NodeConnectorKey> existQueueKeys) {
+
+ Preconditions.checkNotNull(nodeIdent);
+ Preconditions.checkNotNull(tx);
+
+ if (existQueueKeys == null) {
return;
}
- for (final QueueIdAndStatisticsMap queueEntry : queueStats) {
- final FlowCapableNodeConnectorQueueStatistics statChild =
- new FlowCapableNodeConnectorQueueStatisticsBuilder(queueEntry).build();
- final FlowCapableNodeConnectorQueueStatisticsDataBuilder statBuild =
- new FlowCapableNodeConnectorQueueStatisticsDataBuilder();
- statBuild.setFlowCapableNodeConnectorQueueStatistics(statChild);
- final QueueKey qKey = new QueueKey(queueEntry.getQueueId());
- final InstanceIdentifier<FlowCapableNodeConnectorQueueStatisticsData> queueStatIdent = nodeIdent
- .child(NodeConnector.class, new NodeConnectorKey(queueEntry.getNodeConnectorId()))
- .augmentation(FlowCapableNodeConnector.class)
- .child(Queue.class, qKey).augmentation(FlowCapableNodeConnectorQueueStatisticsData.class);
- trans.put(LogicalDatastoreType.OPERATIONAL, queueStatIdent, statBuild.build());
+ for (final Entry<QueueKey, NodeConnectorKey> entry : existQueueKeys.entrySet()) {
+ final InstanceIdentifier<Queue> queueIdent = nodeIdent.child(NodeConnector.class, entry.getValue())
+ .augmentation(FlowCapableNodeConnector.class).child(Queue.class, entry.getKey());
+ LOG.trace("Queue {} has to be removed.", queueIdent);
+ Optional<Queue> delQueue = Optional.absent();
+ try {
+ delQueue = tx.read(LogicalDatastoreType.OPERATIONAL, queueIdent).checkedGet();
+ }
+ catch (final ReadFailedException e) {
+ // NOOP - probably another transaction delete that node
+ }
+ if (delQueue.isPresent()) {
+ tx.delete(LogicalDatastoreType.OPERATIONAL, queueIdent);
+ }
}
}
}
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Set;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.DataChangeListener;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataBroker.DataChangeScope;
+import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeEvent;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
-import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.md.statistics.manager.StatNodeRegistration;
import org.opendaylight.controller.md.statistics.manager.StatPermCollector.StatCapabTypes;
import org.opendaylight.controller.md.statistics.manager.StatisticsManager;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRemoved;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeUpdated;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.meter.statistics.rev131111.NodeMeterFeatures;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
+import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
*
* Created: Aug 28, 2014
*/
-public class StatNodeRegistrationImpl implements StatNodeRegistration {
+public class StatNodeRegistrationImpl implements StatNodeRegistration, DataChangeListener {
private static final Logger LOG = LoggerFactory.getLogger(StatNodeRegistrationImpl.class);
Preconditions.checkArgument(db != null, "DataBroker can not be null!");
Preconditions.checkArgument(notificationService != null, "NotificationProviderService can not be null!");
notifListenerRegistration = notificationService.registerNotificationListener(this);
+ /* Build Path */
+ final InstanceIdentifier<FlowCapableNode> flowNodeWildCardIdentifier = InstanceIdentifier.create(Nodes.class)
+ .child(Node.class).augmentation(FlowCapableNode.class);
+ listenerRegistration = db.registerDataChangeListener(LogicalDatastoreType.OPERATIONAL,
+ flowNodeWildCardIdentifier, StatNodeRegistrationImpl.this, DataChangeScope.BASE);
}
@Override
maxCapTables = data.getMaxTables();
final Optional<Short> maxTables = Optional.<Short> of(maxCapTables);
-
- /* Meters management */
- final InstanceIdentifier<NodeMeterFeatures> meterFeaturesIdent = nodeIdent.augmentation(NodeMeterFeatures.class);
-
-
- Optional<NodeMeterFeatures> meterFeatures = Optional.absent();
- try {
- meterFeatures = tx.read(LogicalDatastoreType.OPERATIONAL, meterFeaturesIdent).checkedGet();
- }
- catch (final ReadFailedException e) {
- LOG.warn("Read NodeMeterFeatures {} fail!", meterFeaturesIdent, e);
- }
- if (meterFeatures.isPresent()) {
- statCapabTypes.add(StatCapabTypes.METER_STATS);
- }
manager.connectedNodeRegistration(nodeIdent,
Collections.unmodifiableList(statCapabTypes), maxTables.get());
}
@Override
public void onNodeRemoved(final NodeRemoved notification) {
+ Preconditions.checkNotNull(notification);
final NodeRef nodeRef = notification.getNodeRef();
final InstanceIdentifier<?> nodeRefIdent = nodeRef.getValue();
final InstanceIdentifier<Node> nodeIdent =
@Override
public void onNodeUpdated(final NodeUpdated notification) {
+ Preconditions.checkNotNull(notification);
final FlowCapableNodeUpdated newFlowNode =
notification.getAugmentation(FlowCapableNodeUpdated.class);
if (newFlowNode != null && newFlowNode.getSwitchFeatures() != null) {
connectFlowCapableNode(swichFeaturesIdent, switchFeatures, nodeIdent);
}
}
+
+ @Override
+ public void onDataChanged(final AsyncDataChangeEvent<InstanceIdentifier<?>, DataObject> changeEvent) {
+ Preconditions.checkNotNull(changeEvent,"Async ChangeEvent can not be null!");
+ /* All DataObjects for create */
+ final Set<InstanceIdentifier<?>> createdData = changeEvent.getCreatedData() != null
+ ? changeEvent.getCreatedData().keySet() : Collections.<InstanceIdentifier<?>> emptySet();
+
+ for (final InstanceIdentifier<?> entryKey : createdData) {
+ final InstanceIdentifier<Node> nodeIdent = entryKey
+ .firstIdentifierOf(Node.class);
+ if ( ! nodeIdent.isWildcarded()) {
+ final NodeRef nodeRef = new NodeRef(nodeIdent);
+ // FIXME: these calls is a job for handshake or for inventory manager
+ /* check Group and Meter future */
+ manager.getRpcMsgManager().getGroupFeaturesStat(nodeRef);
+ manager.getRpcMsgManager().getMeterFeaturesStat(nodeRef);
+ }
+ }
+ }
}
package org.opendaylight.controller.md.statistics.manager.impl;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
statNetCollectorServ.shutdown();
}
+ @Override
+ public boolean hasActiveNodes() {
+ return ( ! statNodeHolder.isEmpty());
+ }
+
@Override
public boolean isProvidedFlowNodeActive(
final InstanceIdentifier<Node> flowNode) {
@Override
public boolean connectedNodeRegistration(final InstanceIdentifier<Node> ident,
final List<StatCapabTypes> statTypes, final Short nrOfSwitchTables) {
- if (ident.isWildcarded()) {
- LOG.warn("FlowCapableNode IstanceIdentifier {} registration can not be wildcarded!", ident);
- } else {
+ if (isNodeIdentValidForUse(ident)) {
if ( ! statNodeHolder.containsKey(ident)) {
synchronized (statNodeHolderLock) {
final boolean startStatCollecting = statNodeHolder.size() == 0;
@Override
public boolean disconnectedNodeUnregistration(final InstanceIdentifier<Node> ident) {
- if (ident.isWildcarded()) {
- LOG.warn("FlowCapableNode IstanceIdentifier {} unregistration can not be wildcarded!", ident);
- } else {
+ if (isNodeIdentValidForUse(ident)) {
if (statNodeHolder.containsKey(ident)) {
synchronized (statNodeHolderLock) {
if (statNodeHolder.containsKey(ident)) {
return false;
}
+ @Override
+ public boolean registerAdditionalNodeFeature(final InstanceIdentifier<Node> ident,
+ final StatCapabTypes statCapab) {
+ if (isNodeIdentValidForUse(ident)) {
+ if ( ! statNodeHolder.containsKey(ident)) {
+ return false;
+ }
+ final StatNodeInfoHolder statNode = statNodeHolder.get(ident);
+ if ( ! statNode.getStatMarkers().contains(statCapab)) {
+ synchronized (statNodeHolderLock) {
+ if ( ! statNode.getStatMarkers().contains(statCapab)) {
+ final List<StatCapabTypes> statCapabForEdit = new ArrayList<>(statNode.getStatMarkers());
+ statCapabForEdit.add(statCapab);
+ final StatNodeInfoHolder nodeInfoHolder = new StatNodeInfoHolder(statNode.getNodeRef(),
+ Collections.unmodifiableList(statCapabForEdit), statNode.getMaxTables());
+
+ final Map<InstanceIdentifier<Node>, StatNodeInfoHolder> statNodes =
+ new HashMap<>(statNodeHolder);
+ statNodes.put(ident, nodeInfoHolder);
+ statNodeHolder = Collections.unmodifiableMap(statNodes);
+ }
+ }
+ }
+ }
+ return true;
+ }
+
@Override
public void collectNextStatistics() {
if (wakeMe) {
break;
case GROUP_STATS:
LOG.trace("STAT-MANAGER-collecting GROUP-STATS for NodeRef {}", actualNodeRef);
- manager.getRpcMsgManager().getGroupFeaturesStat(actualNodeRef);
- waitingForNotification();
manager.getRpcMsgManager().getAllGroupsConfStats(actualNodeRef);
waitingForNotification();
manager.getRpcMsgManager().getAllGroupsStat(actualNodeRef);
break;
case METER_STATS:
LOG.trace("STAT-MANAGER-collecting METER-STATS for NodeRef {}", actualNodeRef);
- manager.getRpcMsgManager().getMeterFeaturesStat(actualNodeRef);
- waitingForNotification();
manager.getRpcMsgManager().getAllMeterConfigStat(actualNodeRef);
waitingForNotification();
manager.getRpcMsgManager().getAllMetersStat(actualNodeRef);
}
}
- @Override
- public boolean hasActiveNodes() {
- return ( ! statNodeHolder.isEmpty());
+ private boolean isNodeIdentValidForUse(final InstanceIdentifier<Node> ident) {
+ if (ident == null) {
+ LOG.warn("FlowCapableNode InstanceIdentifier can not be null!");
+ return false;
+ }
+ if (ident.isWildcarded()) {
+ LOG.warn("FlowCapableNode InstanceIdentifier {} can not be wildcarded!", ident);
+ return false;
+ }
+ return true;
}
}
@Override
public Void call() throws Exception {
- Preconditions.checkArgument(nodeRef != null, "NodeRef can not be null!");
final GetGroupDescriptionInputBuilder builder =
new GetGroupDescriptionInputBuilder();
builder.setNode(nodeRef);
return;
}
}
- LOG.debug("Node {} has not removed.", nodeIdent);
+ LOG.debug("Node {} has not been removed.", nodeIdent);
+ }
+
+ @Override
+ public void registerAdditionalNodeFeature(final InstanceIdentifier<Node> nodeIdent,
+ final StatCapabTypes statCapab) {
+ for (final StatPermCollector collector : statCollectors) {
+ if (collector.registerAdditionalNodeFeature(nodeIdent, statCapab)) {
+ return;
+ }
+ }
+ LOG.debug("Node {} has not been extended for feature {}!", nodeIdent, statCapab);
}
/* Getter internal Statistic Manager Job Classes */
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
-import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
-import org.opendaylight.controller.netconf.mapping.api.Capability;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
+import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.mapping.api.Capability;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class CapabilityProviderImpl implements CapabilityProvider {
private final NetconfOperationServiceSnapshot netconfOperationServiceSnapshot;
private final Set<String> capabilityURIs;
- private static final Logger logger = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(CapabilityProviderImpl.class);
public CapabilityProviderImpl(NetconfOperationServiceSnapshot netconfOperationServiceSnapshot) {
this.netconfOperationServiceSnapshot = netconfOperationServiceSnapshot;
for (Capability cap : caps) {
if(capabilityMap.containsKey(cap.getCapabilityUri())) {
- logger.debug("Duplicate capability {} from service {}", cap.getCapabilityUri(), netconfOperationService);
+ LOG.debug("Duplicate capability {} from service {}", cap.getCapabilityUri(), netconfOperationService);
}
capabilityMap.put(cap.getCapabilityUri(), cap);
package org.opendaylight.controller.netconf.impl;
-import org.opendaylight.controller.netconf.api.jmx.CommitJMXNotification;
-import org.opendaylight.controller.netconf.api.jmx.DefaultCommitOperationMXBean;
-import org.opendaylight.controller.netconf.api.jmx.NetconfJMXNotification;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Element;
-
+import java.util.Set;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanRegistrationException;
import javax.management.NotCompliantMBeanException;
import javax.management.NotificationBroadcasterSupport;
import javax.management.ObjectName;
-import java.util.Set;
+import org.opendaylight.controller.netconf.api.jmx.CommitJMXNotification;
+import org.opendaylight.controller.netconf.api.jmx.DefaultCommitOperationMXBean;
+import org.opendaylight.controller.netconf.api.jmx.NetconfJMXNotification;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Element;
public class DefaultCommitNotificationProducer extends NotificationBroadcasterSupport implements
DefaultCommitOperationMXBean, AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultCommitNotificationProducer.class);
private final MBeanServer mbeanServer;
public DefaultCommitNotificationProducer(MBeanServer mBeanServer) {
this.mbeanServer = mBeanServer;
- logger.debug("Registering to JMX under {}", on);
+ LOG.debug("Registering to JMX under {}", on);
registerMBean(this, mbeanServer, on);
}
public void sendCommitNotification(String message, Element cfgSnapshot, Set<String> capabilities) {
CommitJMXNotification notif = NetconfJMXNotification.afterCommit(this, message, cfgSnapshot, capabilities);
- logger.debug("Notification about commit {} sent", notif);
+ LOG.debug("Notification about commit {} sent", notif);
sendNotification(notif);
}
try {
mbeanServer.unregisterMBean(on);
} catch (InstanceNotFoundException | MBeanRegistrationException e) {
- logger.warn("Ignoring exception while unregistering {} as {}", this, on, e);
+ LOG.warn("Ignoring exception while unregistering {} as {}", this, on, e);
}
}
}
public final class NetconfServerSession extends AbstractNetconfSession<NetconfServerSession, NetconfServerSessionListener> implements NetconfManagementSession {
- private static final Logger logger = LoggerFactory.getLogger(NetconfServerSession.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSession.class);
private final NetconfHelloMessageAdditionalHeader header;
NetconfHelloMessageAdditionalHeader header) {
super(sessionListener, channel, sessionId);
this.header = header;
- logger.debug("Session {} created", toString());
+ LOG.debug("Session {} created", toString());
}
@Override
private Class<? extends Transport> getTransportForString(String transport) {
switch(transport) {
- case "ssh" : return NetconfSsh.class;
- case "tcp" : return NetconfTcp.class;
- default: throw new IllegalArgumentException("Unknown transport type " + transport);
+ case "ssh" :
+ return NetconfSsh.class;
+ case "tcp" :
+ return NetconfTcp.class;
+ default:
+ throw new IllegalArgumentException("Unknown transport type " + transport);
}
}
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
-
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.api.NetconfSessionListener;
public class NetconfServerSessionListener implements NetconfSessionListener<NetconfServerSession> {
- static final Logger logger = LoggerFactory.getLogger(NetconfServerSessionListener.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionListener.class);
private final SessionMonitoringService monitoringService;
private final NetconfOperationRouter operationRouter;
private final AutoCloseable onSessionDownCloseable;
@Override
public void onSessionDown(NetconfServerSession netconfNetconfServerSession, Exception cause) {
- logger.debug("Session {} down, reason: {}", netconfNetconfServerSession, cause.getMessage());
+ LOG.debug("Session {} down, reason: {}", netconfNetconfServerSession, cause.getMessage());
onDown(netconfNetconfServerSession);
}
try {
operationRouter.close();
} catch (Exception closingEx) {
- logger.debug("Ignoring exception while closing operationRouter", closingEx);
+ LOG.debug("Ignoring exception while closing operationRouter", closingEx);
}
try {
onSessionDownCloseable.close();
} catch(Exception ex){
- logger.debug("Ignoring exception while closing onSessionDownCloseable", ex);
+ LOG.debug("Ignoring exception while closing onSessionDownCloseable", ex);
}
}
@Override
public void onSessionTerminated(NetconfServerSession netconfNetconfServerSession,
NetconfTerminationReason netconfTerminationReason) {
- logger.debug("Session {} terminated, reason: {}", netconfNetconfServerSession,
+ LOG.debug("Session {} terminated, reason: {}", netconfNetconfServerSession,
netconfTerminationReason.getErrorMessage());
onDown(netconfNetconfServerSession);
}
// schemas
final NetconfMessage message = processDocument(netconfMessage,
session);
- logger.debug("Responding with message {}", XmlUtil.toString(message.getDocument()));
+ LOG.debug("Responding with message {}", XmlUtil.toString(message.getDocument()));
session.sendMessage(message);
if (isCloseSession(netconfMessage)) {
} catch (final RuntimeException e) {
// TODO: should send generic error or close session?
- logger.error("Unexpected exception", e);
+ LOG.error("Unexpected exception", e);
session.onIncommingRpcFail();
throw new IllegalStateException("Unable to process incoming message " + netconfMessage, e);
} catch (NetconfDocumentedException e) {
- logger.trace("Error occurred while processing message",e);
+ LOG.trace("Error occurred while processing message",e);
session.onOutgoingRpcError();
session.onIncommingRpcFail();
SendErrorExceptionUtil.sendErrorMessage(session, e, netconfMessage);
private void closeNetconfSession(NetconfServerSession session) {
// destroy NetconfOperationService
session.close();
- logger.info("Session {} closed successfully", session.getSessionId());
+ LOG.info("Session {} closed successfully", session.getSessionId());
}
}
private void checkMessageId(Node rootNode) throws NetconfDocumentedException {
- NamedNodeMap attributes = rootNode.getAttributes();
+
+ NamedNodeMap attributes = rootNode.getAttributes();
+
if(attributes.getNamedItemNS(XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.MESSAGE_ID)!=null) {
return;
}
throw new NetconfDocumentedException("Missing attribute" + rootNode.getNodeName(),
NetconfDocumentedException.ErrorType.protocol, NetconfDocumentedException.ErrorTag.missing_attribute,
- NetconfDocumentedException.ErrorSeverity.error, ImmutableMap.of(NetconfDocumentedException.ErrorTag.missing_attribute.toString(),
- XmlNetconfConstants.MESSAGE_ID));
+ NetconfDocumentedException.ErrorSeverity.error,
+ ImmutableMap.of(NetconfDocumentedException.ErrorTag.missing_attribute.toString(),
+ XmlNetconfConstants.MESSAGE_ID));
}
private static boolean isCloseSession(final NetconfMessage incomingDocument) {
package org.opendaylight.controller.netconf.impl;
-import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouterImpl;
import org.opendaylight.controller.netconf.impl.osgi.SessionMonitoringService;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
import io.netty.channel.local.LocalAddress;
import io.netty.util.Timer;
import io.netty.util.concurrent.Promise;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.AbstractMap;
+import java.util.Map;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfServerSessionPreferences;
import org.opendaylight.controller.netconf.nettyutil.AbstractNetconfSessionNegotiator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.AbstractMap;
-import java.util.Map;
-
public class NetconfServerSessionNegotiator
extends
AbstractNetconfSessionNegotiator<NetconfServerSessionPreferences, NetconfServerSession, NetconfServerSessionListener> {
- static final Logger logger = LoggerFactory
- .getLogger(NetconfServerSessionNegotiator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionNegotiator.class);
+
private static final String UNKNOWN = "unknown";
protected NetconfServerSessionNegotiator(
}
- logger.debug("Additional header from hello parsed as {} from {}",
+ LOG.debug("Additional header from hello parsed as {} from {}",
parsedHeader, additionalHeader);
return new NetconfServerSession(sessionListener, channel,
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
-
+import com.google.common.collect.Sets;
+import io.netty.channel.Channel;
+import io.netty.util.Timer;
+import io.netty.util.concurrent.Promise;
import java.util.Set;
-
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfServerSessionPreferences;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.protocol.framework.SessionListenerFactory;
import org.opendaylight.protocol.framework.SessionNegotiator;
import org.opendaylight.protocol.framework.SessionNegotiatorFactory;
-
-import com.google.common.collect.Sets;
-
-import io.netty.channel.Channel;
-import io.netty.util.Timer;
-import io.netty.util.concurrent.Promise;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
private final long connectionTimeoutMillis;
private final DefaultCommitNotificationProducer commitNotificationProducer;
private final SessionMonitoringService monitoringService;
- private static final Logger logger = LoggerFactory.getLogger(NetconfServerSessionNegotiatorFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfServerSessionNegotiatorFactory.class);
private final Set<String> baseCapabilities;
// TODO too many params, refactor
proposal = new NetconfServerSessionPreferences(
createHelloMessage(sessionId, capabilityProvider), sessionId);
} catch (NetconfDocumentedException e) {
- logger.error("Unable to create hello mesage for session {} with capability provider {}", sessionId,capabilityProvider);
+ LOG.error("Unable to create hello message for session {} with capability provider {}", sessionId,capabilityProvider);
throw new IllegalStateException(e);
}
* See <a href="http://tools.ietf.org/html/rfc6241#section-6">rfc6241</a> for details.
*/
public class SubtreeFilter {
- private static final Logger logger = LoggerFactory.getLogger(SubtreeFilter.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SubtreeFilter.class);
static Document applySubtreeFilter(Document requestDocument, Document rpcReply) throws NetconfDocumentedException {
// FIXME: rpcReply document must be reread otherwise some nodes do not inherit namespaces. (services/service)
try {
rpcReply = XmlUtil.readXmlToDocument(XmlUtil.toString(rpcReply, true));
} catch (SAXException | IOException e) {
- logger.error("Cannot transform document", e);
+ LOG.error("Cannot transform document", e);
throw new NetconfDocumentedException("Cannot transform document");
}
XmlNetconfConstants.FILTER, XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0);
if (maybeFilter.isPresent() && (
"subtree".equals(maybeFilter.get().getAttribute("type"))||
- "subtree".equals(maybeFilter.get().getAttribute("type", XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0))
- )) {
+ "subtree".equals(maybeFilter.get().getAttribute("type", XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0)))
+ ) {
// do
if (result == null) {
result = MatchingResult.NO_MATCH;
}
- logger.debug("Matching {} to {} resulted in {}", src, filter, result);
+ LOG.debug("Matching {} to {} resulted in {}", src, filter, result);
return result;
}
package org.opendaylight.controller.netconf.impl.mapping;
import com.google.common.base.Optional;
-
import java.util.Set;
public interface CapabilityProvider {
package org.opendaylight.controller.netconf.impl.mapping.operations;
+import com.google.common.base.Optional;
+import java.util.Collections;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.util.mapping.AbstractSingletonNetconfOperation;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import com.google.common.base.Optional;
-
-import java.util.Collections;
-
public class DefaultCloseSession extends AbstractSingletonNetconfOperation {
public static final String CLOSE_SESSION = "close-session";
private final AutoCloseable sessionResources;
+ getNetconfSessionIdForReporting(), NetconfDocumentedException.ErrorType.application,
NetconfDocumentedException.ErrorTag.operation_failed,
NetconfDocumentedException.ErrorSeverity.error, Collections.singletonMap(
- NetconfDocumentedException.ErrorSeverity.error.toString(), e.getMessage()));
+ NetconfDocumentedException.ErrorSeverity.error.toString(), e.getMessage()));
}
return XmlUtil.createElement(document, XmlNetconfConstants.OK, Optional.<String>absent());
}
package org.opendaylight.controller.netconf.impl.mapping.operations;
import com.google.common.base.Preconditions;
-
+import java.io.InputStream;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
-import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
+import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationRouter;
import org.opendaylight.controller.netconf.mapping.api.HandlingPriority;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
import org.opendaylight.controller.netconf.util.mapping.AbstractNetconfOperation;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import java.io.InputStream;
-
public class DefaultCommit extends AbstractNetconfOperation {
- private static final Logger logger = LoggerFactory.getLogger(DefaultCommit.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultCommit.class);
private static final String NOTIFY_ATTR = "notify";
"Subsequent netconf operation expected by %s", this);
if (isCommitWithoutNotification(requestMessage)) {
- logger.debug("Skipping commit notification");
+ LOG.debug("Skipping commit notification");
} else {
// Send commit notification if commit was not issued by persister
removePersisterAttributes(requestMessage);
Element cfgSnapshot = getConfigSnapshot(operationRouter);
- logger.debug("Config snapshot retrieved successfully {}", cfgSnapshot);
+ LOG.debug("Config snapshot retrieved successfully {}", cfgSnapshot);
notificationProducer.sendCommitNotification("ok", cfgSnapshot, cap.getCapabilities());
}
xmlElement = XmlElement.fromDomElementWithExpected(message.getDocumentElement(),
XmlNetconfConstants.RPC_KEY, XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0);
} catch (NetconfDocumentedException e) {
- logger.trace("Commit operation is not valid due to {}",e);
+ LOG.trace("Commit operation is not valid due to ",e);
return false;
}
if (attr == null || attr.equals("")){
return false;
} else if (attr.equals(Boolean.toString(false))) {
- logger.debug("Commit operation received with notify=false attribute {}", message);
+ LOG.debug("Commit operation received with notify=false attribute {}", message);
return true;
} else {
return false;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
-
+import java.util.Map;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.impl.mapping.CapabilityProvider;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import java.util.Map;
-
public final class DefaultGetSchema extends AbstractLastNetconfOperation {
public static final String GET_SCHEMA = "get-schema";
public static final String IDENTIFIER = "identifier";
public static final String VERSION = "version";
- private static final Logger logger = LoggerFactory.getLogger(DefaultGetSchema.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultGetSchema.class);
private final CapabilityProvider cap;
public DefaultGetSchema(CapabilityProvider cap, String netconfSessionIdForReporting) {
} catch (IllegalStateException e) {
Map<String, String> errorInfo = Maps.newHashMap();
errorInfo.put(entry.identifier, e.getMessage());
- logger.warn("Rpc error: {}", NetconfDocumentedException.ErrorTag.operation_failed, e);
+ LOG.warn("Rpc error: {}", NetconfDocumentedException.ErrorTag.operation_failed, e);
throw new NetconfDocumentedException(e.getMessage(), NetconfDocumentedException.ErrorType.application,
NetconfDocumentedException.ErrorTag.operation_failed,
NetconfDocumentedException.ErrorSeverity.error, errorInfo);
Element getSchemaResult;
getSchemaResult = XmlUtil.createTextElement(document, XmlNetconfConstants.DATA_KEY, schema,
Optional.of(XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_YANG_IETF_NETCONF_MONITORING));
- logger.trace("{} operation successful", GET_SCHEMA);
+ LOG.trace("{} operation successful", GET_SCHEMA);
return getSchemaResult;
}
try {
identifierElement = getSchemaElement.getOnlyChildElementWithSameNamespace(IDENTIFIER);
} catch (MissingNameSpaceException e) {
- logger.trace("Can't get identifier element as only child element with same namespace due to {}",e);
+ LOG.trace("Can't get identifier element as only child element with same namespace due to ",e);
throw NetconfDocumentedException.wrap(e);
}
identifier = identifierElement.getTextContent();
import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorSeverity;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorTag;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException.ErrorType;
-import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.api.NetconfMessage;
+import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.impl.NetconfServerSession;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
import org.opendaylight.controller.netconf.util.mapping.AbstractSingletonNetconfOperation;
public class DefaultStartExi extends AbstractSingletonNetconfOperation implements DefaultNetconfOperation {
public static final String START_EXI = "start-exi";
- private static final Logger logger = LoggerFactory.getLogger(DefaultStartExi.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultStartExi.class);
private NetconfServerSession netconfSession;
public DefaultStartExi(String netconfSessionIdForReporting) {
@Override
public Document handle(Document message,
NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
- logger.debug("Received start-exi message {} ", XmlUtil.toString(message));
+ LOG.debug("Received start-exi message {} ", XmlUtil.toString(message));
try {
netconfSession.startExiCommunication(new NetconfMessage(message));
@Override
protected Element handleWithNoSubsequentOperations(Document document, XmlElement operationElement) throws NetconfDocumentedException {
Element getSchemaResult = document.createElementNS( XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.OK);
- logger.trace("{} operation successful", START_EXI);
+ LOG.trace("{} operation successful", START_EXI);
return getSchemaResult;
}
public static final String STOP_EXI = "stop-exi";
private NetconfServerSession netconfSession;
- private static final Logger logger = LoggerFactory
- .getLogger(DefaultStartExi.class);
+ private static final Logger LOG = LoggerFactory
+ .getLogger(DefaultStopExi.class);
public DefaultStopExi(String netconfSessionIdForReporting) {
super(netconfSessionIdForReporting);
@Override
protected Element handleWithNoSubsequentOperations(Document document, XmlElement operationElement) throws NetconfDocumentedException {
- logger.debug("Received stop-exi message {} ", XmlUtil.toString(operationElement));
+ LOG.debug("Received stop-exi message {} ", XmlUtil.toString(operationElement));
netconfSession.stopExiCommunication();
Element getSchemaResult = document.createElementNS( XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0, XmlNetconfConstants.OK);
- logger.trace("{} operation successful", STOP_EXI);
+ LOG.trace("{} operation successful", STOP_EXI);
return getSchemaResult;
}
public class NetconfImplActivator implements BundleActivator {
- private static final Logger logger = LoggerFactory.getLogger(NetconfImplActivator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfImplActivator.class);
private NetconfOperationServiceFactoryTracker factoriesTracker;
private DefaultCommitNotificationProducer commitNot;
NetconfServerDispatcher dispatch = new NetconfServerDispatcher(serverChannelInitializer, eventLoopGroup, eventLoopGroup);
LocalAddress address = NetconfConfigUtil.getNetconfLocalAddress();
- logger.trace("Starting local netconf server at {}", address);
+ LOG.trace("Starting local netconf server at {}", address);
dispatch.createLocalServer(address);
context.registerService(NetconfOperationProvider.class, factoriesListener, null);
@Override
public void stop(final BundleContext context) {
- logger.info("Shutting down netconf because YangStoreService service was removed");
+ LOG.info("Shutting down netconf because YangStoreService service was removed");
commitNot.close();
eventLoopGroup.shutdownGracefully(0, 1, TimeUnit.SECONDS);
public class NetconfMonitoringServiceImpl implements NetconfMonitoringService, SessionMonitoringService {
- private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringServiceImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringServiceImpl.class);
private final Set<NetconfManagementSession> sessions = new ConcurrentSet<>();
private final NetconfOperationProvider netconfOperationProvider;
@Override
public void onSessionUp(NetconfManagementSession session) {
- logger.debug("Session {} up", session);
+ LOG.debug("Session {} up", session);
Preconditions.checkState(!sessions.contains(session), "Session %s was already added", session);
sessions.add(session);
}
@Override
public void onSessionDown(NetconfManagementSession session) {
- logger.debug("Session {} down", session);
+ LOG.debug("Session {} down", session);
Preconditions.checkState(sessions.contains(session), "Session %s not present", session);
sessions.remove(session);
}
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.TreeMap;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.impl.DefaultCommitNotificationProducer;
import org.opendaylight.controller.netconf.impl.NetconfServerSession;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Set;
-import java.util.TreeMap;
-
public class NetconfOperationRouterImpl implements NetconfOperationRouter {
- private static final Logger logger = LoggerFactory.getLogger(NetconfOperationRouterImpl.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfOperationRouterImpl.class);
private final NetconfOperationServiceSnapshot netconfOperationServiceSnapshot;
private Set<NetconfOperation> allNetconfOperations;
messageAsString = XmlUtil.toString(message);
netconfOperationExecution = getNetconfOperationWithHighestPriority(message, session);
} catch (IllegalArgumentException | IllegalStateException e) {
- logger.warn("Unable to handle rpc {} on session {}", messageAsString, session, e);
+ LOG.warn("Unable to handle rpc {} on session {}", messageAsString, session, e);
String errorMessage = String.format("Unable to handle rpc %s on session %s", messageAsString, session);
Map<String, String> errorInfo = Maps.newHashMap();
}
private NetconfDocumentedException handleUnexpectedEx(String s, Exception e) throws NetconfDocumentedException {
- logger.error(s, e);
+ LOG.error(s, e);
Map<String, String> info = Maps.newHashMap();
info.put(NetconfDocumentedException.ErrorSeverity.error.toString(), e.toString());
private Document executeOperationWithHighestPriority(Document message,
NetconfOperationExecution netconfOperationExecution, String messageAsString)
throws NetconfDocumentedException {
- logger.debug("Forwarding netconf message {} to {}", messageAsString, netconfOperationExecution.netconfOperation);
+ LOG.debug("Forwarding netconf message {} to {}", messageAsString, netconfOperationExecution.netconfOperation);
return netconfOperationExecution.execute(message);
}
*/
package org.opendaylight.controller.netconf.impl.osgi;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
-import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
-
import java.util.HashSet;
import java.util.Set;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
+import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
public class NetconfOperationServiceFactoryListenerImpl implements NetconfOperationServiceFactoryListener,
NetconfOperationProvider {
package org.opendaylight.controller.netconf.impl.osgi;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
import org.opendaylight.controller.netconf.util.CloseableUtil;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
public class NetconfOperationServiceSnapshotImpl implements NetconfOperationServiceSnapshot {
private final Set<NetconfOperationService> services;
import com.google.common.collect.Maps;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
+import java.util.Map;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.util.messages.SendErrorExceptionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Map;
-
-public final class
- DeserializerExceptionHandler implements ChannelHandler {
-
- private static final Logger logger = LoggerFactory.getLogger(DeserializerExceptionHandler.class);
+public final class DeserializerExceptionHandler implements ChannelHandler {
+ private static final Logger LOG = LoggerFactory.getLogger(DeserializerExceptionHandler.class);
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
- logger.warn("An exception occurred during message handling", cause);
+ LOG.warn("An exception occurred during message handling", cause);
handleDeserializerException(ctx, cause);
}
package org.opendaylight.controller.netconf.impl;
import static org.junit.Assert.assertEquals;
+
import org.junit.Test;
import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
-
import io.netty.channel.ChannelFuture;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.HashedWheelTimer;
import io.netty.util.concurrent.GlobalEventExecutor;
-
import java.io.DataOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicLong;
-
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.opendaylight.controller.netconf.client.NetconfClientDispatcher;
import org.opendaylight.controller.netconf.client.NetconfClientDispatcherImpl;
import org.opendaylight.controller.netconf.client.SimpleNetconfClientSessionListener;
+import org.opendaylight.controller.netconf.client.TestingNetconfClient;
import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration;
import org.opendaylight.controller.netconf.client.conf.NetconfClientConfigurationBuilder;
-import org.opendaylight.controller.netconf.client.TestingNetconfClient;
import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationServiceFactoryListenerImpl;
import org.opendaylight.controller.netconf.impl.osgi.SessionMonitoringService;
import org.opendaylight.controller.netconf.mapping.api.Capability;
@RunWith(Parameterized.class)
public class ConcurrentClientsTest {
- private static final Logger logger = LoggerFactory.getLogger(ConcurrentClientsTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConcurrentClientsTest.class);
private static ExecutorService clientExecutor;
@Parameterized.Parameters()
public static Collection<Object[]> data() {
- return Arrays.asList(new Object[][]{
- {4, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
- {1, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
- // empty set of capabilities = only base 1.0 netconf capability
- {4, TestingNetconfClientRunnable.class, Collections.emptySet()},
- {4, TestingNetconfClientRunnable.class, getOnlyExiServerCaps()},
- {4, TestingNetconfClientRunnable.class, getOnlyChunkServerCaps()},
-
- {4, BlockingClientRunnable.class, getOnlyExiServerCaps()},
- {1, BlockingClientRunnable.class, getOnlyExiServerCaps()},
+ return Arrays.asList(new Object[][]{{4, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
+ {1, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES},
+ // empty set of capabilities = only base 1.0 netconf capability
+ {4, TestingNetconfClientRunnable.class, Collections.emptySet()},
+ {4, TestingNetconfClientRunnable.class, getOnlyExiServerCaps()},
+ {4, TestingNetconfClientRunnable.class, getOnlyChunkServerCaps()},
+ {4, BlockingClientRunnable.class, getOnlyExiServerCaps()},
+ {1, BlockingClientRunnable.class, getOnlyExiServerCaps()},
});
}
try {
nettyGroup.shutdownGracefully().get();
} catch (InterruptedException | ExecutionException e) {
- logger.warn("Ignoring exception while cleaning up after test", e);
+ LOG.warn("Ignoring exception while cleaning up after test", e);
}
}
} catch (InterruptedException e) {
throw new IllegalStateException(e);
} catch (ExecutionException e) {
- logger.error("Thread for testing client failed", e);
+ LOG.error("Thread for testing client failed", e);
fail("Client failed: " + e.getMessage());
}
}
@Override
public Document handle(Document requestMessage, NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
try {
- logger.info("Handling netconf message from test {}", XmlUtil.toString(requestMessage));
+ LOG.info("Handling netconf message from test {}", XmlUtil.toString(requestMessage));
counter.getAndIncrement();
return XmlUtil.readXmlToDocument("<test/>");
} catch (Exception e) {
while (sb.toString().endsWith("]]>]]>") == false) {
sb.append((char) inFromServer.read());
}
- logger.info(sb.toString());
+ LOG.info(sb.toString());
outToServer.write(IOUtils.toByteArray(clientHello));
outToServer.write("]]>]]>".getBytes());
while (sb.toString().endsWith("]]>]]>") == false) {
sb.append((char) inFromServer.read());
}
- logger.info(sb.toString());
+ LOG.info(sb.toString());
clientSocket.close();
}
}
final TestingNetconfClient netconfClient =
new TestingNetconfClient(Thread.currentThread().getName(), netconfClientDispatcher, getClientConfig());
long sessionId = netconfClient.getSessionId();
- logger.info("Client with session id {}: hello exchanged", sessionId);
+ LOG.info("Client with session id {}: hello exchanged", sessionId);
final NetconfMessage getMessage = XmlFileLoader
.xmlFileToNetconfMessage("netconfMessages/getConfig.xml");
NetconfMessage result = netconfClient.sendRequest(getMessage).get();
- logger.info("Client with session id {}: got result {}", sessionId, result);
+ LOG.info("Client with session id {}: got result {}", sessionId, result);
Preconditions.checkState(NetconfMessageUtil.isErrorMessage(result) == false,
"Received error response: " + XmlUtil.toString(result.getDocument()) + " to request: "
+ XmlUtil.toString(getMessage.getDocument()));
netconfClient.close();
- logger.info("Client with session id {}: ended", sessionId);
+ LOG.info("Client with session id {}: ended", sessionId);
} catch (final Exception e) {
throw new IllegalStateException(Thread.currentThread().getName(), e);
}
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.HashedWheelTimer;
+import java.lang.management.ManagementFactory;
+import java.net.InetSocketAddress;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.opendaylight.controller.netconf.impl.osgi.NetconfOperationServiceFactoryListenerImpl;
-import java.lang.management.ManagementFactory;
-import java.net.InetSocketAddress;
-
public class NetconfDispatcherImplTest {
private EventLoopGroup nettyGroup;
package org.opendaylight.controller.netconf.impl;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.opendaylight.controller.netconf.api.monitoring.NetconfManagementSession;
-import org.opendaylight.controller.netconf.impl.NetconfServerSession;
-import org.opendaylight.controller.netconf.impl.NetconfServerSessionListener;
import org.opendaylight.controller.netconf.impl.osgi.NetconfMonitoringServiceImpl;
import org.opendaylight.controller.netconf.mapping.api.Capability;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationProvider;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceSnapshot;
import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.*;
-
public class NetconfMonitoringServiceImplTest {
private NetconfMonitoringServiceImpl service;
@RunWith(value = Parameterized.class)
public class SubtreeFilterTest {
- private static final Logger logger = LoggerFactory.getLogger(SubtreeFilterTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SubtreeFilterTest.class);
private final int directoryIndex;
Document preFilterDocument = getDocument("pre-filter.xml");
Document postFilterDocument = getDocument("post-filter.xml");
Document actualPostFilterDocument = SubtreeFilter.applySubtreeFilter(requestDocument, preFilterDocument);
- logger.info("Actual document: {}", XmlUtil.toString(actualPostFilterDocument));
+ LOG.info("Actual document: {}", XmlUtil.toString(actualPostFilterDocument));
Diff diff = XMLUnit.compareXML(postFilterDocument, actualPostFilterDocument);
assertTrue(diff.toString(), diff.similar());
package org.opendaylight.controller.netconf.impl.mapping.operations;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+
import org.junit.Test;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.util.xml.XmlElement;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.w3c.dom.Document;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
public class DefaultCloseSessionTest {
@Test
package org.opendaylight.controller.netconf.impl.mapping.operations;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anySetOf;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import com.google.common.collect.Sets;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import static org.mockito.Mockito.*;
-
public class DefaultCommitTest {
private NetconfOperationChainedExecution operation;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
+
import com.google.common.base.Optional;
import org.junit.Before;
import org.junit.Test;
package org.opendaylight.controller.netconf.impl.mapping.operations;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelPipeline;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.w3c.dom.Document;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Mockito.*;
-
public class DefaultStopExiTest {
@Test
public void testHandleWithNoSubsequentOperations() throws Exception {
package org.opendaylight.controller.netconf.impl.osgi;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import java.util.Arrays;
import java.util.Dictionary;
import org.junit.Before;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.anyString;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-
public class NetconfImplActivatorTest {
@Mock
package org.opendaylight.controller.netconf.impl.osgi;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.osgi.framework.Filter;
import org.osgi.framework.ServiceReference;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
public class NetconfOperationServiceFactoryTrackerTest {
@Mock
package org.opendaylight.controller.netconf.impl.util;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyObject;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import org.junit.Before;
import org.junit.Test;
-import static org.mockito.Mockito.*;
-
public class DeserializerExceptionHandlerTest {
private DeserializerExceptionHandler handler;
public static final String LOOPBACK_ADDRESS = "127.0.0.1";
public static final int SERVER_CONNECTION_TIMEOUT_MILLIS = 5000;
- static ModuleFactory[] FACTORIES = { new TestImplModuleFactory(),
- new DepTestImplModuleFactory(), new NetconfTestImplModuleFactory(),
- new IdentityTestModuleFactory(), new MultipleDependenciesModuleFactory() };
+ static ModuleFactory[] FACTORIES = {new TestImplModuleFactory(),
+ new DepTestImplModuleFactory(),
+ new NetconfTestImplModuleFactory(),
+ new IdentityTestModuleFactory(),
+ new MultipleDependenciesModuleFactory() };
private EventLoopGroup nettyThreadgroup;
private HashedWheelTimer hashedWheelTimer;
return TCP_ADDRESS;
}
- static SessionMonitoringService getNetconfMonitoringListenerService(final Logger logger, final NetconfMonitoringServiceImpl monitor) {
+ static SessionMonitoringService getNetconfMonitoringListenerService(final Logger LOG, final NetconfMonitoringServiceImpl monitor) {
return new SessionMonitoringService() {
@Override
public void onSessionUp(final NetconfManagementSession session) {
- logger.debug("Management session up {}", session);
+ LOG.debug("Management session up {}", session);
monitor.onSessionUp(session);
}
@Override
public void onSessionDown(final NetconfManagementSession session) {
- logger.debug("Management session down {}", session);
+ LOG.debug("Management session down {}", session);
monitor.onSessionDown(session);
}
};
import io.netty.channel.local.LocalAddress;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.concurrent.GlobalEventExecutor;
+import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.file.Files;
public static final String USERNAME = "user";
public static final String PASSWORD = "pwd";
+ private File sshKeyPair;
private SshProxyServer sshProxyServer;
private ExecutorService nioExec;
@Before
public void setUp() throws Exception {
+ sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+ sshKeyPair.deleteOnExit();
nioExec = Executors.newFixedThreadPool(1);
clientGroup = new NioEventLoopGroup();
minaTimerEx = Executors.newScheduledThreadPool(1);
return true;
}
})
- .setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString()))
+ .setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString()))
.setIdleTimeout(Integer.MAX_VALUE)
.createSshProxyServerConfiguration());
}
*/
package org.opendaylight.controller.netconf.it;
+import com.google.common.base.Preconditions;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
-
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
-import com.google.common.base.Preconditions;
-
public final class SSLUtil {
private SSLUtil() {}
package org.opendaylight.controller.netconf.mapping.api;
import com.google.common.base.Optional;
-
import java.util.List;
/**
public class Get extends AbstractNetconfOperation {
- private static final Logger logger = LoggerFactory.getLogger(Get.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Get.class);
private final NetconfMonitoringService netconfMonitor;
public Get(final NetconfMonitoringService netconfMonitor) {
return innerResult;
} catch (final RuntimeException e) {
final String errorMessage = "Get operation for netconf-state subtree failed";
- logger.warn(errorMessage, e);
+ LOG.warn(errorMessage, e);
throw new NetconfDocumentedException(errorMessage, NetconfDocumentedException.ErrorType.application,
NetconfDocumentedException.ErrorTag.operation_failed,
public class NetconfMonitoringActivator implements BundleActivator {
- private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringActivator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringActivator.class);
private NetconfMonitoringServiceTracker monitor;
try {
monitor.close();
} catch (Exception e) {
- logger.warn("Ignoring exception while closing {}", monitor, e);
+ LOG.warn("Ignoring exception while closing {}", monitor, e);
}
}
}
public class NetconfMonitoringServiceTracker extends ServiceTracker<NetconfMonitoringService, NetconfMonitoringService> {
- private static final Logger logger = LoggerFactory.getLogger(NetconfMonitoringServiceTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfMonitoringServiceTracker.class);
private ServiceRegistration<NetconfOperationServiceFactory> reg;
try {
reg.unregister();
} catch (final Exception e) {
- logger.warn("Ignoring exception while unregistering {}", reg, e);
+ LOG.warn("Ignoring exception while unregistering {}", reg, e);
}
}
}
*/
package org.opendaylight.controller.netconf.monitoring.xml;
-import org.opendaylight.controller.netconf.monitoring.xml.model.NetconfState;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.transform.dom.DOMResult;
+import org.opendaylight.controller.netconf.monitoring.xml.model.NetconfState;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
public class JaxBSerializer {
res = new DOMResult();
marshaller.marshal(monitoringModel, res);
} catch (final JAXBException e) {
- throw new RuntimeException("Unable to serialize netconf state " + monitoringModel, e);
+ throw new RuntimeException("Unable to serialize netconf state " + monitoringModel, e);
}
return ((Document)res.getNode()).getDocumentElement();
}
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Collections2;
+import java.util.Collection;
import javax.annotation.Nonnull;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.Yang;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
-
import javax.annotation.Nullable;
import javax.xml.bind.annotation.XmlElement;
-import java.util.Collection;
+import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.Yang;
+import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
final class MonitoringSchema {
*/
package org.opendaylight.controller.netconf.monitoring.xml.model;
+import com.google.common.base.Joiner;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
-
-import com.google.common.base.Joiner;
import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.extension.rev131210.Session1;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.sessions.Session;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
+import java.util.Collection;
+import javax.annotation.Nullable;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlRootElement;
import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.Schemas;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.schemas.Schema;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.sessions.Session;
-import javax.annotation.Nullable;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlElementWrapper;
-import javax.xml.bind.annotation.XmlRootElement;
-import java.util.Collection;
-
@XmlRootElement(name = MonitoringConstants.NETCONF_MONITORING_XML_ROOT_ELEMENT)
public final class NetconfState {
)
package org.opendaylight.controller.netconf.monitoring.xml.model;
-import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
-
import javax.xml.bind.annotation.XmlNs;
import javax.xml.bind.annotation.XmlNsForm;
-import javax.xml.bind.annotation.XmlSchema;
\ No newline at end of file
+import javax.xml.bind.annotation.XmlSchema;
+import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
\ No newline at end of file
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
+
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
package org.opendaylight.controller.netconf.monitoring.osgi;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+
import com.google.common.base.Optional;
import org.junit.Test;
import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService;
import org.opendaylight.controller.netconf.monitoring.MonitoringConstants;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-
public class NetconfMonitoringOperationServiceTest {
@Test
public void testGetters() throws Exception {
package org.opendaylight.controller.netconf.monitoring.osgi;
import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
+
import java.util.Hashtable;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.LoggerFactory;
public abstract class AbstractNetconfSession<S extends NetconfSession, L extends NetconfSessionListener<S>> extends AbstractProtocolSession<NetconfMessage> implements NetconfSession, NetconfExiSession {
- private static final Logger logger = LoggerFactory.getLogger(AbstractNetconfSession.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractNetconfSession.class);
private final L sessionListener;
private final long sessionId;
private boolean up = false;
this.sessionListener = sessionListener;
this.channel = channel;
this.sessionId = sessionId;
- logger.debug("Session {} created", sessionId);
+ LOG.debug("Session {} created", sessionId);
}
protected abstract S thisInstance();
@Override
protected void handleMessage(final NetconfMessage netconfMessage) {
- logger.debug("handling incoming message");
+ LOG.debug("handling incoming message");
sessionListener.onMessage(thisInstance(), netconfMessage);
}
public ChannelFuture sendMessage(final NetconfMessage netconfMessage) {
final ChannelFuture future = channel.writeAndFlush(netconfMessage);
if (delayedEncoder != null) {
- replaceMessageEncoder(delayedEncoder);
- delayedEncoder = null;
+ replaceMessageEncoder(delayedEncoder);
+ delayedEncoder = null;
}
return future;
@Override
protected void endOfInput() {
- logger.debug("Session {} end of input detected while session was in state {}", toString(), isUp() ? "up"
+ LOG.debug("Session {} end of input detected while session was in state {}", toString(), isUp() ? "up"
: "initialized");
if (isUp()) {
this.sessionListener.onSessionDown(thisInstance(), new IOException("End of input detected. Close the session."));
@Override
protected void sessionUp() {
- logger.debug("Session {} up", toString());
+ LOG.debug("Session {} up", toString());
sessionListener.onSessionUp(thisInstance());
this.up = true;
}
try {
exiParams = EXIParameters.fromXmlElement(XmlElement.fromDomDocument(startExiMessage.getDocument()));
} catch (final EXIOptionsException e) {
- logger.warn("Unable to parse EXI parameters from {} om session {}", XmlUtil.toString(startExiMessage.getDocument()), this, e);
+ LOG.warn("Unable to parse EXI parameters from {} om session {}", XmlUtil.toString(startExiMessage.getDocument()), this, e);
throw new IllegalArgumentException(e);
}
final NetconfEXICodec exiCodec = new NetconfEXICodec(exiParams.getOptions());
addExiHandlers(exiCodec);
- logger.debug("Session {} EXI handlers added to pipeline", this);
+ LOG.debug("Session {} EXI handlers added to pipeline", this);
}
protected abstract void addExiHandlers(NetconfEXICodec exiCodec);
import org.w3c.dom.NodeList;
public abstract class AbstractNetconfSessionNegotiator<P extends NetconfSessionPreferences, S extends AbstractNetconfSession<S, L>, L extends NetconfSessionListener<S>>
-extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
+ extends AbstractSessionNegotiator<NetconfHelloMessage, S> {
- private static final Logger logger = LoggerFactory.getLogger(AbstractNetconfSessionNegotiator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractNetconfSessionNegotiator.class);
public static final String NAME_OF_EXCEPTION_HANDLER = "lastExceptionHandler";
@Override
public void operationComplete(Future<? super Channel> future) {
Preconditions.checkState(future.isSuccess(), "Ssl handshake was not successful");
- logger.debug("Ssl handshake complete");
+ LOG.debug("Ssl handshake complete");
start();
}
});
private void start() {
final NetconfMessage helloMessage = this.sessionPreferences.getHelloMessage();
- logger.debug("Session negotiation started with hello message {} on channel {}", XmlUtil.toString(helloMessage.getDocument()), channel);
+ LOG.debug("Session negotiation started with hello message {} on channel {}", XmlUtil.toString(helloMessage.getDocument()), channel);
channel.pipeline().addLast(NAME_OF_EXCEPTION_HANDLER, new ExceptionHandlingInboundChannelHandler());
synchronized (this) {
if (state != State.ESTABLISHED) {
- logger.debug("Connection timeout after {}, session is in state {}", timeout, state);
+ LOG.debug("Connection timeout after {}, session is in state {}", timeout, state);
// Do not fail negotiation if promise is done or canceled
// It would result in setting result of the promise second time and that throws exception
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if(future.isSuccess()) {
- logger.debug("Channel {} closed: success", future.channel());
+ LOG.debug("Channel {} closed: success", future.channel());
} else {
- logger.warn("Channel {} closed: fail", future.channel());
+ LOG.warn("Channel {} closed: fail", future.channel());
}
}
});
protected abstract S getSession(L sessionListener, Channel channel, NetconfHelloMessage message) throws NetconfDocumentedException;
private synchronized void changeState(final State newState) {
- logger.debug("Changing state from : {} to : {} for channel: {}", state, newState, channel);
+ LOG.debug("Changing state from : {} to : {} for channel: {}", state, newState, channel);
Preconditions.checkState(isStateChangePermitted(state, newState), "Cannot change state from %s to %s for chanel %s", state,
newState, channel);
this.state = newState;
if (state == State.OPEN_WAIT && newState == State.FAILED) {
return true;
}
- logger.debug("Transition from {} to {} is not allowed", state, newState);
+ LOG.debug("Transition from {} to {} is not allowed", state, newState);
return false;
}
private final class ExceptionHandlingInboundChannelHandler extends ChannelInboundHandlerAdapter {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
- logger.warn("An exception occurred during negotiation with {}", channel.remoteAddress(), cause);
+ LOG.warn("An exception occurred during negotiation with {}", channel.remoteAddress(), cause);
cancelTimeout();
negotiationFailed(cause);
changeState(State.FAILED);
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.base.Preconditions;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
-
import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
import org.opendaylight.controller.netconf.util.messages.NetconfMessageHeader;
-import com.google.common.base.Preconditions;
-
public class ChunkedFramingMechanismEncoder extends MessageToByteEncoder<ByteBuf> {
public static final int DEFAULT_CHUNK_SIZE = 8192;
public static final int MIN_CHUNK_SIZE = 128;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
-
import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
public class EOMFramingMechanismEncoder extends MessageToByteEncoder<ByteBuf> {
package org.opendaylight.controller.netconf.nettyutil.handler;
+import io.netty.buffer.ByteBuf;
+import io.netty.handler.codec.MessageToByteEncoder;
import org.opendaylight.controller.netconf.util.messages.FramingMechanism;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import io.netty.buffer.ByteBuf;
-import io.netty.handler.codec.MessageToByteEncoder;
-
public final class FramingMechanismHandlerFactory {
- private static final Logger logger = LoggerFactory.getLogger(FramingMechanismHandlerFactory.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FramingMechanismHandlerFactory.class);
private FramingMechanismHandlerFactory() {
// not called - private constructor for utility class
}
public static MessageToByteEncoder<ByteBuf> createHandler(FramingMechanism framingMechanism) {
- logger.debug("{} framing mechanism was selected.", framingMechanism);
+ LOG.debug("{} framing mechanism was selected.", framingMechanism);
if (framingMechanism == FramingMechanism.EOM) {
return new EOMFramingMechanismEncoder();
} else {
package org.opendaylight.controller.netconf.nettyutil.handler;
-import java.util.List;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
+import java.util.List;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class NetconfChunkAggregator extends ByteToMessageDecoder {
- private final static Logger logger = LoggerFactory.getLogger(NetconfChunkAggregator.class);
+ private final static Logger LOG = LoggerFactory.getLogger(NetconfChunkAggregator.class);
private static final String GOT_PARAM_WHILE_WAITING_FOR_PARAM = "Got byte {} while waiting for {}";
private static final String GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM = "Got byte {} while waiting for {}-{}";
public static final int DEFAULT_MAXIMUM_CHUNK_SIZE = 16 * 1024 * 1024;
private void checkNewLine(byte b,String errorMessage){
if (b != '\n') {
- logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'\n');
+ LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'\n');
throw new IllegalStateException(errorMessage);
}
}
private void checkHash(byte b,String errorMessage){
if (b != '#') {
- logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'#');
+ LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM, b, (byte)'#');
throw new IllegalStateException(errorMessage);
}
}
private void checkChunkSize(){
if (chunkSize > maxChunkSize) {
- logger.debug("Parsed chunk size {}, maximum allowed is {}", chunkSize, maxChunkSize);
+ LOG.debug("Parsed chunk size {}, maximum allowed is {}", chunkSize, maxChunkSize);
throw new IllegalStateException("Maximum chunk size exceeded");
}
}
if (b < '0' || b > '9') {
- logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'0', (byte)'9');
+ LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'0', (byte)'9');
throw new IllegalStateException("Invalid chunk size encountered");
}
* comes through.
*/
if (in.readableBytes() < chunkSize) {
- logger.debug("Buffer has {} bytes, need {} to complete chunk", in.readableBytes(), chunkSize);
+ LOG.debug("Buffer has {} bytes, need {} to complete chunk", in.readableBytes(), chunkSize);
in.discardReadBytes();
return;
}
} else if (b == '#') {
state = State.FOOTER_FOUR;
} else {
- logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte) '#', (byte) '1', (byte) '9');
+ LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte) '#', (byte) '1', (byte) '9');
throw new IllegalStateException("Malformed chunk footer encountered (byte 2)");
}
}
private static int processHeaderLengthFirst(byte b) {
if (!isHeaderLengthFirst(b)) {
- logger.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'1', (byte)'9');
+ LOG.debug(GOT_PARAM_WHILE_WAITING_FOR_PARAM_PARAM, b, (byte)'1', (byte)'9');
throw new IllegalStateException("Invalid chunk size encountered (byte 0)");
}
*/
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.base.Preconditions;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+import io.netty.buffer.ByteBufUtil;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.handler.codec.ByteToMessageDecoder;
import java.io.InputStream;
import java.util.List;
-
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
-
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.openexi.sax.EXIReader;
import org.slf4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
-import com.google.common.base.Preconditions;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.ByteBufInputStream;
-import io.netty.buffer.ByteBufUtil;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.handler.codec.ByteToMessageDecoder;
-
public final class NetconfEXIToMessageDecoder extends ByteToMessageDecoder {
private static final Logger LOG = LoggerFactory.getLogger(NetconfEXIToMessageDecoder.class);
final EXIReader r = codec.getReader();
final SAXTransformerFactory transformerFactory
- = (SAXTransformerFactory) TransformerFactory.newInstance();
+ = (SAXTransformerFactory) TransformerFactory.newInstance();
final TransformerHandler handler = transformerFactory.newTransformerHandler();
r.setContentHandler(handler);
*/
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
+import com.google.common.base.Optional;
+import com.google.common.base.Preconditions;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
-
import java.io.IOException;
-
import javax.xml.transform.TransformerException;
-
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessage;
import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Charsets;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-
/**
* Customized NetconfMessageToXMLEncoder that serializes additional header with
* session metadata along with
*/
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Optional;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufOutputStream;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
-
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
-
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Comment;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Optional;
-
public class NetconfMessageToXMLEncoder extends MessageToByteEncoder<NetconfMessage> {
private static final Logger LOG = LoggerFactory.getLogger(NetconfMessageToXMLEncoder.class);
private static final TransformerFactory FACTORY = TransformerFactory.newInstance();
*/
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
-
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
-
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Charsets;
-import com.google.common.collect.ImmutableList;
import org.xml.sax.SAXException;
/**
*/
package org.opendaylight.controller.netconf.nettyutil.handler.exi;
+import com.google.common.base.Preconditions;
import org.opendaylight.controller.netconf.util.xml.XmlElement;
import org.openexi.proc.common.AlignmentType;
import org.openexi.proc.common.EXIOptions;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
-import com.google.common.base.Preconditions;
-
public final class EXIParameters {
private static final String EXI_PARAMETER_ALIGNMENT = "alignment";
static final String EXI_PARAMETER_BYTE_ALIGNED = "byte-aligned";
package org.opendaylight.controller.netconf.nettyutil.handler.ssh.client;
+import com.google.common.base.Preconditions;
+import io.netty.buffer.ByteBuf;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelOutboundHandlerAdapter;
+import io.netty.channel.ChannelPromise;
import java.io.IOException;
import java.net.SocketAddress;
-
import org.apache.sshd.ClientChannel;
import org.apache.sshd.ClientSession;
import org.apache.sshd.SshClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Preconditions;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelOutboundHandlerAdapter;
-import io.netty.channel.ChannelPromise;
-
/**
* Netty SSH handler class. Acts as interface between Netty and SSH library.
*/
public class AsyncSshHandler extends ChannelOutboundHandlerAdapter {
- private static final Logger logger = LoggerFactory.getLogger(AsyncSshHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncSshHandler.class);
public static final String SUBSYSTEM = "netconf";
public static final SshClient DEFAULT_CLIENT = SshClient.setUpDefaultClient();
}
private void startSsh(final ChannelHandlerContext ctx, final SocketAddress address) {
- logger.debug("Starting SSH to {} on channel: {}", address, ctx.channel());
+ LOG.debug("Starting SSH to {} on channel: {}", address, ctx.channel());
final ConnectFuture sshConnectionFuture = sshClient.connect(authenticationHandler.getUsername(), address);
sshConnectionFuture.addListener(new SshFutureListener<ConnectFuture>() {
private synchronized void handleSshSessionCreated(final ConnectFuture future, final ChannelHandlerContext ctx) {
try {
- logger.trace("SSH session created on channel: {}", ctx.channel());
+ LOG.trace("SSH session created on channel: {}", ctx.channel());
session = future.getSession();
final AuthFuture authenticateFuture = authenticationHandler.authenticate(session);
private synchronized void handleSshAuthenticated(final ClientSession session, final ChannelHandlerContext ctx) {
try {
- logger.debug("SSH session authenticated on channel: {}, server version: {}", ctx.channel(), session.getServerVersion());
+ LOG.debug("SSH session authenticated on channel: {}, server version: {}", ctx.channel(), session.getServerVersion());
channel = session.createSubsystemChannel(SUBSYSTEM);
channel.setStreaming(ClientChannel.Streaming.Async);
}
private synchronized void handleSshChanelOpened(final ChannelHandlerContext ctx) {
- logger.trace("SSH subsystem channel opened successfully on channel: {}", ctx.channel());
+ LOG.trace("SSH subsystem channel opened successfully on channel: {}", ctx.channel());
connectPromise.setSuccess();
connectPromise = null;
}
private synchronized void handleSshSetupFailure(final ChannelHandlerContext ctx, final Throwable e) {
- logger.warn("Unable to setup SSH connection on channel: {}", ctx.channel(), e);
+ LOG.warn("Unable to setup SSH connection on channel: {}", ctx.channel(), e);
connectPromise.setFailure(e);
connectPromise = null;
throw new IllegalStateException("Unable to setup SSH connection on channel: " + ctx.channel(), e);
channel = null;
promise.setSuccess();
- logger.debug("SSH session closed on channel: {}", ctx.channel());
+ LOG.debug("SSH session closed on channel: {}", ctx.channel());
ctx.fireChannelInactive();
}
*/
public final class AsyncSshHandlerReader implements SshFutureListener<IoReadFuture>, AutoCloseable {
- private static final Logger logger = LoggerFactory.getLogger(AsyncSshHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AsyncSshHandlerReader.class);
private static final int BUFFER_SIZE = 8192;
if(future.getException() != null) {
if(asyncOut.isClosed() || asyncOut.isClosing()) {
// Ssh dropped
- logger.debug("Ssh session dropped on channel: {}", channelId, future.getException());
+ LOG.debug("Ssh session dropped on channel: {}", channelId, future.getException());
} else {
- logger.warn("Exception while reading from SSH remote on channel {}", channelId, future.getException());
+ LOG.warn("Exception while reading from SSH remote on channel {}", channelId, future.getException());
}
invokeDisconnect();
return;
if (future.getRead() > 0) {
final ByteBuf msg = Unpooled.wrappedBuffer(buf.array(), 0, future.getRead());
- if(logger.isTraceEnabled()) {
- logger.trace("Reading message on channel: {}, message: {}", channelId, AsyncSshHandlerWriter.byteBufToString(msg));
+ if(LOG.isTraceEnabled()) {
+ LOG.trace("Reading message on channel: {}, message: {}", channelId, AsyncSshHandlerWriter.byteBufToString(msg));
}
readHandler.onMessageRead(msg);
*/
public final class AsyncSshHandlerWriter implements AutoCloseable {
- private static final Logger logger = LoggerFactory
+ private static final Logger LOG = LoggerFactory
.getLogger(AsyncSshHandlerWriter.class);
// public static final int MAX_PENDING_WRITES = 1000;
private void writeWithPendingDetection(final ChannelHandlerContext ctx, final ChannelPromise promise, final ByteBuf byteBufMsg) {
try {
- if (logger.isTraceEnabled()) {
- logger.trace("Writing request on channel: {}, message: {}", ctx.channel(), byteBufToString(byteBufMsg));
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Writing request on channel: {}, message: {}", ctx.channel(), byteBufToString(byteBufMsg));
}
asyncIn.write(toBuffer(byteBufMsg)).addListener(new SshFutureListener<IoWriteFuture>() {
- @Override
- public void operationComplete(final IoWriteFuture future) {
- if (logger.isTraceEnabled()) {
- logger.trace("Ssh write request finished on channel: {} with result: {}: and ex:{}, message: {}",
+ @Override
+ public void operationComplete(final IoWriteFuture future) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Ssh write request finished on channel: {} with result: {}: and ex:{}, message: {}",
ctx.channel(), future.isWritten(), future.getException(), byteBufToString(byteBufMsg));
- }
-
- // Notify success or failure
- if (future.isWritten()) {
- promise.setSuccess();
- } else {
- logger.warn("Ssh write request failed on channel: {} for message: {}", ctx.channel(), byteBufToString(byteBufMsg), future.getException());
- promise.setFailure(future.getException());
- }
-
- // Not needed anymore, release
- byteBufMsg.release();
-
- // Check pending queue and schedule next
- // At this time we are guaranteed that we are not in pending state anymore so the next request should succeed
- writePendingIfAny();
- }
- });
+ }
+
+ // Notify success or failure
+ if (future.isWritten()) {
+ promise.setSuccess();
+ } else {
+ LOG.warn("Ssh write request failed on channel: {} for message: {}", ctx.channel(), byteBufToString(byteBufMsg), future.getException());
+ promise.setFailure(future.getException());
+ }
+
+ // Not needed anymore, release
+ byteBufMsg.release();
+
+ // Check pending queue and schedule next
+ // At this time we are guaranteed that we are not in pending state anymore so the next request should succeed
+ writePendingIfAny();
+ }
+ });
} catch (final WritePendingException e) {
queueRequest(ctx, byteBufMsg, promise);
}
// In case of pending, reschedule next message from queue
final PendingWriteRequest pendingWrite = pending.poll();
final ByteBuf msg = pendingWrite.msg;
- if (logger.isTraceEnabled()) {
- logger.trace("Writing pending request on channel: {}, message: {}", pendingWrite.ctx.channel(), byteBufToString(msg));
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Writing pending request on channel: {}, message: {}", pendingWrite.ctx.channel(), byteBufToString(msg));
}
writeWithPendingDetection(pendingWrite.ctx, pendingWrite.promise, msg);
private void queueRequest(final ChannelHandlerContext ctx, final ByteBuf msg, final ChannelPromise promise) {
// try {
- logger.debug("Write pending on channel: {}, queueing, current queue size: {}", ctx.channel(), pending.size());
- if (logger.isTraceEnabled()) {
- logger.trace("Queueing request due to pending: {}", byteBufToString(msg));
+ LOG.debug("Write pending on channel: {}, queueing, current queue size: {}", ctx.channel(), pending.size());
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Queueing request due to pending: {}", byteBufToString(msg));
}
new PendingWriteRequest(ctx, msg, promise).pend(pending);
// } catch (final Exception ex) {
-// logger.warn("Unable to queue write request on channel: {}. Setting fail for the request: {}", ctx.channel(), ex, byteBufToString(msg));
+// LOG.warn("Unable to queue write request on channel: {}. Setting fail for the request: {}", ctx.channel(), ex, byteBufToString(msg));
// msg.release();
// promise.setFailure(ex);
// }
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+
import com.google.common.collect.Lists;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
fullOptions.setPreservePIs(true);
return Arrays.asList(new Object[][]{
- {noChangeXml, new EXIOptions()},
- {fullOptionsXml, fullOptions},
+ {noChangeXml, new EXIOptions()},
+ {fullOptionsXml, fullOptions},
});
}
fullOptions.setPreservePIs(true);
return Arrays.asList(new Object[][]{
- {noChangeXml, new EXIOptions()},
- {fullOptionsXml, fullOptions},
+ {noChangeXml, new EXIOptions()},
+ {fullOptionsXml, fullOptions},
});
}
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
+
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
*/
public class RemoteNetconfCommand implements AsyncCommand, SessionAware {
- private static final Logger logger = LoggerFactory.getLogger(RemoteNetconfCommand.class);
+ private static final Logger LOG = LoggerFactory.getLogger(RemoteNetconfCommand.class);
private final EventLoopGroup clientEventGroup;
private final LocalAddress localAddress;
@Override
public void start(final Environment env) throws IOException {
- logger.trace("Establishing internal connection to netconf server for client: {}", getClientAddress());
+ LOG.trace("Establishing internal connection to netconf server for client: {}", getClientAddress());
final Bootstrap clientBootstrap = new Bootstrap();
clientBootstrap.group(clientEventGroup).channel(LocalChannel.class);
if(future.isSuccess()) {
clientChannel = clientChannelFuture.channel();
} else {
- logger.warn("Unable to establish internal connection to netconf server for client: {}", getClientAddress());
+ LOG.warn("Unable to establish internal connection to netconf server for client: {}", getClientAddress());
Preconditions.checkNotNull(callback, "Exit callback must be set");
callback.onExit(1, "Unable to establish internal connection to netconf server for client: "+ getClientAddress());
}
@Override
public void destroy() {
- logger.trace("Releasing internal connection to netconf server for client: {} on channel: {}",
+ LOG.trace("Releasing internal connection to netconf server for client: {} on channel: {}",
getClientAddress(), clientChannel);
clientChannelFuture.cancel(true);
@Override
public void operationComplete(final ChannelFuture future) throws Exception {
if (future.isSuccess() == false) {
- logger.warn("Unable to release internal connection to netconf server on channel: {}", clientChannel);
+ LOG.warn("Unable to release internal connection to netconf server on channel: {}", clientChannel);
}
}
});
*/
final class SshProxyClientHandler extends ChannelInboundHandlerAdapter {
- private static final Logger logger = LoggerFactory.getLogger(SshProxyClientHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SshProxyClientHandler.class);
private final IoInputStream in;
private final IoOutputStream out;
}, new AsyncSshHandlerReader.ReadMsgHandler() {
@Override
public void onMessageRead(final ByteBuf msg) {
- if(logger.isTraceEnabled()) {
- logger.trace("Forwarding message for client: {} on channel: {}, message: {}",
+ if(LOG.isTraceEnabled()) {
+ LOG.trace("Forwarding message for client: {} on channel: {}, message: {}",
netconfHelloMessageAdditionalHeader.getAddress(), ctx.channel(), AsyncSshHandlerWriter.byteBufToString(msg));
}
// Just forward to delegate
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
- asyncSshHandlerWriter.write(ctx, msg, ctx.newPromise());
+ asyncSshHandlerWriter.write(ctx, msg, ctx.newPromise());
}
@Override
public void channelInactive(final ChannelHandlerContext ctx) throws Exception {
- logger.debug("Internal connection to netconf server was dropped for client: {} on channel: ",
+ LOG.debug("Internal connection to netconf server was dropped for client: {} on channel: ",
netconfHelloMessageAdditionalHeader.getAddress(), ctx.channel());
callback.onExit(1, "Internal connection to netconf server was dropped for client: " +
netconfHelloMessageAdditionalHeader.getAddress() + " on channel: " + ctx.channel());
private static Map<String, String> getProperties(final SshProxyServerConfiguration sshProxyServerConfiguration) {
return new HashMap<String, String>()
- {{
- put(ServerFactoryManager.IDLE_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
- // TODO make auth timeout configurable on its own
- put(ServerFactoryManager.AUTH_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
- }};
+ {
+ {
+ put(ServerFactoryManager.IDLE_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
+ // TODO make auth timeout configurable on its own
+ put(ServerFactoryManager.AUTH_TIMEOUT, String.valueOf(sshProxyServerConfiguration.getIdleTimeout()));
+ }
+ };
}
@Override
import org.slf4j.LoggerFactory;
final class AuthProviderTracker implements ServiceTrackerCustomizer<AuthProvider, AuthProvider>, PasswordAuthenticator {
- private static final Logger logger = LoggerFactory.getLogger(AuthProviderTracker.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AuthProviderTracker.class);
private final BundleContext bundleContext;
@Override
public AuthProvider addingService(final ServiceReference<AuthProvider> reference) {
- logger.trace("Service {} added", reference);
+ LOG.trace("Service {} added", reference);
final AuthProvider authService = bundleContext.getService(reference);
final Integer newServicePreference = getPreference(reference);
if(isBetter(newServicePreference)) {
final AuthProvider authService = bundleContext.getService(reference);
final Integer newServicePreference = getPreference(reference);
if(isBetter(newServicePreference)) {
- logger.trace("Replacing modified service {} in netconf SSH.", reference);
+ LOG.trace("Replacing modified service {} in netconf SSH.", reference);
this.authProvider = authService;
}
}
@Override
public void removedService(final ServiceReference<AuthProvider> reference, final AuthProvider service) {
- logger.trace("Removing service {} from netconf SSH. " +
- "SSH won't authenticate users until AuthProvider service will be started.", reference);
+ LOG.trace("Removing service {} from netconf SSH. {}", reference,
+ " SSH won't authenticate users until AuthProvider service will be started.");
maxPreference = null;
this.authProvider = null;
}
import org.slf4j.LoggerFactory;
public class NetconfSSHActivator implements BundleActivator {
- private static final Logger logger = LoggerFactory.getLogger(NetconfSSHActivator.class);
+ private static final Logger LOG = LoggerFactory.getLogger(NetconfSSHActivator.class);
private static final java.lang.String ALGORITHM = "RSA";
private static final int KEY_SIZE = 4096;
final Optional<InetSocketAddress> maybeSshSocketAddress = NetconfConfigUtil.extractNetconfServerAddress(bundleContext, InfixProp.ssh);
if (maybeSshSocketAddress.isPresent() == false) {
- logger.trace("SSH bridge not configured");
+ LOG.trace("SSH bridge not configured");
return null;
}
final InetSocketAddress sshSocketAddress = maybeSshSocketAddress.get();
- logger.trace("Starting netconf SSH bridge at {}", sshSocketAddress);
+ LOG.trace("Starting netconf SSH bridge at {}", sshSocketAddress);
final LocalAddress localAddress = NetconfConfigUtil.getNetconfLocalAddress();
* the server.
*/
public class EchoClient extends Thread {
- private static final Logger logger = LoggerFactory.getLogger(EchoClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EchoClient.class);
private final ChannelInitializer<LocalChannel> channelInitializer;
// Wait until the connection is closed.
f.channel().closeFuture().sync();
} catch (Exception e) {
- logger.error("Error in client", e);
+ LOG.error("Error in client", e);
throw new RuntimeException("Error in client", e);
} finally {
// Shut down the event loop to terminate all threads.
- logger.info("Client is shutting down");
+ LOG.info("Client is shutting down");
group.shutdownGracefully();
}
}
* the server.
*/
public class EchoClientHandler extends ChannelInboundHandlerAdapter implements ChannelFutureListener {
- private static final Logger logger = LoggerFactory.getLogger(EchoClientHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EchoClientHandler.class);
private ChannelHandlerContext ctx;
private final StringBuilder fromServer = new StringBuilder();
@Override
public synchronized void channelActive(ChannelHandlerContext ctx) {
checkState(this.ctx == null);
- logger.info("channelActive");
+ LOG.info("channelActive");
this.ctx = ctx;
state = State.CONNECTED;
}
ByteBuf bb = (ByteBuf) msg;
String string = bb.toString(Charsets.UTF_8);
fromServer.append(string);
- logger.info(">{}", string);
+ LOG.info(">{}", string);
bb.release();
}
@Override
public synchronized void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Close the connection when an exception is raised.
- logger.warn("Unexpected exception from downstream.", cause);
+ LOG.warn("Unexpected exception from downstream.", cause);
checkState(this.ctx.equals(ctx));
ctx.close();
this.ctx = null;
public synchronized void operationComplete(ChannelFuture future) throws Exception {
checkState(state == State.CONNECTING);
if (future.isSuccess()) {
- logger.trace("Successfully connected, state will be switched in channelActive");
+ LOG.trace("Successfully connected, state will be switched in channelActive");
} else {
state = State.FAILED_TO_CONNECT;
}
* Echoes back any received data from a client.
*/
public class EchoServer implements Runnable {
- private static final Logger logger = LoggerFactory.getLogger(EchoServer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(EchoServer.class);
public void run() {
// Configure the server.
if (message == null || "exit".equalsIgnoreCase(message)) {
break;
}
- logger.debug("Got '{}'", message);
+ LOG.debug("Got '{}'", message);
clientHandler.write(message);
} while (true);
System.exit(0);
@Sharable
public class EchoServerHandler extends ChannelInboundHandlerAdapter {
- private static final Logger logger = LoggerFactory.getLogger(EchoServerHandler.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(EchoServerHandler.class);
private String fromLastNewLine = "";
private final Splitter splitter = Splitter.onPattern("\r?\n");
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
- logger.debug("sleep start");
+ LOG.debug("sleep start");
Thread.sleep(1000);
- logger.debug("sleep done");
+ LOG.debug("sleep done");
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
ByteBuf byteBuf = (ByteBuf) msg;
String message = byteBuf.toString(Charsets.UTF_8);
- logger.info("writing back '{}'", message);
+ LOG.info("writing back '{}'", message);
ctx.write(msg);
fromLastNewLine += message;
for (String line : splitter.split(fromLastNewLine)) {
if ("quit".equals(line)) {
- logger.info("closing server ctx");
+ LOG.info("closing server ctx");
ctx.flush();
ctx.close();
break;
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
- logger.debug("flushing");
+ LOG.debug("flushing");
ctx.flush();
}
}
import org.slf4j.LoggerFactory;
public class ProxyServerHandler extends ChannelInboundHandlerAdapter {
- private static final Logger logger = LoggerFactory.getLogger(ProxyServerHandler.class.getName());
+ private static final Logger LOG = LoggerFactory.getLogger(ProxyServerHandler.class);
private final Bootstrap clientBootstrap;
private final LocalAddress localAddress;
@Override
public void channelInactive(ChannelHandlerContext ctx) {
- logger.info("channelInactive - closing client connection");
+ LOG.info("channelInactive - closing client connection");
clientChannel.close();
}
@Override
public void channelRead(ChannelHandlerContext ctx, final Object msg) {
- logger.debug("Writing to client {}", msg);
+ LOG.debug("Writing to client {}", msg);
clientChannel.write(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
- logger.debug("flushing");
+ LOG.debug("flushing");
clientChannel.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Close the connection when an exception is raised.
- logger.warn("Unexpected exception from downstream.", cause);
+ LOG.warn("Unexpected exception from downstream.", cause);
ctx.close();
}
}
class ProxyClientHandler extends ChannelInboundHandlerAdapter {
- private static final Logger logger = LoggerFactory.getLogger(ProxyClientHandler.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ProxyClientHandler.class);
private final ChannelHandlerContext remoteCtx;
@Override
public void channelActive(ChannelHandlerContext ctx) {
- logger.info("client active");
+ LOG.info("client active");
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
ByteBuf bb = (ByteBuf) msg;
- logger.info(">{}", bb.toString(Charsets.UTF_8));
+ LOG.info(">{}", bb.toString(Charsets.UTF_8));
remoteCtx.write(msg);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
- logger.debug("Flushing server ctx");
+ LOG.debug("Flushing server ctx");
remoteCtx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Close the connection when an exception is raised.
- logger.warn("Unexpected exception from downstream", cause);
+ LOG.warn("Unexpected exception from downstream", cause);
ctx.close();
}
// called both when local or remote connection dies
@Override
public void channelInactive(ChannelHandlerContext ctx) {
- logger.debug("channelInactive() called, closing remote client ctx");
+ LOG.debug("channelInactive() called, closing remote client ctx");
remoteCtx.close();
}
}
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.util.HashedWheelTimer;
+import java.io.File;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.util.concurrent.ExecutorService;
import org.slf4j.LoggerFactory;
public class SSHTest {
- public static final Logger logger = LoggerFactory.getLogger(SSHTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SSHTest.class);
public static final String AHOJ = "ahoj\n";
private static EventLoopGroup nettyGroup;
@Test
public void test() throws Exception {
+ File sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+ sshKeyPair.deleteOnExit();
new Thread(new EchoServer(), "EchoServer").start();
final InetSocketAddress addr = new InetSocketAddress("127.0.0.1", 10831);
public boolean authenticate(final String username, final String password, final ServerSession session) {
return true;
}
- }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
+ }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
final EchoClientHandler echoClientHandler = connectClient(addr);
Thread.sleep(500);
}
assertTrue(echoClientHandler.isConnected());
- logger.info("connected, writing to client");
+ LOG.info("connected, writing to client");
echoClientHandler.write(AHOJ);
// check that server sent back the same string
final String read = echoClientHandler.read();
assertTrue(read + " should end with " + AHOJ, read.endsWith(AHOJ));
} finally {
- logger.info("Closing socket");
+ LOG.info("Closing socket");
sshProxyServer.close();
}
}
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
+import java.io.File;
import java.net.InetSocketAddress;
import java.nio.file.Files;
import java.util.concurrent.ExecutorService;
private static final String PASSWORD = "netconf";
private static final String HOST = "127.0.0.1";
private static final int PORT = 1830;
- private static final Logger logger = LoggerFactory.getLogger(SSHServerTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(SSHServerTest.class);
+ private File sshKeyPair;
private SshProxyServer server;
@Mock
@Before
public void setUp() throws Exception {
+ sshKeyPair = Files.createTempFile("sshKeyPair", ".pem").toFile();
+ sshKeyPair.deleteOnExit();
+
MockitoAnnotations.initMocks(this);
doReturn(null).when(mockedContext).createFilter(anyString());
doNothing().when(mockedContext).addServiceListener(any(ServiceListener.class), anyString());
doReturn(new ServiceReference[0]).when(mockedContext).getServiceReferences(anyString(), anyString());
- logger.info("Creating SSH server");
+ LOG.info("Creating SSH server");
final InetSocketAddress addr = InetSocketAddress.createUnresolved(HOST, PORT);
server = new SshProxyServer(minaTimerEx, clientGroup, nioExec);
public boolean authenticate(final String username, final String password, final ServerSession session) {
return true;
}
- }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(Files.createTempFile("prefix", "suffix").toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
- logger.info("SSH server started on " + PORT);
+ }).setKeyPairProvider(new PEMGeneratorHostKeyProvider(sshKeyPair.toPath().toAbsolutePath().toString())).setIdleTimeout(Integer.MAX_VALUE).createSshProxyServerConfiguration());
+ LOG.info("SSH server started on {}", PORT);
}
@Test