.idea
xtend-gen
classes
+out/
void validateBean(ObjectName configBeanON) throws ValidationException;
- void destroyConfigBean(String moduleName, String instanceName)
- throws InstanceNotFoundException;
+ @Deprecated
+ /**
+ * Use {@link #destroyModule(String, String)}
+ */
+ void destroyConfigBean(String moduleName, String instanceName) throws InstanceNotFoundException;
+
+ void destroyModule(String moduleName, String instanceName) throws InstanceNotFoundException;
void setAttribute(ObjectName on, String jmxName, Attribute attribute);
}
}
@Override
+ @Deprecated
+ /**
+ * {@inheritDoc}
+ */
public void destroyConfigBean(String moduleName, String instanceName)
throws InstanceNotFoundException {
destroyModule(ObjectNameUtil.createTransactionModuleON(
getTransactionName(), moduleName, instanceName));
}
+ @Override
+ public void destroyModule(String moduleName, String instanceName)
+ throws InstanceNotFoundException {
+ destroyModule(ObjectNameUtil.createTransactionModuleON(
+ getTransactionName(), moduleName, instanceName));
+ }
+
@Override
public void abortConfig() {
configTransactionControllerMXBeanProxy.abortConfig();
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
-import org.opendaylight.controller.config.api.ConflictingVersionException;
-import org.opendaylight.controller.config.api.ValidationException;
import org.opendaylight.controller.config.api.jmx.CommitStatus;
import org.opendaylight.controller.config.api.jmx.ObjectNameUtil;
import org.opendaylight.controller.config.manager.impl.AbstractConfigTest;
import javax.management.InstanceAlreadyExistsException;
import javax.management.ObjectName;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
public class NetconfTestImplModuleTest extends AbstractConfigTest {
public static final String TESTING_DEP_PREFIX = "testing-dep";
}
@Test
- public void testDependencyList() throws InstanceAlreadyExistsException, ValidationException,
- ConflictingVersionException {
+ public void testDependencyList() throws Exception {
ConfigTransactionJMXClient transaction = configRegistryClient.createTransaction();
ObjectName on = createInstance(transaction, instanceName, 4);
transaction.validateConfig();
- CommitStatus status = transaction.commit();
+ CommitStatus status1 = transaction.commit();
assertBeanCount(1, factory.getImplementationName());
assertBeanCount(4 + 1, DepTestImplModuleFactory.NAME);
- assertStatus(status, 1 + 4 + 1, 0, 0);
+ assertStatus(status1, 1 + 4 + 1, 0, 0);
transaction = configRegistryClient.createTransaction();
assertTestingDeps(testingDeps, 4);
transaction.abortConfig();
+
+ // check that reuse logic works - equals on list of dependencies.
+ transaction = configRegistryClient.createTransaction();
+ CommitStatus status2 = transaction.commit();
+ assertStatus(status2, 0, 0, 6);
+
+ // replace single dependency
+ transaction = configRegistryClient.createTransaction();
+ String instanceName1 = TESTING_DEP_PREFIX + 1;
+ transaction.destroyModule(DepTestImplModuleFactory.NAME, instanceName1);
+ transaction.createModule(DepTestImplModuleFactory.NAME, instanceName1);
+ CommitStatus status3 = transaction.commit();
+ assertStatus(status3, 1, 1, 4);
+
+ }
+
+ @Test
+ public void testNullCheckInListOfDependencies() throws Exception {
+ ConfigTransactionJMXClient transaction = configRegistryClient.createTransaction();
+
+ ObjectName on = createInstance(transaction, instanceName, 4);
+ NetconfTestImplModuleMXBean proxy = transaction.newMXBeanProxy(on, NetconfTestImplModuleMXBean.class);
+ try{
+ proxy.setTestingDeps(null);
+ fail();
+ }catch(RuntimeException e) {
+ Throwable cause = e.getCause();
+ assertNotNull(cause);
+ assertTrue("Invalid type " + cause, cause instanceof IllegalArgumentException);
+ assertEquals("Null not supported", cause.getMessage());
+ }
+ proxy.setTestingDeps(Collections.<ObjectName>emptyList());
}
private void assertTestingDeps(List<ObjectName> testingDeps, int i) {
CLASSPATH=${CLASSPATH}:${basedir}/lib/org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar
FWCLASSPATH=${FWCLASSPATH},file:${basedir}/lib/org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar
+cd $basedir
+
if [ "${stopdaemon}" -eq 1 ]; then
if [ -e "${pidfile}" ]; then
daemonpid=`cat "${pidfile}"`
}
}
+ /**
+ * Function called by the dependency manager before Container is Stopped and Destroyed.
+ */
+ public void containerStop() {
+ uninstallAllFlowEntries(false);
+ }
+
/**
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
*/
void stop() {
stopping = true;
- uninstallAllFlowEntries(false);
// Shutdown executor
this.executor.shutdownNow();
// Now walk all the workMonitor and wake up the one sleeping because
prefix type;
import opendaylight-inventory {prefix inv; revision-date "2013-08-19";}
+ import ietf-inet-types {prefix inet; revision-date "2010-09-24";}
import yang-ext {prefix ext; revision-date "2013-07-09";}
revision "2013-11-03" {
typedef transaction-id {
type uint64;
}
+ // This refers to MD-SAL transaction reference.
+ grouping transaction-metadata {
+ leaf transaction-uri {
+ type inet:uri;
+ }
+ }
grouping transaction-aware {
leaf transaction-id {
rpc add-flow {
input {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
- uses node-flow;
- uses tr:transaction-aware;
+ uses node-flow;
}
output {
uses tr:transaction-aware;
rpc remove-flow {
input {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
- uses node-flow;
- uses tr:transaction-aware;
+ uses node-flow;
}
output {
uses tr:transaction-aware;
rpc update-flow {
input {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
- uses flow-update;
- uses tr:transaction-aware;
+ uses flow-update;
}
output {
uses tr:transaction-aware;
}
notification flow-added {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
}
notification flow-updated {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
uses node-flow;
- uses tr:transaction-aware;
-
+ uses tr:transaction-aware;
}
notification flow-removed {
+ uses tr:transaction-metadata;
leaf flow-ref {
type types:flow-ref;
}
notification node-error-notification {
uses error:error-message;
uses tr:transaction-aware;
+ uses tr:transaction-metadata;
}
notification node-experimenter-error-notification {
rpc add-group {
input {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
- uses node-group;
- uses tr:transaction-aware;
+ uses node-group;
}
output {
uses tr:transaction-aware;
rpc remove-group {
input {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
- uses node-group;
- uses tr:transaction-aware;
+ uses node-group;
}
output {
uses tr:transaction-aware;
rpc update-group {
input {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
- uses group-update;
- uses tr:transaction-aware;
+ uses group-update;
}
output {
uses tr:transaction-aware;
}
notification group-added {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
}
notification group-updated {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
}
notification group-removed {
+ uses tr:transaction-metadata;
leaf group-ref {
type group-type:group-ref;
}
rpc add-meter {
input {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
- uses node-meter;
- uses tr:transaction-aware;
+ uses node-meter;
}
output {
uses tr:transaction-aware;
rpc remove-meter {
input {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
- uses node-meter;
- uses tr:transaction-aware;
+ uses node-meter;
}
output {
uses tr:transaction-aware;
rpc update-meter {
input {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
- uses meter-update;
- uses tr:transaction-aware;
+ uses meter-update;
}
output {
uses tr:transaction-aware;
}
notification meter-added {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
}
notification meter-updated {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
}
notification meter-removed {
+ uses tr:transaction-metadata;
leaf meter-ref {
type meter-type:meter-ref;
}
--- /dev/null
+module netconf-node-inventory {
+ namespace "urn:opendaylight:netconf-node-inventory";
+ prefix "netinv";
+
+ import opendaylight-inventory { prefix inv; revision-date "2013-08-19";}
+ import yang-ext {prefix ext; revision-date "2013-07-09";}
+
+ revision "2014-01-08" {
+ description "Initial revision of Inventory model";
+ }
+
+ grouping netconf-node-fields {
+ leaf-list initial-capability {
+ type string;
+ }
+
+ leaf-list current-capability {
+ type string;
+ }
+ }
+
+ augment /inv:nodes/inv:node {
+ ext:augment-identifier "netconf-node";
+
+ uses netconf-node-fields;
+ }
+}
\ No newline at end of file
package org.opendaylight.controller.sal.binding.dom.serializer.api;
import org.opendaylight.yangtools.yang.binding.Augmentation;
+import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.Node;
@Override
public ValueWithQName<A> deserialize(Node<?> input);
+
+ public QName getAugmentationQName();
}
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.controller.sal.binding.dom.serializer.api.IdentifierCodec;
+import org.opendaylight.yangtools.yang.binding.DataObject;
public interface CodecRegistry {
void bindingClassEncountered(Class<?> cls);
void putPathToClass(List<QName> names, Class<?> cls);
+
+ public abstract QName getQNameForAugmentation(Class<?> cls);
}
import com.google.common.collect.ImmutableList
import org.opendaylight.yangtools.yang.binding.Augmentation
import java.util.concurrent.ConcurrentHashMap
+import org.opendaylight.yangtools.yang.binding.util.BindingReflections
class InstanceIdentifierCodecImpl implements InstanceIdentifierCodec {
private static val LOG = LoggerFactory.getLogger(InstanceIdentifierCodecImpl);
val CodecRegistry codecRegistry;
- val Map<Class<?>,QName> classToQName = new WeakHashMap;
val Map<Class<?>, Map<List<QName>, Class<?>>> classToPreviousAugment = new WeakHashMap;
public new(CodecRegistry registry) {
previousAugmentation = null;
} else {
- previousQName = resolveQname(baArg.type);
+ previousQName = codecRegistry.getQNameForAugmentation(baArg.type as Class);
previousAugmentation = baArg.type;
}
}
private def dispatch PathArgument serializePathArgument(Item argument, QName previousQname) {
val type = argument.type;
- val qname = resolveQname(type);
+ val qname = BindingReflections.findQName(type);
if(previousQname == null) {
return new NodeIdentifier(qname);
}
val Map<QName,Object> predicates = new HashMap();
val type = argument.type;
val keyCodec = codecRegistry.getIdentifierCodecForIdentifiable(type);
- val qname = resolveQname(type);
+ val qname = BindingReflections.findQName(type);
val combinedInput = new ValueWithQName(previousQname,argument.key)
val compositeOutput = keyCodec.serialize(combinedInput as ValueWithQName);
for(outputValue :compositeOutput.value) {
}
return new NodeIdentifierWithPredicates(QName.create(previousQname,qname.localName),predicates);
}
-
- def resolveQname(Class<?> class1) {
- val qname = classToQName.get(class1);
- if(qname !== null) {
- return qname;
- }
- val qnameField = class1.getField("QNAME");
- val qnameValue = qnameField.get(null) as QName;
- classToQName.put(class1,qnameValue);
- return qnameValue;
- }
}
\ No newline at end of file
import org.opendaylight.yangtools.yang.binding.DataContainer;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.Identifier;
+import org.opendaylight.yangtools.yang.binding.util.BindingReflections;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
import org.opendaylight.yangtools.yang.data.api.Node;
}
return codec;
}
+
+ @Override
+ public QName getQNameForAugmentation(Class<?> cls) {
+ checkArgument(Augmentation.class.isAssignableFrom(cls));
+ return getCodecForAugmentation((Class<? extends Augmentation>)cls).getAugmentationQName();
+ }
private static Class<? extends Augmentable<?>> getAugmentableArgumentFrom(
final Class<? extends Augmentation<?>> augmentation) {
Delegator<BindingCodec> {
private BindingCodec delegate;
+ private QName augmentationQName;
public AugmentationCodecWrapper(BindingCodec<Map<QName, Object>, Object> rawCodec) {
this.delegate = rawCodec;
+ this.augmentationQName = BindingReflections.findQName(rawCodec.getClass());
}
@Override
Object rawCodecValue = getDelegate().deserialize((Map<QName, Object>) input);
return new ValueWithQName<T>(input.getNodeType(), (T) rawCodecValue);
}
+
+ @Override
+ public QName getAugmentationQName() {
+ return augmentationQName;
+ }
}
private class IdentityCompositeCodec implements IdentitityCodec {
// TODO Auto-generated constructor stub
}
- @Override
public YangNode getParent() {
// TODO Auto-generated method stub
return null;
return null;
}
- @Override
public YangNode getParent() {
// TODO Auto-generated method stub
return null;
import javax.ws.rs.WebApplicationException;
import org.junit.BeforeClass;
+import org.junit.Ignore;
import org.junit.Test;
import org.opendaylight.controller.sal.rest.impl.StructuredDataToJsonProvider;
import org.opendaylight.controller.sal.restconf.impl.test.*;
dataLoad("/cnsn-to-json/simple-data-types");
}
+ // FIXME
+ @Ignore
@Test
public void incorrectTopLevelElementTest() {
leafBuild.setConfiguration(true);
contBuild.addChildNode(leafBuild);
- return contBuild.build(null);
+ // FIXME: build method does not accept any arguments
+ //return contBuild.build(null);
+ return null;
}
}
leafBuild.setConfiguration(true);
contBuild.addChildNode(leafBuild);
- return contBuild.build(null);
+ // FIXME: build method does not accept any arguments
+ //return contBuild.build(null);
+ return null;
}
this.multipartMessageManager.addTxIdToRequestTypeEntry(response.get().getResult().getTransactionId()
, StatsRequestType.AGGR_FLOW);
}
+ }else{
+ spLogger.debug("No details found in data store for flow tables associated with Node {}",targetNodeKey);
}
-
- //Note: Just for testing, because i am not able to fetch table list from datastore
- // Bug-225 is raised for investigation.
-
-// spLogger.info("Send aggregate stats request for flow table {} to node {}",1,targetNodeKey);
-// GetAggregateFlowStatisticsFromFlowTableForAllFlowsInputBuilder input =
-// new GetAggregateFlowStatisticsFromFlowTableForAllFlowsInputBuilder();
-//
-// input.setNode(new NodeRef(InstanceIdentifier.builder(Nodes.class).child(Node.class, targetNodeKey).toInstance()));
-// input.setTableId(new TableId((short)1));
-// Future<RpcResult<GetAggregateFlowStatisticsFromFlowTableForAllFlowsOutput>> response =
-// flowStatsService.getAggregateFlowStatisticsFromFlowTableForAllFlows(input.build());`
-//
-// multipartMessageManager.setTxIdAndTableIdMapEntry(response.get().getResult().getTransactionId(), (short)1);
-
}
private void sendAllNodeConnectorsStatisticsRequest(NodeRef targetNode) throws InterruptedException, ExecutionException{
private List<Short> getTablesFromNode(NodeKey nodeKey){
InstanceIdentifier<FlowCapableNode> nodesIdentifier = InstanceIdentifier.builder(Nodes.class).child(Node.class,nodeKey).augmentation(FlowCapableNode.class).toInstance();
- FlowCapableNode node = (FlowCapableNode)dps.readConfigurationData(nodesIdentifier);
+ FlowCapableNode node = (FlowCapableNode)dps.readOperationalData(nodesIdentifier);
List<Short> tablesId = new ArrayList<Short>();
if(node != null && node.getTable()!=null){
spLogger.info("Number of tables {} supported by node {}",node.getTable().size(),nodeKey);
* _ethernetSource=EthernetSource [_address=MacAddress [_value=00:00:00:00:00:00], _mask=null, augmentation=[]],
* _ethernetType=EthernetType [_type=EtherType [_value=2048], _mask=null, augmentation=[]]
*
- * So this custom equals method add additional check, in case any match element is null in data-store-flow, but not
+ * Similarly for inPort, if user/application don't set any value for it, FRM will store null value for it in data store.
+ * When we fetch the same flow (with its statistics) from switch, plugin converts its value to openflow:X:0.
+ * e.g _inPort=Uri [_value=openflow:1:0]
+ *
+ * So this custom equals method add additional check to take care of these scenario, in case any match element is null in data-store-flow, but not
* in the flow fetched from switch.
*
* @param statsFlow
return false;
}
}
- } //else if(!storedFlow.getEthernetMatch().equals(statsFlow.getEthernetMatch())) {
- else if(!EthernetMatchEquals(statsFlow.getEthernetMatch(),storedFlow.getEthernetMatch())) {
+ } else if(!EthernetMatchEquals(statsFlow.getEthernetMatch(),storedFlow.getEthernetMatch())) {
return false;
}
if (storedFlow.getIcmpv4Match()== null) {
}
if (storedFlow.getInPort()== null) {
if (statsFlow.getInPort() != null) {
- return false;
+ String[] portArr = statsFlow.getInPort().getValue().split(":");
+ if(portArr.length >= 3){
+ if(Integer.parseInt(portArr[2]) != 0){
+ return false;
+ }
+ }
}
} else if(!storedFlow.getInPort().equals(statsFlow.getInPort())) {
return false;
try {
Thread.sleep(ATTEMPT_TIMEOUT_MS);
} catch (InterruptedException e1) {
+ Thread.currentThread().interrupt();
throw new RuntimeException(e1);
}
continue;
ignoredMissingCapabilityRegex);
jmxNotificationHandler.init();
} catch (InterruptedException e) {
- logger.info("Interrupted while waiting for netconf connection");
+ Thread.currentThread().interrupt();
+ logger.error("Interrupted while waiting for netconf connection");
+ // uncaught exception handler will deal with this failure
+ throw new RuntimeException("Interrupted while waiting for netconf connection", e);
}
}
};
try {
return sessionListener.getLastMessage(attempts, attemptMsDelay);
} catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
throw new RuntimeException(this + " Cannot read message from " + address, e);
} catch (IllegalStateException e) {
throw new IllegalStateException(this + " Cannot read message from " + address, e);
netconf_ssh_input.join();
}
} catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
logger.error("netconf_ssh_input join error ",e);
}
netconf_ssh_output.join();
}
} catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
logger.error("netconf_ssh_output join error ",e);
}
try {
lock.wait();
} catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
Logger logger = LoggerFactory
.getLogger(ComponentActivatorAbstractBase.class);
private DependencyManager dm;
- private ConcurrentMap<ImmutablePair<String, Object>, Component> dbInstances = (ConcurrentMap<ImmutablePair<String, Object>, Component>) new ConcurrentHashMap<ImmutablePair<String, Object>, Component>();
- private ConcurrentMap<Object, Component> dbGlobalInstances = (ConcurrentMap<Object, Component>) new ConcurrentHashMap<Object, Component>();
+ private ConcurrentMap<ImmutablePair<String, Object>, Component> dbInstances = new ConcurrentHashMap<ImmutablePair<String, Object>, Component>();
+ private ConcurrentMap<Object, Component> dbGlobalInstances = new ConcurrentHashMap<Object, Component>();
/**
* Method that should be overriden by the derived class for customization
containerName, imps[i]);
Component c = this.dbInstances.get(key);
if (c != null) {
+ if (c.getService() != null) {
+ c.invokeCallbackMethod(new Object[] { c.getService() }, "containerStop",
+ new Class[][] {{ Component.class}, {} },
+ new Object[][] { {c}, {} });
+ }
// Now remove the component from dependency manager,
// which will implicitely stop it first
this.dm.remove(c);
log.debug("Host Facing Port in a container came up, install the rules for all hosts from this port !");
Set<HostNodeConnector> allHosts = this.hostTracker.getAllHosts();
for (HostNodeConnector host : allHosts) {
- if (node.equals(host.getnodeconnectorNode())
- && swPort.equals(host.getnodeConnector())) {
+ if (node.equals(host.getnodeconnectorNode())) {
/*
* This host resides behind the same switch and port for which a port up
* message is received. Ideally this should not happen, but if it does,