<repository>mvn:org.opendaylight.controller/features-mdsal/${mdsal.version}/xml/features</repository>
<repository>mvn:org.opendaylight.yangtools/features-yangtools/${yangtools.version}/xml/features</repository>
<repository>mvn:org.opendaylight.controller/features-netconf/${netconf.version}/xml/features</repository>
+ <!-- FIXME: This introduces a cycle between projects, which makes version updates
+ harder. This repository reference should be moved to a different features file.
+ -->
<repository>mvn:org.opendaylight.aaa/features-aaa/${aaa.version}/xml/features</repository>
<feature name='odl-netconf-connector-all' version='${project.version}' description='OpenDaylight :: Netconf Connector :: All'>
<!--
</feature>
<feature name='odl-netconf-ssh' version='${netconf.version}' description="OpenDaylight :: Netconf Connector :: SSH">
<feature version='${netconf.version}'>odl-netconf-tcp</feature>
- <feature version='${aaa.version}'>odl-aaa-authn-plugin</feature>
+ <!-- FIXME: This introduces a cycle between projects, which makes version updates
+ harder. This feature dependency should be moved to a different features file.
+ -->
+ <feature version='${aaa.version}'>odl-aaa-netconf-plugin</feature>
<bundle>mvn:org.opendaylight.controller/netconf-ssh/${netconf.version}</bundle>
<bundle>mvn:org.bouncycastle/bcpkix-jdk15on/${bouncycastle.version}</bundle>
<bundle>mvn:org.bouncycastle/bcprov-jdk15on/${bouncycastle.version}</bundle>
package org.opendaylight.controller.config.api;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
import org.junit.Assert;
import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.opendaylight.controller.config.api.annotations.AbstractServiceInterface;
import org.opendaylight.yangtools.yang.binding.BaseIdentity;
-import javax.management.*;
-
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
public class IdentityAttributeRefTest {
IdentityAttributeRef attr = new IdentityAttributeRef("attr");
package org.opendaylight.controller.config.api;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import com.google.common.collect.Lists;
import java.nio.file.AccessDeniedException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-
-import com.google.common.collect.Lists;
import org.hamcrest.CoreMatchers;
-import org.junit.Assert;
-import org.junit.Before;
import org.junit.Test;
-import javax.management.Query;
-
-import static org.junit.Assert.*;
-
public class JmxAttributeValidationExceptionTest {
- private JmxAttribute jmxAttribute = new JmxAttribute("attr1");
+ private final JmxAttribute jmxAttribute = new JmxAttribute("attr1");
@Test
public void testJmxAttributeValidationExceptionElement() throws Exception {
@Test
public void testJmxAttributeValidationExceptionList() throws Exception {
- List attributeNames = new ArrayList<JmxAttribute>();
+ List<JmxAttribute> attributeNames = new ArrayList<>();
attributeNames.add(new JmxAttribute("att1"));
attributeNames.add(new JmxAttribute("att2"));
attributeNames.add(new JmxAttribute("att3"));
@Test
public void testJmxAttributeValidationExceptionList2() throws Exception {
- List attributeNames = new ArrayList<JmxAttribute>();
+ List<JmxAttribute> attributeNames = new ArrayList<>();
attributeNames.add(new JmxAttribute("att1"));
attributeNames.add(new JmxAttribute("att2"));
attributeNames.add(new JmxAttribute("att3"));
JmxAttributeValidationException.checkCondition(false, "message", jmxAttribute);
}
- private void assertJmxEx(JmxAttributeValidationException e, String message, JmxAttribute... attrNames) {
+ private void assertJmxEx(final JmxAttributeValidationException e, final String message, final JmxAttribute... attrNames) {
assertEquals(message, e.getMessage());
assertEquals(Lists.newArrayList(attrNames), e.getAttributeNames());
}
package org.opendaylight.controller.config.api;
-import junit.framework.Assert;
-import org.junit.Test;
-
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
+import org.junit.Test;
public class ModuleIdentifierTest {
String fact = new String("factory");
package org.opendaylight.controller.config.api;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
-import static org.junit.Assert.assertNotEquals;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
-import static org.junit.matchers.JUnitMatchers.containsString;
-
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import com.google.common.collect.Lists;
import java.util.Map;
-
import org.junit.Assert;
import org.junit.Test;
public class ValidationExceptionTest {
- private String instance = "instance";
+ private final String instance = "instance";
private final ModuleIdentifier mi = new ModuleIdentifier("module", instance);
- private String instance2 = "instance2";
+ private final String instance2 = "instance2";
private final ModuleIdentifier mi2 = new ModuleIdentifier("module", instance2);
private final String message = "ex message";
private final Exception e = new IllegalStateException(message);
package org.opendaylight.controller.config.api.jmx;
+import java.util.ArrayList;
+import java.util.List;
+import javax.management.ObjectName;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import javax.management.ObjectName;
-import java.util.ArrayList;
-import java.util.List;
-
public class CommitStatusTest {
- List newInst = new ArrayList<ObjectName>();
- List reusedInst = new ArrayList<ObjectName>();
- List recreatedInst = new ArrayList<ObjectName>();
+ List<ObjectName> newInst = new ArrayList<>();
+ List<ObjectName> reusedInst = new ArrayList<>();
+ List<ObjectName> recreatedInst = new ArrayList<>();
@Before
public void setUp() throws Exception {
@Test
public void testNotEqual() throws Exception {
- List newInst2 = new ArrayList<ObjectName>();
- List reusedInst2 = new ArrayList<ObjectName>();
- List recreatedInst2 = new ArrayList<ObjectName>();
+ List<ObjectName> newInst2 = new ArrayList<>();
+ List<ObjectName> reusedInst2 = new ArrayList<>();
+ List<ObjectName> recreatedInst2 = new ArrayList<>();
newInst2.add(new ObjectName("first: key1 = value1"));
reusedInst2.add(new ObjectName("second: key = val"));
*/
package org.opendaylight.controller.config.api.jmx;
-import static junit.framework.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-
+import static org.junit.Assert.fail;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
-
import java.util.HashMap;
import java.util.Map;
import javax.management.ObjectName;
-import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.opendaylight.controller.config.api.ModuleIdentifier;
assertPattern(on, pattern);
}
- private void assertPattern(ObjectName test, ObjectName pattern) {
+ private void assertPattern(final ObjectName test, final ObjectName pattern) {
assertTrue(pattern.isPattern());
assertTrue(pattern.apply(test));
}
}, IllegalArgumentException.class);
}
- private void assertFailure(Runnable test, Class<? extends Exception> ex) {
+ private void assertFailure(final Runnable test, final Class<? extends Exception> ex) {
try {
test.run();
} catch(Exception e) {
- Assert.assertTrue("Failed with wrong exception: " + Throwables.getStackTraceAsString(e),
+ assertTrue("Failed with wrong exception: " + Throwables.getStackTraceAsString(e),
e.getClass().isAssignableFrom(ex));
return;
}
*/
package org.opendaylight.controller.config.manager.impl.util;
-import org.opendaylight.controller.config.api.annotations.AbstractServiceInterface;
-import org.opendaylight.controller.config.api.annotations.ServiceInterfaceAnnotation;
-import org.opendaylight.controller.config.spi.Module;
-import org.opendaylight.controller.config.spi.ModuleFactory;
-
-import javax.management.JMX;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
+import javax.management.JMX;
+import org.opendaylight.controller.config.api.annotations.AbstractServiceInterface;
+import org.opendaylight.controller.config.api.annotations.ServiceInterfaceAnnotation;
+import org.opendaylight.controller.config.spi.Module;
+import org.opendaylight.controller.config.spi.ModuleFactory;
public class InterfacesHelper {
}
- private static Set<Class<?>> getAllSuperInterfaces(Set<Class<?>> ifcs) {
+ private static Set<Class<?>> getAllSuperInterfaces(final Set<? extends Class<?>> ifcs) {
Set<Class<?>> interfaces = new HashSet<>(ifcs); // create copy to modify
// each interface can extend other interfaces
Set<Class<?>> result = new HashSet<>();
* Get interfaces that this class is derived from that are JMX interfaces.
*/
public static Set<Class<?>> getMXInterfaces(
- Class<? extends Module> configBeanClass) {
+ final Class<? extends Module> configBeanClass) {
Set<Class<?>> allInterfaces = getAllInterfaces(configBeanClass);
Set<Class<?>> result = new HashSet<>();
for (Class<?> clazz : allInterfaces) {
* annotation.
*/
public static Set<Class<?>> getServiceInterfaces(
- Class<? extends Module> configBeanClass) {
+ final Class<? extends Module> configBeanClass) {
Set<Class<?>> allInterfaces = getAllInterfaces(configBeanClass);
Set<Class<?>> result = new HashSet<>();
for (Class<?> clazz : allInterfaces) {
return result;
}
- public static Set<Class<? extends AbstractServiceInterface>> getAllAbstractServiceClasses(Class<? extends Module> configBeanClass) {
+ public static Set<Class<? extends AbstractServiceInterface>> getAllAbstractServiceClasses(final Class<? extends Module> configBeanClass) {
Set<Class<? extends AbstractServiceInterface>> foundGeneratedSIClasses = new HashSet<>();
for (Class<?> clazz : getAllInterfaces(configBeanClass)) {
* {@link org.opendaylight.controller.config.api.annotations.ServiceInterfaceAnnotation#osgiRegistrationType()}
*/
public static Set<Class<?>> getOsgiRegistrationTypes(
- Class<? extends Module> configBeanClass) {
+ final Class<? extends Module> configBeanClass) {
Set<Class<?>> serviceInterfaces = getServiceInterfaces(configBeanClass);
Set<Class<?>> result = new HashSet<>();
for (Class<?> clazz : serviceInterfaces) {
return result;
}
- public static Set<String> getQNames(Set<ServiceInterfaceAnnotation> siAnnotations) {
+ public static Set<String> getQNames(final Set<ServiceInterfaceAnnotation> siAnnotations) {
Set<String> qNames = new HashSet<>();
for (ServiceInterfaceAnnotation sia: siAnnotations) {
qNames.add(sia.value());
return Collections.unmodifiableSet(qNames);
}
- public static Set<ServiceInterfaceAnnotation> getServiceInterfaceAnnotations(ModuleFactory factory) {
+ public static Set<ServiceInterfaceAnnotation> getServiceInterfaceAnnotations(final ModuleFactory factory) {
Set<Class<? extends AbstractServiceInterface>> implementedServiceIntefaces = Collections.unmodifiableSet(factory.getImplementedServiceIntefaces());
return getServiceInterfaceAnnotations(implementedServiceIntefaces);
}
- private static Set<ServiceInterfaceAnnotation> getServiceInterfaceAnnotations(Set<Class<? extends AbstractServiceInterface>> implementedServiceIntefaces) {
+ private static Set<ServiceInterfaceAnnotation> getServiceInterfaceAnnotations(final Set<Class<? extends AbstractServiceInterface>> implementedServiceIntefaces) {
Set<Class<? extends AbstractServiceInterface>> inspected = getAllAbstractServiceInterfaceClasses(implementedServiceIntefaces);
Set<ServiceInterfaceAnnotation> result = new HashSet<>();
// SIs can form hierarchies, inspect superclass until it does not extend AbstractSI
}
static Set<Class<? extends AbstractServiceInterface>> getAllAbstractServiceInterfaceClasses(
- Set<Class<? extends AbstractServiceInterface>> directlyImplementedAbstractSIs) {
+ final Set<Class<? extends AbstractServiceInterface>> directlyImplementedAbstractSIs) {
- Set<Class<?>> allInterfaces = getAllSuperInterfaces((Set) directlyImplementedAbstractSIs);
+ Set<Class<?>> allInterfaces = getAllSuperInterfaces(directlyImplementedAbstractSIs);
Set<Class<? extends AbstractServiceInterface>> result = new HashSet<>();
for(Class<?> ifc: allInterfaces){
if (AbstractServiceInterface.class.isAssignableFrom(ifc) &&
package org.opendaylight.controller.config.manager.impl.util;
import static com.google.common.base.Preconditions.checkNotNull;
-
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
}
@SafeVarargs
- public static <T> AutoCloseable registerService(BundleContext bundleContext, T service, Class<? super T> ... interfaces) {
+ public static <T> AutoCloseable registerService(final BundleContext bundleContext, final T service, final Class<? super T> ... interfaces) {
checkNotNull(service);
checkNotNull(interfaces);
List<AutoCloseable> autoCloseableList = new ArrayList<>();
};
}
- public static AutoCloseable wrap(final BundleTracker bundleTracker) {
+ public static AutoCloseable wrap(final BundleTracker<?> bundleTracker) {
checkNotNull(bundleTracker);
return new AutoCloseable() {
@Override
package org.opendaylight.controller.config.manager.impl.osgi;
-import static junit.framework.Assert.assertNotNull;
-import static junit.framework.Assert.assertSame;
-import static junit.framework.Assert.fail;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
-
import java.util.Collections;
import javax.management.ObjectName;
import org.junit.Before;
package org.opendaylight.controller.config.manager.impl.osgi;
-import static junit.framework.Assert.fail;
+import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
-import static org.junit.matchers.JUnitMatchers.containsString;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
-
import com.google.common.collect.Lists;
import java.util.Map;
import org.junit.Before;
@Mock
private BundleContext bundleContext;
private BundleContextBackedModuleFactoriesResolver resolver;
- private ServiceReference s1;
- private ServiceReference s2;
+ private ServiceReference<?> s1;
+ private ServiceReference<?> s2;
private ModuleFactory f1;
private ModuleFactory f2;
resolver = new BundleContextBackedModuleFactoriesResolver(bundleContext);
}
- private ModuleFactory getMockFactory(String name) {
+ private ModuleFactory getMockFactory(final String name) {
ModuleFactory mock = mock(ModuleFactory.class);
doReturn(name).when(mock).toString();
doReturn(name).when(mock).getImplementationName();
return mock;
}
- private ServiceReference getServiceRef() {
- ServiceReference mock = mock(ServiceReference.class);
+ private ServiceReference<?> getServiceRef() {
+ ServiceReference<?> mock = mock(ServiceReference.class);
doReturn("serviceRef").when(mock).toString();
final Bundle bundle = mock(Bundle.class);
doReturn(bundleContext).when(bundle).getBundleContext();
package org.opendaylight.controller.config.manager.impl.osgi;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNotNull;
-import static junit.framework.Assert.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
-
import java.util.Dictionary;
import java.util.Set;
-
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
- doAnswer(new Answer() {
+ doAnswer(new Answer<Object>() {
@Override
public Object answer(final InvocationOnMock invocation) throws Throwable {
return getClass().getClassLoader().loadClass((String) invocation.getArguments()[0]);
}
static class WrongConstructorTestingFactory extends TestingFactory {
- WrongConstructorTestingFactory(String randomParam) {
+ WrongConstructorTestingFactory(final String randomParam) {
}
}
package org.opendaylight.controller.config.manager.impl.osgi;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import java.util.Dictionary;
import org.junit.Test;
import org.mockito.Mockito;
import org.opendaylight.controller.config.manager.impl.osgi.mapping.RefreshingSCPModuleInfoRegistry;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
-import java.util.*;
-
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-
public class RefreshingSCPModuleInfoRegistryTest {
@Test
public void testConstructor() throws Exception {
doReturn("string").when(prov).toString();
BundleContext ctxt = mock(BundleContext.class);
- Dictionary dict = new Hashtable();
- ServiceRegistration servReg = mock(ServiceRegistration.class);
+ ServiceRegistration<?> servReg = mock(ServiceRegistration.class);
doReturn(servReg).when(ctxt).registerService(Mockito.any(Class.class), Mockito.any(SchemaContextProvider.class), Mockito.any(Dictionary.class));
doReturn(servReg).when(ctxt).registerService(Mockito.anyString(), Mockito.any(Object.class), Mockito.any(Dictionary.class));
RefreshingSCPModuleInfoRegistry scpreg = new RefreshingSCPModuleInfoRegistry(reg, prov, ctxt);
package org.opendaylight.controller.config.manager.impl.util;
import static org.junit.Assert.assertEquals;
-
+import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Collections;
import java.util.HashSet;
input.add(clazz);
Set<Class<? extends AbstractServiceInterface>> result = InterfacesHelper.getAllAbstractServiceInterfaceClasses(input);
- Set<Class<?>> expected = Sets.newHashSet((Class<?>) TestingScheduledThreadPoolServiceInterface.class,
+ Set<Class<?>> expected = ImmutableSet.of((Class<?>) TestingScheduledThreadPoolServiceInterface.class,
TestingThreadPoolServiceInterface.class
);
assertEquals(expected, result);
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
-
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
OsgiRegistrationUtil.wrap(serviceReg).close();
verify(serviceReg).unregister();
- final BundleTracker tracker = mock(BundleTracker.class);
+ final BundleTracker<?> tracker = mock(BundleTracker.class);
doNothing().when(tracker).close();
OsgiRegistrationUtil.wrap(tracker).close();
verify(tracker).close();
}
private ServiceRegistration<?> mockServiceRegistration() {
- ServiceRegistration mock = mock(ServiceRegistration.class);
+ ServiceRegistration<?> mock = mock(ServiceRegistration.class);
doNothing().when(mock).unregister();
return mock;
}
*/
package org.opendaylight.controller.config.manager.testingservices.scheduledthreadpool;
-import com.google.common.collect.Sets;
+import com.google.common.collect.ImmutableSet;
+import java.util.HashSet;
+import java.util.Set;
import org.opendaylight.controller.config.api.DependencyResolver;
import org.opendaylight.controller.config.api.DependencyResolverFactory;
import org.opendaylight.controller.config.api.DynamicMBeanWithInstance;
import org.opendaylight.controller.config.spi.ModuleFactory;
import org.osgi.framework.BundleContext;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
public class TestingScheduledThreadPoolModuleFactory implements ModuleFactory {
public static final String NAME = "scheduled";
- private static Set<Class<? extends AbstractServiceInterface>> ifc = Collections.unmodifiableSet(Sets.newHashSet(
+ private static Set<Class<? extends AbstractServiceInterface>> ifc = ImmutableSet.of(
(Class<? extends AbstractServiceInterface>) TestingScheduledThreadPoolServiceInterface.class,
- TestingThreadPoolServiceInterface.class));
+ TestingThreadPoolServiceInterface.class);
@Override
public boolean isModuleImplementingServiceInterface(
- Class<? extends AbstractServiceInterface> serviceInterface) {
+ final Class<? extends AbstractServiceInterface> serviceInterface) {
return ifc.contains(serviceInterface);
}
}
@Override
- public Module createModule(String instanceName,
- DependencyResolver dependencyResolver, BundleContext bundleContext) {
+ public Module createModule(final String instanceName,
+ final DependencyResolver dependencyResolver, final BundleContext bundleContext) {
return new TestingScheduledThreadPoolModule(new ModuleIdentifier(NAME,
instanceName), null, null);
}
@Override
- public Module createModule(String instanceName,
- DependencyResolver dependencyResolver, DynamicMBeanWithInstance old, BundleContext bundleContext)
+ public Module createModule(final String instanceName,
+ final DependencyResolver dependencyResolver, final DynamicMBeanWithInstance old, final BundleContext bundleContext)
throws Exception {
TestingScheduledThreadPoolImpl oldInstance;
try {
}
@Override
- public Set<Module> getDefaultModules(DependencyResolverFactory dependencyResolverFactory, BundleContext bundleContext) {
+ public Set<Module> getDefaultModules(final DependencyResolverFactory dependencyResolverFactory, final BundleContext bundleContext) {
return new HashSet<Module>();
}
*/
package org.opendaylight.controller.config.manager.testingservices.scheduledthreadpool.test;
+import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import static org.junit.matchers.JUnitMatchers.containsString;
-
import javax.annotation.Nullable;
import javax.management.DynamicMBean;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import javax.management.ObjectName;
-
import org.junit.Test;
import org.opendaylight.controller.config.api.ValidationException;
import org.opendaylight.controller.config.api.jmx.ObjectNameUtil;
import org.opendaylight.controller.config.manager.testingservices.parallelapsp.TestingParallelAPSPConfigMXBean;
import org.opendaylight.controller.config.manager.testingservices.parallelapsp.TestingParallelAPSPModuleFactory;
import org.opendaylight.controller.config.manager.testingservices.scheduledthreadpool.TestingScheduledThreadPoolImpl;
-import org.opendaylight.controller.config.manager.testingservices.scheduledthreadpool
- .TestingScheduledThreadPoolModuleFactory;
+import org.opendaylight.controller.config.manager.testingservices.scheduledthreadpool.TestingScheduledThreadPoolModuleFactory;
import org.opendaylight.controller.config.util.ConfigTransactionJMXClient;
public class TwoInterfacesExportTest extends AbstractScheduledTest {
- private void assertExists(String moduleName, String instanceName)
+ private void assertExists(final String moduleName, final String instanceName)
throws Exception {
assertExists(null, moduleName, instanceName);
}
- private void assertExists(@Nullable ConfigTransactionJMXClient transaction,
- String moduleName, String instanceName)
+ private void assertExists(@Nullable final ConfigTransactionJMXClient transaction,
+ final String moduleName, final String instanceName)
throws InstanceNotFoundException {
if (transaction != null) {
transaction.lookupConfigBean(moduleName, instanceName);
}
}
- private void assertNotExists(String moduleName, String instanceName) {
+ private void assertNotExists(final String moduleName, final String instanceName) {
assertNotExists(null, moduleName, instanceName);
}
private void assertNotExists(
- @Nullable ConfigTransactionJMXClient transaction,
- String moduleName, String instanceName) {
+ @Nullable final ConfigTransactionJMXClient transaction,
+ final String moduleName, final String instanceName) {
if (transaction != null) {
try {
*/
package org.opendaylight.controller.config.manager.testingservices.threadpool;
-import com.google.common.collect.Sets;
+import com.google.common.collect.ImmutableSet;
+import java.util.HashSet;
+import java.util.Set;
import org.opendaylight.controller.config.api.DependencyResolver;
import org.opendaylight.controller.config.api.DependencyResolverFactory;
import org.opendaylight.controller.config.api.DynamicMBeanWithInstance;
import org.opendaylight.controller.config.spi.ModuleFactory;
import org.osgi.framework.BundleContext;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
public class TestingFixedThreadPoolModuleFactory extends AbstractTestingFixedThreadPoolModuleFactory implements ModuleFactory {
public static final String NAME = "fixed";
- private static Set<Class<? extends AbstractServiceInterface>> ifc = Collections.unmodifiableSet(Sets.newHashSet(
+ private static final Set<Class<? extends AbstractServiceInterface>> ifc = ImmutableSet.of(
(Class<? extends AbstractServiceInterface>) ModifiableThreadPoolServiceInterface.class,
- TestingThreadPoolServiceInterface.class));
+ TestingThreadPoolServiceInterface.class);
@Override
public String getImplementationName() {
}
@Override
- public TestingFixedThreadPoolModule createModule(String instanceName,
- DependencyResolver dependencyResolver, BundleContext bundleContext) {
+ public TestingFixedThreadPoolModule createModule(final String instanceName,
+ final DependencyResolver dependencyResolver, final BundleContext bundleContext) {
return new TestingFixedThreadPoolModule(new ModuleIdentifier(NAME,
instanceName), null, null);
}
@Override
- public Module createModule(String instanceName,
- DependencyResolver dependencyResolver, DynamicMBeanWithInstance old, BundleContext bundleContext)
+ public Module createModule(final String instanceName,
+ final DependencyResolver dependencyResolver, final DynamicMBeanWithInstance old, final BundleContext bundleContext)
throws Exception {
int threadCount = (Integer) old.getAttribute("ThreadCount");
// is the instance compatible?
@Override
public boolean isModuleImplementingServiceInterface(
- Class<? extends AbstractServiceInterface> serviceInterface) {
+ final Class<? extends AbstractServiceInterface> serviceInterface) {
return ifc.contains(serviceInterface);
}
@Override
- public Set<Module> getDefaultModules(DependencyResolverFactory dependencyResolverFactory, BundleContext bundleContext) {
+ public Set<Module> getDefaultModules(final DependencyResolverFactory dependencyResolverFactory, final BundleContext bundleContext) {
return new HashSet<Module>();
}
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
-
-import org.junit.After;
import org.junit.Before;
import org.junit.Test;
*/
package org.opendaylight.controller.config.util;
+import static org.hamcrest.CoreMatchers.hasItem;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
-import static org.junit.matchers.JUnitMatchers.hasItem;
-
+import com.google.common.collect.Sets;
import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
-
import javax.management.InstanceNotFoundException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
-
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.opendaylight.controller.config.api.ConfigRegistry;
-import com.google.common.collect.Sets;
-
public class ConfigRegistryClientsTest {
private TestingConfigRegistry testingRegistry;
assertEquals(Sets.newHashSet(TestingConfigRegistry.run2, TestingConfigRegistry.run1, TestingConfigRegistry.run3), jmxLookup);
}
- private Set<ObjectName> lookupRuntimeBeans(ConfigRegistryClient client)
+ private Set<ObjectName> lookupRuntimeBeans(final ConfigRegistryClient client)
throws Exception {
Set<ObjectName> beans = client.lookupRuntimeBeans();
for (ObjectName on : beans) {
}
private Set<ObjectName> clientLookupRuntimeBeansWithModuleAndInstance(
- ConfigRegistryClient client, String moduleName, String instanceName) {
+ final ConfigRegistryClient client, final String moduleName, final String instanceName) {
Set<ObjectName> beans = client.lookupRuntimeBeans(moduleName, instanceName);
if (beans.size() > 0) {
assertEquals("RuntimeBean",
assertThat(clientLookupServiceInterfaceNames(testingRegistryON), hasItem(TestingConfigRegistry.serviceQName2));
}
- private Set<String> clientLookupServiceInterfaceNames(ObjectName client) throws InstanceNotFoundException{
+ private Set<String> clientLookupServiceInterfaceNames(final ObjectName client) throws InstanceNotFoundException{
return jmxRegistryClient.lookupServiceInterfaceNames(client);
}
*/
package org.opendaylight.controller.config.yang.logback.config;
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.slf4j.LoggerFactory;
-
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.core.rolling.FixedWindowRollingPolicy;
import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy;
import ch.qos.logback.core.rolling.TimeBasedRollingPolicy;
-
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.slf4j.LoggerFactory;
/**
* Implementation of {@link ContextSetter}. Resets running logback
private final LogbackStatusListener statusListener;
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(ContextSetterImpl.class);
- public ContextSetterImpl(LogbackRuntimeRegistrator rootRuntimeBeanRegistratorWrapper) {
+ public ContextSetterImpl(final LogbackRuntimeRegistrator rootRuntimeBeanRegistratorWrapper) {
statusListener = new LogbackStatusListener(rootRuntimeBeanRegistratorWrapper);
statusListener.register();
}
- public void updateContext(LogbackModule module) {
+ @Override
+ public void updateContext(final LogbackModule module) {
LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
List<ch.qos.logback.classic.Logger> loggersBefore = context.getLoggerList();
createLoggers(context, module, Sets.newHashSet(loggersBefore));
}
- private Map<String, Appender<ILoggingEvent>> createConsoleAppenders(LoggerContext context, LogbackModule module) {
+ private Map<String, Appender<ILoggingEvent>> createConsoleAppenders(final LoggerContext context, final LogbackModule module) {
Map<String, Appender<ILoggingEvent>> appendersMap = new HashMap<>();
for (ConsoleAppenderTO appender : module.getConsoleAppenderTO()) {
Preconditions.checkState(appendersMap.containsKey(appender.getName()) == false,
"Duplicate appender name %s", appender.getName());
- ch.qos.logback.core.ConsoleAppender app = new ch.qos.logback.core.ConsoleAppender();
+ ch.qos.logback.core.ConsoleAppender<ILoggingEvent> app = new ch.qos.logback.core.ConsoleAppender<>();
app.setContext(context);
PatternLayoutEncoder encoder = new PatternLayoutEncoder();
encoder.setContext(context);
return appendersMap;
}
- private void createLoggers(LoggerContext context, LogbackModule module,
- Set<ch.qos.logback.classic.Logger> loggersBefore) {
+ private void createLoggers(final LoggerContext context, final LogbackModule module,
+ final Set<ch.qos.logback.classic.Logger> loggersBefore) {
Map<String, Appender<ILoggingEvent>> appendersMap = getAppenders(module, context);
}
}
- private void addNewAppenders(Map<String, Appender<ILoggingEvent>> appendersMap, LoggerTO logger,
- ch.qos.logback.classic.Logger logbackLogger, Optional<Set<Appender<ILoggingEvent>>> appendersBefore) {
+ private void addNewAppenders(final Map<String, Appender<ILoggingEvent>> appendersMap, final LoggerTO logger,
+ final ch.qos.logback.classic.Logger logbackLogger, final Optional<Set<Appender<ILoggingEvent>>> appendersBefore) {
if (logger.getAppenders() != null) {
for (String appenderName : logger.getAppenders()) {
if (appendersMap.containsKey(appenderName)) {
}
}
- private void removeBeforeAppenders(Set<ch.qos.logback.classic.Logger> loggersBefore, LoggerTO logger,
- ch.qos.logback.classic.Logger logbackLogger, Optional<Set<Appender<ILoggingEvent>>> appendersBefore) {
+ private void removeBeforeAppenders(final Set<ch.qos.logback.classic.Logger> loggersBefore, final LoggerTO logger,
+ final ch.qos.logback.classic.Logger logbackLogger, final Optional<Set<Appender<ILoggingEvent>>> appendersBefore) {
if (appendersBefore.isPresent()) {
for (Appender<ILoggingEvent> appenderBefore : appendersBefore.get()) {
logbackLogger.detachAppender(appenderBefore);
}
}
- private Optional<Set<Appender<ILoggingEvent>>> getAppendersBefore(Set<ch.qos.logback.classic.Logger> loggersBefore,
- ch.qos.logback.classic.Logger logbackLogger) {
+ private Optional<Set<Appender<ILoggingEvent>>> getAppendersBefore(final Set<ch.qos.logback.classic.Logger> loggersBefore,
+ final ch.qos.logback.classic.Logger logbackLogger) {
if (loggersBefore.contains(logbackLogger)) {
Iterator<Appender<ILoggingEvent>> appenderIt = logbackLogger.iteratorForAppenders();
Set<Appender<ILoggingEvent>> appendersBefore = Sets.newHashSet();
}
- private Map<String, Appender<ILoggingEvent>> getAppenders(LogbackModule module, LoggerContext context) {
+ private Map<String, Appender<ILoggingEvent>> getAppenders(final LogbackModule module, final LoggerContext context) {
Map<String, Appender<ILoggingEvent>> appendersMap = new HashMap<>();
addAllAppenders(appendersMap, createRollingAppenders(context, module));
addAllAppenders(appendersMap, createFileAppenders(context, module));
return appendersMap;
}
- private void addAllAppenders(Map<String, Appender<ILoggingEvent>> allAppenders,
- Map<String, Appender<ILoggingEvent>> appendersToAdd) {
+ private void addAllAppenders(final Map<String, Appender<ILoggingEvent>> allAppenders,
+ final Map<String, Appender<ILoggingEvent>> appendersToAdd) {
for (String appenderName : appendersToAdd.keySet()) {
Preconditions.checkState(allAppenders.containsKey(appenderName) == false, "Duplicate appender name %s",
appenderName);
}
}
- private Map<String, Appender<ILoggingEvent>> createFileAppenders(LoggerContext context, LogbackModule module) {
+ private Map<String, Appender<ILoggingEvent>> createFileAppenders(final LoggerContext context, final LogbackModule module) {
Map<String, Appender<ILoggingEvent>> appendersMap = new HashMap<>();
for (FileAppenderTO appender : module.getFileAppenderTO()) {
Preconditions.checkState(appendersMap.containsKey(appender.getName()) == false,
"Duplicate appender name %s", appender.getName());
- ch.qos.logback.core.FileAppender app = new ch.qos.logback.core.FileAppender<>();
+ ch.qos.logback.core.FileAppender<ILoggingEvent> app = new ch.qos.logback.core.FileAppender<>();
app.setAppend(appender.getAppend());
app.setContext(context);
PatternLayoutEncoder encoder = new PatternLayoutEncoder();
return appendersMap;
}
- private Map<String, Appender<ILoggingEvent>> createRollingAppenders(LoggerContext context, LogbackModule module) {
+ private Map<String, Appender<ILoggingEvent>> createRollingAppenders(final LoggerContext context, final LogbackModule module) {
Map<String, Appender<ILoggingEvent>> appendersMap = new HashMap<>();
for (RollingFileAppenderTO appender : module.getRollingFileAppenderTO()) {
Preconditions.checkState(appendersMap.containsKey(appender.getName()) == false,
"Duplicate appender name %s", appender.getName());
- ch.qos.logback.core.rolling.RollingFileAppender app = new ch.qos.logback.core.rolling.RollingFileAppender<>();
+ ch.qos.logback.core.rolling.RollingFileAppender<ILoggingEvent> app = new ch.qos.logback.core.rolling.RollingFileAppender<>();
app.setAppend(appender.getAppend());
app.setContext(context);
PatternLayoutEncoder encoder = new PatternLayoutEncoder();
policy.start();
app.setRollingPolicy(policy);
} else if (appender.getRollingPolicyType().equals("TimeBasedRollingPolicy")) {
- TimeBasedRollingPolicy policy = new TimeBasedRollingPolicy();
+ TimeBasedRollingPolicy<ILoggingEvent> policy = new TimeBasedRollingPolicy<>();
policy.setContext(context);
policy.setMaxHistory(appender.getMaxHistory());
if (appender.getCleanHistoryOnStart() != null) {
policy.start();
app.setRollingPolicy(policy);
}
- SizeBasedTriggeringPolicy triggeringPolicy = new SizeBasedTriggeringPolicy();
+ SizeBasedTriggeringPolicy<ILoggingEvent> triggeringPolicy = new SizeBasedTriggeringPolicy<>();
triggeringPolicy.setContext(context);
triggeringPolicy.setMaxFileSize(appender.getMaxFileSize());
triggeringPolicy.start();
*/
package org.opendaylight.controller.config.yang.logback.config;
+import ch.qos.logback.classic.Logger;
+import ch.qos.logback.classic.LoggerContext;
+import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.classic.spi.LoggerComparator;
+import ch.qos.logback.core.Appender;
+import ch.qos.logback.core.rolling.FixedWindowRollingPolicy;
+import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy;
+import ch.qos.logback.core.rolling.TimeBasedRollingPolicy;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-
import org.apache.commons.lang3.StringUtils;
import org.opendaylight.controller.config.api.DependencyResolver;
import org.opendaylight.controller.config.api.DependencyResolverFactory;
import org.osgi.framework.BundleContext;
import org.slf4j.LoggerFactory;
-import ch.qos.logback.classic.Logger;
-import ch.qos.logback.classic.LoggerContext;
-import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.classic.spi.LoggerComparator;
-import ch.qos.logback.core.Appender;
-import ch.qos.logback.core.rolling.FixedWindowRollingPolicy;
-import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy;
-import ch.qos.logback.core.rolling.TimeBasedRollingPolicy;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
/**
*
*/
private Map<String, FileAppenderTO> fileDTOs;
@Override
- public LogbackModule instantiateModule(String instanceName, DependencyResolver dependencyResolver,
- BundleContext bundleContext) {
+ public LogbackModule instantiateModule(final String instanceName, final DependencyResolver dependencyResolver,
+ final BundleContext bundleContext) {
Preconditions.checkArgument(instanceName.equals(INSTANCE_NAME),
"There should be just one instance of logback, named " + INSTANCE_NAME);
prepareDTOs();
}
@Override
- public LogbackModule instantiateModule(String instanceName, DependencyResolver dependencyResolver,
- LogbackModule oldModule, AutoCloseable oldInstance, BundleContext bundleContext) {
+ public LogbackModule instantiateModule(final String instanceName, final DependencyResolver dependencyResolver,
+ final LogbackModule oldModule, final AutoCloseable oldInstance, final BundleContext bundleContext) {
Preconditions.checkArgument(instanceName.equals(INSTANCE_NAME),
"There should be just one instance of logback, named " + INSTANCE_NAME);
prepareDTOs();
prepareAppendersDTOs(context);
}
- private void prepareAppendersDTOs(LoggerContext context) {
+ private void prepareAppendersDTOs(final LoggerContext context) {
this.rollingDTOs = new HashMap<>();
this.consoleDTOs = new HashMap<>();
this.fileDTOs = new HashMap<>();
app.setFileNamePattern(rollingPolicy.getFileNamePattern());
app.setRollingPolicyType("FixedWindowRollingPolicy");
} else if (rollingApp.getRollingPolicy() instanceof TimeBasedRollingPolicy<?>) {
- TimeBasedRollingPolicy rollingPolicy = (TimeBasedRollingPolicy) rollingApp.getRollingPolicy();
+ TimeBasedRollingPolicy<ILoggingEvent> rollingPolicy = (TimeBasedRollingPolicy<ILoggingEvent>) rollingApp.getRollingPolicy();
app.setRollingPolicyType("TimeBasedRollingPolicy");
app.setFileNamePattern(rollingPolicy.getFileNamePattern());
app.setMaxHistory(rollingPolicy.getMaxHistory());
}
}
- private Map<String, LoggerTO> prepareLoggersDTOs(LoggerContext context) {
+ private Map<String, LoggerTO> prepareLoggersDTOs(final LoggerContext context) {
Map<String, LoggerTO> DTOs = new HashMap<>();
List<String> appenders = new ArrayList<>();
List<org.slf4j.Logger> loggersToBeAdd = removeUnusableLoggers(context.getLoggerList(),
return DTOs;
}
- private List<org.slf4j.Logger> removeUnusableLoggers(List<Logger> loggerList, Logger rootLogger) {
+ private List<org.slf4j.Logger> removeUnusableLoggers(final List<Logger> loggerList, final Logger rootLogger) {
Collections.sort(loggerList, new LoggerComparator());
Map<String, org.slf4j.Logger> loggersToReturn = new HashMap<>();
}
@Override
- public Set<LogbackModule> getDefaultModules(DependencyResolverFactory dependencyResolverFactory,
- BundleContext bundleContext) {
+ public Set<LogbackModule> getDefaultModules(final DependencyResolverFactory dependencyResolverFactory,
+ final BundleContext bundleContext) {
DependencyResolver resolver = dependencyResolverFactory.createDependencyResolver(new ModuleIdentifier(
getImplementationName(), INSTANCE_NAME));
LogbackModule defaultLogback = instantiateModule(INSTANCE_NAME, resolver, bundleContext);
<version>0.2.0-SNAPSHOT</version>
<relativePath>../..</relativePath>
</parent>
- <groupId>org.opendaylight.controller</groupId>
<artifactId>karaf.branding</artifactId>
<version>1.1.0-SNAPSHOT</version>
<packaging>bundle</packaging>
</parent>
<modelVersion>4.0.0</modelVersion>
- <groupId>org.opendaylight.controller</groupId>
<artifactId>benchmark-data-store</artifactId>
<dependencies>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-inmemory-datastore</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>sal-broker-impl</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ </dependency>
</dependencies>
<build>
</plugin>
</plugins>
</build>
-</project>
\ No newline at end of file
+</project>
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.benchmark;
+
+import java.util.concurrent.TimeUnit;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.api.DOMDataReadWriteTransaction;
+import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataBrokerImpl;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Warmup;
+
+/**
+ * @author Lukas Sedlak <lsedlak@cisco.com>
+ */
+public abstract class AbstractInMemoryBrokerWriteTransactionBenchmark extends AbstractInMemoryWriteTransactionBenchmark {
+
+    // Broker under test; must be initialized by the concrete subclass in setUp()
+    // before any benchmark method runs.
+    protected DOMDataBrokerImpl domBroker;
+
+    // Seeds the OPERATIONAL datastore with an empty outer-list container (via
+    // provideOuterListNode()) so the benchmark writes below have an existing
+    // parent to write under. Blocks until the seeding transaction commits.
+    protected void initTestNode() throws Exception {
+        final YangInstanceIdentifier testPath = YangInstanceIdentifier.builder(BenchmarkModel.TEST_PATH)
+            .build();
+        DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+        writeTx.put(LogicalDatastoreType.OPERATIONAL, testPath, provideOuterListNode());
+
+        writeTx.submit().get();
+    }
+
+    // Writes 100K outer-list entries (each with a one-item inner list) in a
+    // single transaction, committing once at the end.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write100KSingleNodeWithOneInnerItemInOneCommitBenchmark() throws Exception {
+
+        DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_100K; ++outerListKey) {
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_100K_PATHS[outerListKey], OUTER_LIST_ONE_ITEM_INNER_LIST[outerListKey]);
+        }
+
+        writeTx.submit().get();
+    }
+
+    // Same data set as above, but opens and commits a fresh transaction per
+    // entry — measures per-commit overhead instead of bulk-write throughput.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write100KSingleNodeWithOneInnerItemInCommitPerWriteBenchmark() throws Exception {
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_100K; ++outerListKey) {
+            DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_100K_PATHS[outerListKey], OUTER_LIST_ONE_ITEM_INNER_LIST[outerListKey]);
+
+            writeTx.submit().get();
+        }
+    }
+
+    // 50K entries, two-item inner lists, single commit.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write50KSingleNodeWithTwoInnerItemsInOneCommitBenchmark() throws Exception {
+        DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_50K; ++outerListKey) {
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_50K_PATHS[outerListKey], OUTER_LIST_TWO_ITEM_INNER_LIST[outerListKey]);
+        }
+
+        writeTx.submit().get();
+    }
+
+    // 50K entries, two-item inner lists, one commit per entry.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write50KSingleNodeWithTwoInnerItemsInCommitPerWriteBenchmark() throws Exception {
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_50K; ++outerListKey) {
+            DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_50K_PATHS[outerListKey], OUTER_LIST_TWO_ITEM_INNER_LIST[outerListKey]);
+            writeTx.submit().get();
+        }
+    }
+
+    // 10K entries, ten-item inner lists, single commit.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write10KSingleNodeWithTenInnerItemsInOneCommitBenchmark() throws Exception {
+        DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_10K; ++outerListKey) {
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_10K_PATHS[outerListKey], OUTER_LIST_TEN_ITEM_INNER_LIST[outerListKey]);
+        }
+        writeTx.submit().get();
+    }
+
+    // 10K entries, ten-item inner lists, one commit per entry.
+    @Benchmark
+    @Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    @Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
+    public void write10KSingleNodeWithTenInnerItemsInCommitPerWriteBenchmark() throws Exception {
+        for (int outerListKey = 0; outerListKey < OUTER_LIST_10K; ++outerListKey) {
+            DOMDataReadWriteTransaction writeTx = domBroker.newReadWriteTransaction();
+            writeTx.put(LogicalDatastoreType.OPERATIONAL, OUTER_LIST_10K_PATHS[outerListKey], OUTER_LIST_TEN_ITEM_INNER_LIST[outerListKey]);
+            writeTx.submit().get();
+        }
+    }
+}
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
-import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
-import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
-import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.CollectionNodeBuilder;
-import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Warmup;
/**
* @author Lukas Sedlak <lsedlak@cisco.com>
*/
-public abstract class AbstractInMemoryDatastoreWriteTransactionBenchmark {
+public abstract class AbstractInMemoryDatastoreWriteTransactionBenchmark extends AbstractInMemoryWriteTransactionBenchmark {
- private static final int WARMUP_ITERATIONS = 20;
- private static final int MEASUREMENT_ITERATIONS = 20;
-
- private static final int OUTER_LIST_100K = 100000;
- private static final int OUTER_LIST_50K = 50000;
- private static final int OUTER_LIST_10K = 10000;
-
- private static final YangInstanceIdentifier[] OUTER_LIST_100K_PATHS = initOuterListPaths(OUTER_LIST_100K);
- private static final YangInstanceIdentifier[] OUTER_LIST_50K_PATHS = initOuterListPaths(OUTER_LIST_50K);
- private static final YangInstanceIdentifier[] OUTER_LIST_10K_PATHS = initOuterListPaths(OUTER_LIST_10K);
-
- private static YangInstanceIdentifier[] initOuterListPaths(final int outerListPathsCount) {
- final YangInstanceIdentifier[] paths = new YangInstanceIdentifier[outerListPathsCount];
-
- for (int outerListKey = 0; outerListKey < outerListPathsCount; ++outerListKey) {
- paths[outerListKey] = YangInstanceIdentifier.builder(BenchmarkModel.OUTER_LIST_PATH)
- .nodeWithKey(BenchmarkModel.OUTER_LIST_QNAME, BenchmarkModel.ID_QNAME, outerListKey)
- .build();
- }
- return paths;
- }
-
- private static final MapNode ONE_ITEM_INNER_LIST = initInnerListItems(1);
- private static final MapNode TWO_ITEM_INNER_LIST = initInnerListItems(2);
- private static final MapNode TEN_ITEM_INNER_LIST = initInnerListItems(10);
-
- private static MapNode initInnerListItems(final int count) {
- final CollectionNodeBuilder<MapEntryNode, MapNode> mapEntryBuilder = ImmutableNodes
- .mapNodeBuilder(BenchmarkModel.INNER_LIST_QNAME);
-
- for (int i = 1; i <= count; ++i) {
- mapEntryBuilder
- .withChild(ImmutableNodes.mapEntry(BenchmarkModel.INNER_LIST_QNAME, BenchmarkModel.NAME_QNAME, i));
- }
- return mapEntryBuilder.build();
- }
-
- private static final NormalizedNode<?, ?>[] OUTER_LIST_ONE_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_100K, ONE_ITEM_INNER_LIST);
- private static final NormalizedNode<?, ?>[] OUTER_LIST_TWO_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_50K, TWO_ITEM_INNER_LIST);
- private static final NormalizedNode<?, ?>[] OUTER_LIST_TEN_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_10K, TEN_ITEM_INNER_LIST);
-
- private static NormalizedNode<?,?>[] initOuterListItems(int outerListItemsCount, MapNode innerList) {
- final NormalizedNode<?,?>[] outerListItems = new NormalizedNode[outerListItemsCount];
-
- for (int i = 0; i < outerListItemsCount; ++i) {
- int outerListKey = i;
- outerListItems[i] = ImmutableNodes.mapEntryBuilder(BenchmarkModel.OUTER_LIST_QNAME, BenchmarkModel.ID_QNAME, outerListKey)
- .withChild(innerList).build();
- }
- return outerListItems;
- }
-
- protected SchemaContext schemaContext;
protected InMemoryDOMDataStore domStore;
- abstract public void setUp() throws Exception;
-
- abstract public void tearDown();
-
protected void initTestNode() throws Exception {
final YangInstanceIdentifier testPath = YangInstanceIdentifier.builder(BenchmarkModel.TEST_PATH)
.build();
cohort.commit().get();
}
- private DataContainerChild<?, ?> provideOuterListNode() {
- return ImmutableContainerNodeBuilder
- .create()
- .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(BenchmarkModel.TEST_QNAME))
- .withChild(
- ImmutableNodes.mapNodeBuilder(BenchmarkModel.OUTER_LIST_QNAME)
- .build()).build();
- }
-
@Benchmark
@Warmup(iterations = WARMUP_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = MEASUREMENT_ITERATIONS, timeUnit = TimeUnit.MILLISECONDS)
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.benchmark;
+
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
+import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
+import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.CollectionNodeBuilder;
+import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+/**
+ * Shared fixture data for the in-memory write-transaction benchmarks: pre-built
+ * {@link YangInstanceIdentifier} paths and outer-list payloads for the 100K/50K/10K
+ * benchmark sizes, plus the setUp/tearDown lifecycle contract for subclasses.
+ * All fixture arrays are built once at class-initialization time so payload
+ * construction does not pollute the measured benchmark loops.
+ */
+public abstract class AbstractInMemoryWriteTransactionBenchmark {
+    // Outer-list entry counts for the three benchmark sizes.
+    protected static final int OUTER_LIST_100K = 100000;
+    protected static final int OUTER_LIST_50K = 50000;
+    protected static final int OUTER_LIST_10K = 10000;
+
+    // Pre-computed instance identifiers, one per outer-list entry of each size.
+    protected static final YangInstanceIdentifier[] OUTER_LIST_100K_PATHS = initOuterListPaths(OUTER_LIST_100K);
+    protected static final YangInstanceIdentifier[] OUTER_LIST_50K_PATHS = initOuterListPaths(OUTER_LIST_50K);
+    protected static final YangInstanceIdentifier[] OUTER_LIST_10K_PATHS = initOuterListPaths(OUTER_LIST_10K);
+
+    // Builds a keyed path under OUTER_LIST_PATH for each index 0..outerListPathsCount-1.
+    private static YangInstanceIdentifier[] initOuterListPaths(final int outerListPathsCount) {
+        final YangInstanceIdentifier[] paths = new YangInstanceIdentifier[outerListPathsCount];
+
+        for (int outerListKey = 0; outerListKey < outerListPathsCount; ++outerListKey) {
+            paths[outerListKey] = YangInstanceIdentifier.builder(BenchmarkModel.OUTER_LIST_PATH)
+                .nodeWithKey(BenchmarkModel.OUTER_LIST_QNAME, BenchmarkModel.ID_QNAME, outerListKey)
+                .build();
+        }
+        return paths;
+    }
+
+    // JMH iteration counts shared by all subclasses.
+    protected static final int WARMUP_ITERATIONS = 20;
+    protected static final int MEASUREMENT_ITERATIONS = 20;
+
+    // Inner-list payloads of fixed sizes, reused across all outer-list entries.
+    protected static final MapNode ONE_ITEM_INNER_LIST = initInnerListItems(1);
+    protected static final MapNode TWO_ITEM_INNER_LIST = initInnerListItems(2);
+    protected static final MapNode TEN_ITEM_INNER_LIST = initInnerListItems(10);
+
+    // Builds an inner list with entries keyed 1..count on NAME_QNAME.
+    private static MapNode initInnerListItems(final int count) {
+        final CollectionNodeBuilder<MapEntryNode, MapNode> mapEntryBuilder = ImmutableNodes
+            .mapNodeBuilder(BenchmarkModel.INNER_LIST_QNAME);
+
+        for (int i = 1; i <= count; ++i) {
+            mapEntryBuilder
+                .withChild(ImmutableNodes.mapEntry(BenchmarkModel.INNER_LIST_QNAME, BenchmarkModel.NAME_QNAME, i));
+        }
+        return mapEntryBuilder.build();
+    }
+
+    // Outer-list entry payloads, index-aligned with the *_PATHS arrays above.
+    protected static final NormalizedNode<?, ?>[] OUTER_LIST_ONE_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_100K, ONE_ITEM_INNER_LIST);
+    protected static final NormalizedNode<?, ?>[] OUTER_LIST_TWO_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_50K, TWO_ITEM_INNER_LIST);
+    protected static final NormalizedNode<?, ?>[] OUTER_LIST_TEN_ITEM_INNER_LIST = initOuterListItems(OUTER_LIST_10K, TEN_ITEM_INNER_LIST);
+
+    // Builds one outer-list map entry per index, each carrying the same innerList payload.
+    private static NormalizedNode<?,?>[] initOuterListItems(final int outerListItemsCount, final MapNode innerList) {
+        final NormalizedNode<?,?>[] outerListItems = new NormalizedNode[outerListItemsCount];
+
+        for (int i = 0; i < outerListItemsCount; ++i) {
+            int outerListKey = i;
+            outerListItems[i] = ImmutableNodes.mapEntryBuilder(BenchmarkModel.OUTER_LIST_QNAME, BenchmarkModel.ID_QNAME, outerListKey)
+                .withChild(innerList).build();
+        }
+        return outerListItems;
+    }
+
+    // Schema context; populated by the concrete subclass in setUp().
+    protected SchemaContext schemaContext;
+    // Lifecycle hooks wired to JMH @Setup/@TearDown by concrete subclasses.
+    // NOTE(review): "abstract public" works but "public abstract" is the
+    // conventional JLS modifier order — consider reordering.
+    abstract public void setUp() throws Exception;
+    abstract public void tearDown();
+
+    // Empty TEST container holding an empty outer list; used to seed stores
+    // before the benchmark writes run.
+    protected static DataContainerChild<?, ?> provideOuterListNode() {
+        return ImmutableContainerNodeBuilder
+            .create()
+            .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(BenchmarkModel.TEST_QNAME))
+            .withChild(
+                ImmutableNodes.mapNodeBuilder(BenchmarkModel.OUTER_LIST_QNAME)
+                    .build()).build();
+    }
+}
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.md.sal.dom.store.benchmark;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.opendaylight.controller.md.sal.dom.broker.impl.DOMDataBrokerImpl;
+import org.opendaylight.controller.md.sal.dom.store.impl.InMemoryDOMDataStore;
+import org.opendaylight.controller.sal.core.spi.data.DOMStore;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+
+@State(Scope.Thread)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@Fork(1)
+public class InMemoryBrokerWriteTransactionBenchmark extends AbstractInMemoryBrokerWriteTransactionBenchmark {
+    // Commit executor backing the broker; shut down in tearDown().
+    private ListeningExecutorService executor;
+
+    // Wires up two in-memory stores (OPER + CFG) behind a DOMDataBrokerImpl,
+    // installs the benchmark schema context and seeds the test node.
+    // NOTE(review): MoreExecutors.sameThreadExecutor() is deprecated in later
+    // Guava releases in favour of directExecutor()/newDirectExecutorService()
+    // — confirm against the Guava version on the classpath.
+    @Setup(Level.Trial)
+    @Override
+    public void setUp() throws Exception {
+        // Datastore executor runs callbacks inline; broker commits run on a
+        // single-threaded pool that exits with the JVM (1s grace period).
+        ListeningExecutorService dsExec = MoreExecutors.sameThreadExecutor();
+        executor = MoreExecutors.listeningDecorator(
+            MoreExecutors.getExitingExecutorService((ThreadPoolExecutor)Executors.newFixedThreadPool(1), 1L, TimeUnit.SECONDS));
+
+        InMemoryDOMDataStore operStore = new InMemoryDOMDataStore("OPER", dsExec,
+            MoreExecutors.sameThreadExecutor());
+        InMemoryDOMDataStore configStore = new InMemoryDOMDataStore("CFG", dsExec,
+            MoreExecutors.sameThreadExecutor());
+        // Cast needed so ImmutableMap.of infers Map<LogicalDatastoreType, DOMStore>.
+        Map<LogicalDatastoreType, DOMStore> datastores = ImmutableMap.of(
+            LogicalDatastoreType.OPERATIONAL, (DOMStore)operStore,
+            LogicalDatastoreType.CONFIGURATION, configStore);
+
+        domBroker = new DOMDataBrokerImpl(datastores, executor);
+        schemaContext = BenchmarkModel.createTestContext();
+        configStore.onGlobalContextUpdated(schemaContext);
+        operStore.onGlobalContextUpdated(schemaContext);
+        initTestNode();
+    }
+
+    // Releases the broker and stops the commit executor after the trial.
+    @Override
+    public void tearDown() {
+        domBroker.close();
+        executor.shutdown();
+    }
+}
final Inet4Address inetDestAddress) {
String inetSourceAddressStr = InetAddresses
.toAddrString(inetSourceAddress);
- Ipv4Prefix ipv4SourcePrefix = new Ipv4Prefix(inetSourceAddressStr);
+ Ipv4Prefix ipv4SourcePrefix = new Ipv4Prefix(inetSourceAddressStr + "/32");
String inetDestAddressValue = InetAddresses
.toAddrString(inetDestAddress);
- Ipv4Prefix ipv4DestPrefix = new Ipv4Prefix(inetDestAddressValue);
+ Ipv4Prefix ipv4DestPrefix = new Ipv4Prefix(inetDestAddressValue + "/32");
ArpMatchBuilder arpMatchBuilder = new ArpMatchBuilder();
if(inetSourceAddress != null) {
String inetSrcAddressString = InetAddresses
.toAddrString(inetSourceAddress);
- layer4MatchBuild.setIpv4Source(new Ipv4Prefix(inetSrcAddressString));
+ layer4MatchBuild.setIpv4Source(new Ipv4Prefix(inetSrcAddressString + "/32"));
}
if(inetDestAddress != null) {
String inetDstAddressString = InetAddresses
.toAddrString(inetDestAddress);
layer4MatchBuild
- .setIpv4Destination(new Ipv4Prefix(inetDstAddressString));
+ .setIpv4Destination(new Ipv4Prefix(inetDstAddressString + "/32"));
}
return layer4MatchBuild.build();
if(inetSourceAddress != null) {
String inetSrcAddressString = InetAddresses
.toAddrString(inetSourceAddress);
- layer6MatchBuild.setIpv6Source(new Ipv6Prefix(inetSrcAddressString));
+ layer6MatchBuild.setIpv6Source(new Ipv6Prefix(inetSrcAddressString + "/128"));
}
if(inetDestAddress != null) {
String inetDstAddressString = InetAddresses
.toAddrString(inetDestAddress);
layer6MatchBuild
- .setIpv6Destination(new Ipv6Prefix(inetDstAddressString));
+ .setIpv6Destination(new Ipv6Prefix(inetDstAddressString + "/128"));
}
return layer6MatchBuild.build();
}
public static Address toInetAddress(final InetAddress address) {
if (address instanceof Inet4Address) {
return new Ipv4Builder()
- .setIpv4Address(new Ipv4Prefix(InetAddresses.toAddrString(address)))
+ .setIpv4Address(new Ipv4Prefix(InetAddresses.toAddrString(address) + "/32"))
.build();
}
if (address instanceof Inet6Address) {
return new Ipv6Builder()
- .setIpv6Address(new Ipv6Prefix(InetAddresses.toAddrString(address)))
+ .setIpv6Address(new Ipv6Prefix(InetAddresses.toAddrString(address) + "/128"))
.build();
}
Layer3Match layer3Match = match.getLayer3Match();
boolean arpFound = false;
if (layer3Match instanceof ArpMatch) {
- assertEquals("Source IP address is wrong.", "192.168.100.100", ((ArpMatch) layer3Match)
+ assertEquals("Source IP address is wrong.", "192.168.100.100/32", ((ArpMatch) layer3Match)
.getArpSourceTransportAddress().getValue());
- assertEquals("Destination IP address is wrong.", "192.168.100.101", ((ArpMatch) layer3Match)
+ assertEquals("Destination IP address is wrong.", "192.168.100.101/32", ((ArpMatch) layer3Match)
.getArpTargetTransportAddress().getValue());
assertEquals("Source MAC address is wrong.", "ff:ee:dd:cc:bb:aa", ((ArpMatch) layer3Match)
.getArpSourceHardwareAddress().getAddress().getValue());
boolean ipv4Found = false;
layer3Match = match.getLayer3Match();
if (layer3Match instanceof Ipv4Match) {
- assertEquals("Source IP address is wrong.", "192.168.100.102", ((Ipv4Match) layer3Match)
+ assertEquals("Source IP address is wrong.", "192.168.100.102/32", ((Ipv4Match) layer3Match)
.getIpv4Source().getValue());
- assertEquals("Destination IP address is wrong.", "192.168.100.103", ((Ipv4Match) layer3Match)
+ assertEquals("Destination IP address is wrong.", "192.168.100.103/32", ((Ipv4Match) layer3Match)
.getIpv4Destination().getValue());
}
assertNotNull("Ipv4 wasn't found", ipv4Found);
boolean ipv6Found = false;
layer3Match = match.getLayer3Match();
if (layer3Match instanceof Ipv6Match) {
- assertEquals("Source IP address is wrong.", "2001:db8:85a3::8a2e:370:7335", ((Ipv6Match) layer3Match)
+ assertEquals("Source IP address is wrong.", "2001:db8:85a3::8a2e:370:7335/128", ((Ipv6Match) layer3Match)
.getIpv6Source().getValue());
- assertEquals("Destination IP address is wrong.", "2001:db8:85a3::8a2e:370:7336",
+ assertEquals("Destination IP address is wrong.", "2001:db8:85a3::8a2e:370:7336/128",
((Ipv6Match) layer3Match).getIpv6Destination().getValue());
}
assertNotNull("Ipv6 wasn't found", ipv6Found);
boolean ipv4AddressFound = false;
if (address instanceof Ipv4) {
ipv4AddressFound = true;
- assertEquals("Wrong IP address type in SetNextHopAction.", "192.168.100.100", ((Ipv4) address)
+ assertEquals("Wrong IP address type in SetNextHopAction.", "192.168.100.100/32", ((Ipv4) address)
.getIpv4Address().getValue());
}
assertTrue("Ipv4 address wasn't found.", ipv4AddressFound);
boolean ipv4AddressFound = false;
if (address instanceof Ipv4) {
ipv4AddressFound = true;
- assertEquals("Wrong IP address type in SetNwDstAction.", "192.168.100.101", ((Ipv4) address)
+ assertEquals("Wrong IP address type in SetNwDstAction.", "192.168.100.101/32", ((Ipv4) address)
.getIpv4Address().getValue());
}
assertTrue("Ipv4 address wasn't found.", ipv4AddressFound);
boolean ipv4AddressFound = false;
if (address instanceof Ipv4) {
ipv4AddressFound = true;
- assertEquals("Wrong IP address type in SetNwSrcAction.", "192.168.100.102", ((Ipv4) address)
+ assertEquals("Wrong IP address type in SetNwSrcAction.", "192.168.100.102/32", ((Ipv4) address)
.getIpv4Address().getValue());
}
assertTrue("Ipv4 address wasn't found.", ipv4AddressFound);
private Address prapareIpv4Address(String ipv4Address) {
Ipv4Builder ipv4Builder = new Ipv4Builder();
- ipv4Builder.setIpv4Address(new Ipv4Prefix(ipv4Address));
+ ipv4Builder.setIpv4Address(new Ipv4Prefix(ipv4Address + "/32"));
return ipv4Builder.build();
}
private Layer3Match prepLayer3MatchIpv4() {
Ipv4MatchBuilder ipv4MatchBuilder = new Ipv4MatchBuilder();
- ipv4MatchBuilder.setIpv4Source(new Ipv4Prefix("192.168.1.104"));
- ipv4MatchBuilder.setIpv4Destination(new Ipv4Prefix("192.168.1.105"));
+ ipv4MatchBuilder.setIpv4Source(new Ipv4Prefix("192.168.1.104/32"));
+ ipv4MatchBuilder.setIpv4Destination(new Ipv4Prefix("192.168.1.105/32"));
return ipv4MatchBuilder.build();
}
private Layer3Match prepLayer3MatchArp() {
ArpMatchBuilder arpMatchBuilder = new ArpMatchBuilder();
- arpMatchBuilder.setArpSourceTransportAddress(new Ipv4Prefix("192.168.1.101"));
- arpMatchBuilder.setArpTargetTransportAddress(new Ipv4Prefix("192.168.1.102"));
+ arpMatchBuilder.setArpSourceTransportAddress(new Ipv4Prefix("192.168.1.101/32"));
+ arpMatchBuilder.setArpTargetTransportAddress(new Ipv4Prefix("192.168.1.102/32"));
ArpSourceHardwareAddressBuilder arpSourAddressBuild = new ArpSourceHardwareAddressBuilder();
arpSourAddressBuild.setAddress(new MacAddress("22:44:66:88:AA:CC"));
type uint32;
}
- leaf perc_level {
+ leaf prec_level {
description "Number of drop precedence level to add";
type uint8;
}
private final Set<String> followers;
private Cancellable heartbeatSchedule = null;
- private Cancellable appendEntriesSchedule = null;
private Cancellable installSnapshotSchedule = null;
private List<ClientRequestTracker> trackerList = new ArrayList<>();
import akka.actor.PoisonPill;
import akka.actor.Props;
import akka.actor.Terminated;
-import akka.event.Logging;
import akka.japi.Creator;
+import akka.japi.Procedure;
+import akka.pattern.Patterns;
import akka.persistence.RecoveryCompleted;
+import akka.persistence.SaveSnapshotFailure;
import akka.persistence.SaveSnapshotSuccess;
import akka.persistence.SnapshotMetadata;
import akka.persistence.SnapshotOffer;
+import akka.persistence.SnapshotSelectionCriteria;
import akka.testkit.JavaTestKit;
import akka.testkit.TestActorRef;
+import akka.util.Timeout;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.Uninterruptibles;
import com.google.protobuf.ByteString;
import org.junit.After;
+import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.cluster.DataPersistenceProvider;
import org.opendaylight.controller.cluster.datastore.DataPersistenceProviderMonitor;
import org.opendaylight.controller.cluster.raft.base.messages.ApplyLogEntries;
+import org.opendaylight.controller.cluster.raft.base.messages.ApplySnapshot;
+import org.opendaylight.controller.cluster.raft.base.messages.ApplyState;
import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot;
import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshotReply;
import org.opendaylight.controller.cluster.raft.client.messages.FindLeader;
import org.opendaylight.controller.cluster.raft.protobuff.client.messages.Payload;
import org.opendaylight.controller.cluster.raft.utils.MockAkkaJournal;
import org.opendaylight.controller.cluster.raft.utils.MockSnapshotStore;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
import java.io.ByteArrayInputStream;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
public class RaftActorTest extends AbstractActorTest {
public static class MockRaftActor extends RaftActor {
private final DataPersistenceProvider dataPersistenceProvider;
+ private final RaftActor delegate;
public static final class MockRaftActorCreator implements Creator<MockRaftActor> {
private final Map<String, String> peerAddresses;
}
private final CountDownLatch recoveryComplete = new CountDownLatch(1);
- private final CountDownLatch applyRecoverySnapshot = new CountDownLatch(1);
- private final CountDownLatch applyStateLatch = new CountDownLatch(1);
private final List<Object> state;
public MockRaftActor(String id, Map<String, String> peerAddresses, Optional<ConfigParams> config, DataPersistenceProvider dataPersistenceProvider) {
super(id, peerAddresses, config);
state = new ArrayList<>();
+ this.delegate = mock(RaftActor.class);
if(dataPersistenceProvider == null){
this.dataPersistenceProvider = new PersistentDataProvider();
} else {
}
}
- public CountDownLatch getApplyRecoverySnapshotLatch(){
- return applyRecoverySnapshot;
- }
-
public List<Object> getState() {
return state;
}
@Override protected void applyState(ActorRef clientActor, String identifier, Object data) {
- applyStateLatch.countDown();
+ delegate.applyState(clientActor, identifier, data);
+ LOG.info("applyState called");
}
+
+
+
@Override
protected void startLogRecoveryBatch(int maxBatchSize) {
}
@Override
protected void onRecoveryComplete() {
+ delegate.onRecoveryComplete();
recoveryComplete.countDown();
}
@Override
protected void applyRecoverySnapshot(ByteString snapshot) {
- applyRecoverySnapshot.countDown();
+ delegate.applyRecoverySnapshot(snapshot);
try {
Object data = toObject(snapshot);
System.out.println("!!!!!applyRecoverySnapshot: "+data);
}
@Override protected void createSnapshot() {
+ delegate.createSnapshot();
}
@Override protected void applySnapshot(ByteString snapshot) {
+ delegate.applySnapshot(snapshot);
}
@Override protected void onStateChanged() {
+ delegate.onStateChanged();
}
@Override
}
- public boolean waitForStartup(){
+ public ActorRef getRaftActor() {
+ return raftActor;
+ }
+
+ public boolean waitForLogMessage(final Class logEventClass, String message){
// Wait for a specific log message to show up
return
- new JavaTestKit.EventFilter<Boolean>(Logging.Info.class
+ new JavaTestKit.EventFilter<Boolean>(logEventClass
) {
@Override
protected Boolean run() {
return true;
}
}.from(raftActor.path().toString())
- .message("Switching from behavior Candidate to Leader")
+ .message(message)
.occurrences(1).exec();
}
- public void findLeader(final String expectedLeader){
- raftActor.tell(new FindLeader(), getRef());
-
- FindLeaderReply reply = expectMsgClass(duration("5 seconds"), FindLeaderReply.class);
- assertEquals("getLeaderActor", expectedLeader, reply.getLeaderActor());
+ protected void waitUntilLeader(){
+ waitUntilLeader(raftActor);
}
- public ActorRef getRaftActor() {
- return raftActor;
+ // Polls the actor with FindLeader until a leader is reported, failing the
+ // test if none emerges within roughly 5 seconds (20 * 5 attempts with a
+ // 50 ms sleep between them, plus the 100 ms ask timeout per attempt).
+ protected void waitUntilLeader(ActorRef actorRef) {
+ FiniteDuration duration = Duration.create(100, TimeUnit.MILLISECONDS);
+ for(int i = 0; i < 20 * 5; i++) {
+ Future<Object> future = Patterns.ask(actorRef, new FindLeader(), new Timeout(duration));
+ try {
+ FindLeaderReply resp = (FindLeaderReply) Await.result(future, duration);
+ if(resp.getLeaderActor() != null) {
+ // A leader has been elected; done waiting.
+ return;
+ }
+ } catch(TimeoutException e) {
+ // Expected while no leader exists yet; fall through and retry.
+ } catch(Exception e) {
+ System.err.println("FindLeader threw ex");
+ e.printStackTrace();
+ }
+
+
+ Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
+ }
+
+ Assert.fail("Leader not found for actorRef " + actorRef.path());
+ }
+
}
@Test
public void testConstruction() {
- boolean started = new RaftActorTestKit(getSystem(), "testConstruction").waitForStartup();
- assertEquals(true, started);
+ new RaftActorTestKit(getSystem(), "testConstruction").waitUntilLeader();
}
@Test
public void testFindLeaderWhenLeaderIsSelf(){
RaftActorTestKit kit = new RaftActorTestKit(getSystem(), "testFindLeader");
- kit.waitForStartup();
- kit.findLeader(kit.getRaftActor().path().toString());
+ kit.waitUntilLeader();
}
@Test
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
Collections.EMPTY_MAP, Optional.<ConfigParams>of(config)), persistenceId);
+ MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
+
+ // Wait for akka's recovery to complete so it doesn't interfere.
+ mockRaftActor.waitForRecoveryComplete();
+
ByteString snapshotBytes = fromObject(Arrays.asList(
new MockRaftActorContext.MockPayload("A"),
new MockRaftActorContext.MockPayload("B"),
Snapshot snapshot = Snapshot.create(snapshotBytes.toByteArray(),
Lists.<ReplicatedLogEntry>newArrayList(), 3, 1 ,3, 1);
- MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
-
mockRaftActor.onReceiveRecover(new SnapshotOffer(new SnapshotMetadata(persistenceId, 100, 100), snapshot));
- CountDownLatch applyRecoverySnapshotLatch = mockRaftActor.getApplyRecoverySnapshotLatch();
-
- assertEquals("apply recovery snapshot", true, applyRecoverySnapshotLatch.await(5, TimeUnit.SECONDS));
+ verify(mockRaftActor.delegate).applyRecoverySnapshot(eq(snapshotBytes));
mockRaftActor.onReceiveRecover(new ReplicatedLogImplEntry(0, 1, new MockRaftActorContext.MockPayload("A")));
mockRaftActor.onReceiveRecover(mock(RecoveryCompleted.class));
- mockRaftActor.waitForRecoveryComplete();
-
mockActorRef.tell(PoisonPill.getInstance(), getRef());
}};
MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
+ // Wait for akka's recovery to complete so it doesn't interfere.
+ mockRaftActor.waitForRecoveryComplete();
+
ByteString snapshotBytes = fromObject(Arrays.asList(
new MockRaftActorContext.MockPayload("A"),
new MockRaftActorContext.MockPayload("B"),
mockRaftActor.onReceiveRecover(new SnapshotOffer(new SnapshotMetadata(persistenceId, 100, 100), snapshot));
- CountDownLatch applyRecoverySnapshotLatch = mockRaftActor.getApplyRecoverySnapshotLatch();
-
- assertEquals("apply recovery snapshot", false, applyRecoverySnapshotLatch.await(1, TimeUnit.SECONDS));
+ verify(mockRaftActor.delegate, times(0)).applyRecoverySnapshot(any(ByteString.class));
mockRaftActor.onReceiveRecover(new ReplicatedLogImplEntry(0, 1, new MockRaftActorContext.MockPayload("A")));
mockRaftActor.onReceiveRecover(mock(RecoveryCompleted.class));
- mockRaftActor.waitForRecoveryComplete();
-
mockActorRef.tell(PoisonPill.getInstance(), getRef());
}};
}
config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
- CountDownLatch persistLatch = new CountDownLatch(1);
- DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
- dataPersistenceProviderMonitor.setPersistLatch(persistLatch);
+
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
- Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
- mockRaftActor.getRaftActorContext().getReplicatedLog().appendAndPersist(new MockRaftActorContext.MockReplicatedLogEntry(10, 10, mock(Payload.class)));
+ MockRaftActorContext.MockReplicatedLogEntry logEntry = new MockRaftActorContext.MockReplicatedLogEntry(10, 10, mock(Payload.class));
- assertEquals("Persist called", true, persistLatch.await(5, TimeUnit.SECONDS));
+ mockRaftActor.getRaftActorContext().getReplicatedLog().appendAndPersist(logEntry);
+
+ verify(dataPersistenceProvider).persist(eq(logEntry), any(Procedure.class));
mockActorRef.tell(PoisonPill.getInstance(), getRef());
config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
- CountDownLatch persistLatch = new CountDownLatch(2);
- DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
- dataPersistenceProviderMonitor.setPersistLatch(persistLatch);
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
- Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
mockRaftActor.getRaftActorContext().getReplicatedLog().removeFromAndPersist(0);
- assertEquals("Persist called", true, persistLatch.await(5, TimeUnit.SECONDS));
+ verify(dataPersistenceProvider, times(2)).persist(anyObject(), any(Procedure.class));
+
mockActorRef.tell(PoisonPill.getInstance(), getRef());
config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
- CountDownLatch persistLatch = new CountDownLatch(1);
- DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
- dataPersistenceProviderMonitor.setPersistLatch(persistLatch);
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
- Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
mockRaftActor.onReceiveCommand(new ApplyLogEntries(10));
- assertEquals("Persist called", true, persistLatch.await(5, TimeUnit.SECONDS));
+ verify(dataPersistenceProvider, times(1)).persist(anyObject(), any(Procedure.class));
mockActorRef.tell(PoisonPill.getInstance(), getRef());
config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
- CountDownLatch persistLatch = new CountDownLatch(1);
- DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
- dataPersistenceProviderMonitor.setSaveSnapshotLatch(persistLatch);
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
- Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
mockRaftActor.onReceiveCommand(new CaptureSnapshotReply(snapshotBytes));
- assertEquals("Save Snapshot called", true, persistLatch.await(5, TimeUnit.SECONDS));
+ verify(dataPersistenceProvider).saveSnapshot(anyObject());
mockActorRef.tell(PoisonPill.getInstance(), getRef());
config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
- CountDownLatch deleteMessagesLatch = new CountDownLatch(1);
- CountDownLatch deleteSnapshotsLatch = new CountDownLatch(1);
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
+
+ TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
+
+ MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
+
+ mockRaftActor.getReplicatedLog().append(new MockRaftActorContext.MockReplicatedLogEntry(1,0, mock(Payload.class)));
+ mockRaftActor.getReplicatedLog().append(new MockRaftActorContext.MockReplicatedLogEntry(1,1, mock(Payload.class)));
+ mockRaftActor.getReplicatedLog().append(new MockRaftActorContext.MockReplicatedLogEntry(1,2, mock(Payload.class)));
+ mockRaftActor.getReplicatedLog().append(new MockRaftActorContext.MockReplicatedLogEntry(1,3, mock(Payload.class)));
+ mockRaftActor.getReplicatedLog().append(new MockRaftActorContext.MockReplicatedLogEntry(1,4, mock(Payload.class)));
+
+ ByteString snapshotBytes = fromObject(Arrays.asList(
+ new MockRaftActorContext.MockPayload("A"),
+ new MockRaftActorContext.MockPayload("B"),
+ new MockRaftActorContext.MockPayload("C"),
+ new MockRaftActorContext.MockPayload("D")));
+
+ mockRaftActor.onReceiveCommand(new CaptureSnapshot(-1, 1, 2, 1));
+
+ verify(mockRaftActor.delegate).createSnapshot();
+
+ mockRaftActor.onReceiveCommand(new CaptureSnapshotReply(snapshotBytes));
+
+ mockRaftActor.onReceiveCommand(new SaveSnapshotSuccess(new SnapshotMetadata("foo", 100, 100)));
+
+ verify(dataPersistenceProvider).deleteSnapshots(any(SnapshotSelectionCriteria.class));
+
+ verify(dataPersistenceProvider).deleteMessages(100);
+
+ assertNotNull("Snapshot should not be null", mockRaftActor.getReplicatedLog().getSnapshot());
+
+ assertEquals(2, mockRaftActor.getReplicatedLog().size());
+
+ assertNotNull(mockRaftActor.getReplicatedLog().get(3));
+ assertNotNull(mockRaftActor.getReplicatedLog().get(4));
+
+ // Index 2 will not be in the log because it was removed due to snapshotting
+ assertNull(mockRaftActor.getReplicatedLog().get(2));
+
+ mockActorRef.tell(PoisonPill.getInstance(), getRef());
+
+ }
+ };
+ }
+
+ // Verifies that an ApplyState message delivered to the RaftActor is routed
+ // to the subclass applyState() callback with the client actor reference and
+ // identifier passed through unchanged.
+ @Test
+ public void testApplyState() throws Exception {
+
+ new JavaTestKit(getSystem()) {
+ {
+ String persistenceId = "testApplyState";
+
+ DefaultConfigParamsImpl config = new DefaultConfigParamsImpl();
+
+ // A one-day heartbeat keeps timers from firing during the test.
+ config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
+
+ DataPersistenceProvider dataPersistenceProvider = mock(DataPersistenceProvider.class);
+
+ TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProvider), persistenceId);
+
+ MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
+
+ ReplicatedLogEntry entry = new MockRaftActorContext.MockReplicatedLogEntry(1, 5,
+ new MockRaftActorContext.MockPayload("F"));
+
+ // Invoke the handler directly, bypassing the actor mailbox.
+ mockRaftActor.onReceiveCommand(new ApplyState(mockActorRef, "apply-state", entry));
+
+ // The mocked delegate records the callback; check actor and identifier.
+ verify(mockRaftActor.delegate).applyState(eq(mockActorRef), eq("apply-state"), anyObject());
+
+ mockActorRef.tell(PoisonPill.getInstance(), getRef());
+
+ }
+ };
+
+
+ }
+
+ // Verifies that an ApplySnapshot message hands the snapshot bytes to the
+ // subclass applySnapshot() callback, replaces the in-memory replicated log
+ // with an empty one, and sets lastApplied from the snapshot metadata.
+ @Test
+ public void testApplySnapshot() throws Exception {
+ new JavaTestKit(getSystem()) {
+ {
+ String persistenceId = "testApplySnapshot";
+
+ DefaultConfigParamsImpl config = new DefaultConfigParamsImpl();
+
+ // A one-day heartbeat keeps timers from firing during the test.
+ config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
+
+ DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
+
+ TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
+ Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
+
+ MockRaftActor mockRaftActor = mockActorRef.underlyingActor();
+
+ // Pre-populate the log so we can observe it being swapped out.
+ ReplicatedLog oldReplicatedLog = mockRaftActor.getReplicatedLog();
+
+ oldReplicatedLog.append(new MockRaftActorContext.MockReplicatedLogEntry(1,0,mock(Payload.class)));
+ oldReplicatedLog.append(new MockRaftActorContext.MockReplicatedLogEntry(1,1,mock(Payload.class)));
+ oldReplicatedLog.append(new MockRaftActorContext.MockReplicatedLogEntry(1,2,mock(Payload.class)));
+
+ ByteString snapshotBytes = fromObject(Arrays.asList(
+ new MockRaftActorContext.MockPayload("A"),
+ new MockRaftActorContext.MockPayload("B"),
+ new MockRaftActorContext.MockPayload("C"),
+ new MockRaftActorContext.MockPayload("D")));
+
+ Snapshot snapshot = mock(Snapshot.class);
+
+ doReturn(snapshotBytes.toByteArray()).when(snapshot).getState();
+
+ doReturn(3L).when(snapshot).getLastAppliedIndex();
+
+ mockRaftActor.onReceiveCommand(new ApplySnapshot(snapshot));
+
+ // Snapshot state must reach the subclass callback intact.
+ verify(mockRaftActor.delegate).applySnapshot(eq(snapshotBytes));
+
+ assertTrue("The replicatedLog should have changed", oldReplicatedLog != mockRaftActor.getReplicatedLog());
+
+ assertEquals("lastApplied should be same as in the snapshot", (Long) 3L, mockRaftActor.getLastApplied());
+
+ // The replacement log starts empty after the snapshot is applied.
+ assertEquals(0, mockRaftActor.getReplicatedLog().size());
+
+ mockActorRef.tell(PoisonPill.getInstance(), getRef());
+
+ }
+ };
+ }
+
+ @Test
+ public void testSaveSnapshotFailure() throws Exception {
+ new JavaTestKit(getSystem()) {
+ {
+ String persistenceId = "testSaveSnapshotFailure";
+
+ DefaultConfigParamsImpl config = new DefaultConfigParamsImpl();
+
+ config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));
+
DataPersistenceProviderMonitor dataPersistenceProviderMonitor = new DataPersistenceProviderMonitor();
- dataPersistenceProviderMonitor.setDeleteMessagesLatch(deleteMessagesLatch);
- dataPersistenceProviderMonitor.setDeleteSnapshotsLatch(deleteSnapshotsLatch);
TestActorRef<MockRaftActor> mockActorRef = TestActorRef.create(getSystem(), MockRaftActor.props(persistenceId,
Collections.EMPTY_MAP, Optional.<ConfigParams>of(config), dataPersistenceProviderMonitor), persistenceId);
mockRaftActor.onReceiveCommand(new CaptureSnapshotReply(snapshotBytes));
- mockRaftActor.onReceiveCommand(new SaveSnapshotSuccess(new SnapshotMetadata("foo", 100, 100)));
+ mockRaftActor.onReceiveCommand(new SaveSnapshotFailure(new SnapshotMetadata("foobar", 10L, 1234L),
+ new Exception()));
- assertEquals("Delete Messages called", true, deleteMessagesLatch.await(5, TimeUnit.SECONDS));
+ assertEquals("Snapshot index should not have advanced because save snapshot failed", -1,
+ mockRaftActor.getReplicatedLog().getSnapshotIndex());
- assertEquals("Delete Snapshots called", true, deleteSnapshotsLatch.await(5, TimeUnit.SECONDS));
+ assertNull("Snapshot should be null", mockRaftActor.getReplicatedLog().getSnapshot());
mockActorRef.tell(PoisonPill.getInstance(), getRef());
import org.opendaylight.controller.cluster.raft.FollowerLogInformationImpl;
import org.opendaylight.controller.cluster.raft.MockRaftActorContext;
import org.opendaylight.controller.cluster.raft.RaftActorContext;
+import org.opendaylight.controller.cluster.raft.RaftState;
import org.opendaylight.controller.cluster.raft.ReplicatedLogImplEntry;
import org.opendaylight.controller.cluster.raft.SerializationUtils;
+import org.opendaylight.controller.cluster.raft.base.messages.ApplyLogEntries;
import org.opendaylight.controller.cluster.raft.base.messages.ApplyState;
import org.opendaylight.controller.cluster.raft.base.messages.Replicate;
import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat;
import org.opendaylight.controller.cluster.raft.messages.AppendEntriesReply;
import org.opendaylight.controller.cluster.raft.messages.InstallSnapshot;
import org.opendaylight.controller.cluster.raft.messages.InstallSnapshotReply;
+import org.opendaylight.controller.cluster.raft.messages.RequestVoteReply;
import org.opendaylight.controller.cluster.raft.utils.DoNothingActor;
import org.opendaylight.controller.cluster.raft.utils.MessageCollectorActor;
import org.opendaylight.controller.protobuff.messages.cluster.raft.AppendEntriesMessages;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
}};
}
+ // Verifies that a failed (success=false) AppendEntriesReply from a known
+ // follower does not change the behavior: the actor remains Leader.
+ @Test
+ public void testHandleAppendEntriesReplyFailure(){
+ new JavaTestKit(getSystem()) {
+ {
+
+ ActorRef leaderActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+ ActorRef followerActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+
+ MockRaftActorContext leaderActorContext =
+ new MockRaftActorContext("leader", getSystem(), leaderActor);
+
+ // Register follower-1 so the reply maps to a tracked follower.
+ Map<String, String> peerAddresses = new HashMap();
+ peerAddresses.put("follower-1",
+ followerActor.path().toString());
+
+ leaderActorContext.setPeerAddresses(peerAddresses);
+
+ Leader leader = new Leader(leaderActorContext);
+
+ // success=false simulates a log-consistency rejection.
+ AppendEntriesReply reply = new AppendEntriesReply("follower-1", 1, false, 10, 1);
+
+ RaftActorBehavior raftActorBehavior = leader.handleAppendEntriesReply(followerActor, reply);
+
+ assertEquals(RaftState.Leader, raftActorBehavior.state());
+
+ }};
+ }
+
+ // Verifies that a successful AppendEntriesReply advances the leader's
+ // commitIndex and lastApplied to the follower's log index (2), emits an
+ // ApplyLogEntries message, and produces exactly one ApplyState for the
+ // newly committed entry.
+ @Test
+ public void testHandleAppendEntriesReplySuccess() throws Exception {
+ new JavaTestKit(getSystem()) {
+ {
+
+ ActorRef leaderActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+ ActorRef followerActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+
+ MockRaftActorContext leaderActorContext =
+ new MockRaftActorContext("leader", getSystem(), leaderActor);
+
+ // Log entries 0..2 in term 1; entry 2 is not yet committed.
+ leaderActorContext.setReplicatedLog(
+ new MockRaftActorContext.MockReplicatedLogBuilder().createEntries(0, 3, 1).build());
+
+ Map<String, String> peerAddresses = new HashMap();
+ peerAddresses.put("follower-1",
+ followerActor.path().toString());
+
+ leaderActorContext.setPeerAddresses(peerAddresses);
+ leaderActorContext.setCommitIndex(1);
+ leaderActorContext.setLastApplied(1);
+ leaderActorContext.getTermInformation().update(1, "leader");
+
+ Leader leader = new Leader(leaderActorContext);
+
+ // Follower acknowledges replication up to log index 2.
+ AppendEntriesReply reply = new AppendEntriesReply("follower-1", 1, true, 2, 1);
+
+ RaftActorBehavior raftActorBehavior = leader.handleAppendEntriesReply(followerActor, reply);
+
+ assertEquals(RaftState.Leader, raftActorBehavior.state());
+
+ // Commit index advances to the acknowledged index.
+ assertEquals(2, leaderActorContext.getCommitIndex());
+
+ ApplyLogEntries applyLogEntries =
+ (ApplyLogEntries) MessageCollectorActor.getFirstMatching(leaderActor,
+ ApplyLogEntries.class);
+
+ assertNotNull(applyLogEntries);
+
+ assertEquals(2, leaderActorContext.getLastApplied());
+
+ assertEquals(2, applyLogEntries.getToIndex());
+
+ List<Object> applyStateList = MessageCollectorActor.getAllMatching(leaderActor,
+ ApplyState.class);
+
+ // Only entry 2 became newly committed, so exactly one ApplyState.
+ assertEquals(1,applyStateList.size());
+
+ ApplyState applyState = (ApplyState) applyStateList.get(0);
+
+ assertEquals(2, applyState.getReplicatedLogEntry().getIndex());
+
+ }};
+ }
+
+ // Verifies that an AppendEntriesReply from a follower the leader does not
+ // track (no peer addresses registered) is ignored without a state change.
+ @Test
+ public void testHandleAppendEntriesReplyUnknownFollower(){
+ new JavaTestKit(getSystem()) {
+ {
+
+ ActorRef leaderActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+ // No peer addresses are set, so "follower-1" is unknown.
+ MockRaftActorContext leaderActorContext =
+ new MockRaftActorContext("leader", getSystem(), leaderActor);
+
+ Leader leader = new Leader(leaderActorContext);
+
+ AppendEntriesReply reply = new AppendEntriesReply("follower-1", 1, false, 10, 1);
+
+ RaftActorBehavior raftActorBehavior = leader.handleAppendEntriesReply(getRef(), reply);
+
+ assertEquals(RaftState.Leader, raftActorBehavior.state());
+
+ }};
+ }
+
+ // Verifies that RequestVoteReply messages — whether the vote was granted
+ // or not — leave an established leader in the Leader state.
+ @Test
+ public void testHandleRequestVoteReply(){
+ new JavaTestKit(getSystem()) {
+ {
+
+ ActorRef leaderActor =
+ getSystem().actorOf(Props.create(MessageCollectorActor.class));
+
+ MockRaftActorContext leaderActorContext =
+ new MockRaftActorContext("leader", getSystem(), leaderActor);
+
+ Leader leader = new Leader(leaderActorContext);
+
+ // Granted vote: no behavior transition expected.
+ RaftActorBehavior raftActorBehavior = leader.handleRequestVoteReply(getRef(), new RequestVoteReply(1, true));
+
+ assertEquals(RaftState.Leader, raftActorBehavior.state());
+
+ // Denied vote: also no behavior transition expected.
+ raftActorBehavior = leader.handleRequestVoteReply(getRef(), new RequestVoteReply(1, false));
+
+ assertEquals(RaftState.Leader, raftActorBehavior.state());
+
+
+ }};
+
+ }
+
private static class LeaderTestKit extends JavaTestKit {
private LeaderTestKit(ActorSystem actorSystem) {
import akka.actor.UntypedActor;
import akka.pattern.Patterns;
import akka.util.Timeout;
+import com.google.common.collect.Lists;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
return null;
}
+ // Returns every message received by the given actor whose runtime class is
+ // exactly {@code clazz} (subclasses are not matched, since getClass() is
+ // compared with equals), preserving the order in which they were collected.
+ public static List<Object> getAllMatching(ActorRef actor, Class clazz) throws Exception {
+ List<Object> allMessages = getAllMessages(actor);
+
+ List<Object> output = Lists.newArrayList();
+
+ for(Object message : allMessages){
+ if(message.getClass().equals(clazz)){
+ output.add(message);
+ }
+ }
+
+ return output;
+ }
+
}
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+import com.google.protobuf.ByteString;
import org.opendaylight.controller.cluster.datastore.node.utils.QNameFactory;
import org.opendaylight.controller.cluster.datastore.util.InstanceIdentifierUtils;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
InstanceIdentifierUtils.toSerializable((YangInstanceIdentifier) value, context));
} else if(value instanceof Set) {
Set set = (Set) value;
- if(!set.isEmpty()){
- for(Object o : set){
- if(o instanceof String){
+ if (!set.isEmpty()) {
+ for (Object o : set) {
+ if (o instanceof String) {
builder.addBitsValue(o.toString());
} else {
throw new IllegalArgumentException("Expected value type to be Bits but was : " +
- value.toString());
+ value.toString());
}
}
}
+ } else if(value instanceof byte[]){
+ builder.setBytesValue(ByteString.copyFrom((byte[]) value));
} else {
builder.setValue(value.toString());
}
node.getInstanceIdentifierValue(), context);
} else if(node.getIntValueType() == ValueType.BITS_TYPE.ordinal()){
return new HashSet(node.getBitsValueList());
+ } else if(node.getIntValueType() == ValueType.BINARY_TYPE.ordinal()){
+ return node.getBytesValue().toByteArray();
}
return deSerializeBasicTypes(node.getIntValueType(), node.getValue());
}
YANG_IDENTIFIER_TYPE,
STRING_TYPE,
BIG_INTEGER_TYPE,
- BIG_DECIMAL_TYPE;
+ BIG_DECIMAL_TYPE,
+ BINARY_TYPE;
private static Map<Class, ValueType> types = new HashMap<>();
types.put(Short.class,SHORT_TYPE);
types.put(BigInteger.class, BIG_INTEGER_TYPE);
types.put(BigDecimal.class, BIG_DECIMAL_TYPE);
+ types.put(byte[].class, BINARY_TYPE);
}
public static final ValueType getSerializableType(Object node){
public interface CanCommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // required string transactionId = 1;
+ // optional string transactionId = 1;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
boolean hasTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
java.lang.String getTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
com.google.protobuf.ByteString
getTransactionIdBytes();
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
public static final int TRANSACTIONID_FIELD_NUMBER = 1;
private java.lang.Object transactionId_;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
- if (!hasTransactionId()) {
- memoizedIsInitialized = 0;
- return false;
- }
memoizedIsInitialized = 1;
return true;
}
}
public final boolean isInitialized() {
- if (!hasTransactionId()) {
-
- return false;
- }
return true;
}
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
private java.lang.Object transactionId_ = "";
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionId(
java.lang.String value) {
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder clearTransactionId() {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionIdBytes(
com.google.protobuf.ByteString value) {
public interface AbortTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // required string transactionId = 1;
+ // optional string transactionId = 1;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
boolean hasTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
java.lang.String getTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
com.google.protobuf.ByteString
getTransactionIdBytes();
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
public static final int TRANSACTIONID_FIELD_NUMBER = 1;
private java.lang.Object transactionId_;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
- if (!hasTransactionId()) {
- memoizedIsInitialized = 0;
- return false;
- }
memoizedIsInitialized = 1;
return true;
}
}
public final boolean isInitialized() {
- if (!hasTransactionId()) {
-
- return false;
- }
return true;
}
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
private java.lang.Object transactionId_ = "";
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionId(
java.lang.String value) {
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder clearTransactionId() {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionIdBytes(
com.google.protobuf.ByteString value) {
public interface CommitTransactionOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // required string transactionId = 1;
+ // optional string transactionId = 1;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
boolean hasTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
java.lang.String getTransactionId();
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
com.google.protobuf.ByteString
getTransactionIdBytes();
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
public static final int TRANSACTIONID_FIELD_NUMBER = 1;
private java.lang.Object transactionId_;
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
- if (!hasTransactionId()) {
- memoizedIsInitialized = 0;
- return false;
- }
memoizedIsInitialized = 1;
return true;
}
}
public final boolean isInitialized() {
- if (!hasTransactionId()) {
-
- return false;
- }
return true;
}
}
private int bitField0_;
- // required string transactionId = 1;
+ // optional string transactionId = 1;
private java.lang.Object transactionId_ = "";
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public boolean hasTransactionId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public java.lang.String getTransactionId() {
java.lang.Object ref = transactionId_;
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public com.google.protobuf.ByteString
getTransactionIdBytes() {
}
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionId(
java.lang.String value) {
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder clearTransactionId() {
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
/**
- * <code>required string transactionId = 1;</code>
+ * <code>optional string transactionId = 1;</code>
*/
public Builder setTransactionIdBytes(
com.google.protobuf.ByteString value) {
java.lang.String[] descriptorData = {
"\n\014Cohort.proto\022!org.opendaylight.control" +
"ler.mdsal\"-\n\024CanCommitTransaction\022\025\n\rtra" +
- "nsactionId\030\001 \002(\t\".\n\031CanCommitTransaction" +
+ "nsactionId\030\001 \001(\t\".\n\031CanCommitTransaction" +
"Reply\022\021\n\tcanCommit\030\001 \002(\010\")\n\020AbortTransac" +
- "tion\022\025\n\rtransactionId\030\001 \002(\t\"\027\n\025AbortTran" +
+ "tion\022\025\n\rtransactionId\030\001 \001(\t\"\027\n\025AbortTran" +
"sactionReply\"*\n\021CommitTransaction\022\025\n\rtra" +
- "nsactionId\030\001 \002(\t\"\030\n\026CommitTransactionRep" +
+ "nsactionId\030\001 \001(\t\"\030\n\026CommitTransactionRep" +
"ly\"\026\n\024PreCommitTransaction\"\033\n\031PreCommitT" +
"ransactionReplyBZ\n8org.opendaylight.cont" +
"roller.protobuff.messages.cohort3pcB\036Thr",
*/
com.google.protobuf.ByteString
getCodeBytes(int index);
+
+ // optional bytes bytesValue = 13;
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ boolean hasBytesValue();
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ com.google.protobuf.ByteString getBytesValue();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.Node}
code_.add(input.readBytes());
break;
}
+ case 106: {
+ bitField0_ |= 0x00000100;
+ bytesValue_ = input.readBytes();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
return code_.getByteString(index);
}
+ // optional bytes bytesValue = 13;
+ public static final int BYTESVALUE_FIELD_NUMBER = 13;
+ private com.google.protobuf.ByteString bytesValue_;
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public boolean hasBytesValue() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public com.google.protobuf.ByteString getBytesValue() {
+ return bytesValue_;
+ }
+
private void initFields() {
path_ = "";
type_ = "";
instanceIdentifierValue_ = org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages.InstanceIdentifier.getDefaultInstance();
bitsValue_ = com.google.protobuf.LazyStringArrayList.EMPTY;
code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bytesValue_ = com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
for (int i = 0; i < code_.size(); i++) {
output.writeBytes(12, code_.getByteString(i));
}
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ output.writeBytes(13, bytesValue_);
+ }
getUnknownFields().writeTo(output);
}
size += dataSize;
size += 1 * getCodeList().size();
}
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(13, bytesValue_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
bitField0_ = (bitField0_ & ~0x00000400);
code_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000800);
+ bytesValue_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00001000);
return this;
}
bitField0_ = (bitField0_ & ~0x00000800);
}
result.code_ = code_;
+ if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+ to_bitField0_ |= 0x00000100;
+ }
+ result.bytesValue_ = bytesValue_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
onChanged();
}
+ if (other.hasBytesValue()) {
+ setBytesValue(other.getBytesValue());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
return this;
}
+ // optional bytes bytesValue = 13;
+ private com.google.protobuf.ByteString bytesValue_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public boolean hasBytesValue() {
+ return ((bitField0_ & 0x00001000) == 0x00001000);
+ }
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public com.google.protobuf.ByteString getBytesValue() {
+ return bytesValue_;
+ }
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public Builder setBytesValue(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00001000;
+ bytesValue_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes bytesValue = 13;</code>
+ */
+ public Builder clearBytesValue() {
+ bitField0_ = (bitField0_ & ~0x00001000);
+ bytesValue_ = getDefaultInstance().getBytesValue();
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.Node)
}
"ntroller.mdsal.Attribute\022\017\n\007intType\030\006 \001(" +
"\005\"f\n\022InstanceIdentifier\022B\n\targuments\030\001 \003" +
"(\0132/.org.opendaylight.controller.mdsal.P" +
- "athArgument\022\014\n\004code\030\002 \003(\t\"\245\003\n\004Node\022\014\n\004pa" +
+ "athArgument\022\014\n\004code\030\002 \003(\t\"\271\003\n\004Node\022\014\n\004pa" +
"th\030\001 \001(\t\022\014\n\004type\030\002 \001(\t\022E\n\014pathArgument\030\003" +
" \001(\0132/.org.opendaylight.controller.mdsal" +
".PathArgument\022\017\n\007intType\030\004 \001(\005\022@\n\nattrib",
"pe\030\t \001(\005\022V\n\027instanceIdentifierValue\030\n \001(" +
"\01325.org.opendaylight.controller.mdsal.In" +
"stanceIdentifier\022\021\n\tbitsValue\030\013 \003(\t\022\014\n\004c" +
- "ode\030\014 \003(\t\"`\n\tContainer\022\022\n\nparentPath\030\001 \002" +
- "(\t\022?\n\016normalizedNode\030\002 \001(\0132\'.org.openday" +
- "light.controller.mdsal.Node\"\246\001\n\014NodeMapE",
- "ntry\022U\n\026instanceIdentifierPath\030\001 \002(\01325.o" +
- "rg.opendaylight.controller.mdsal.Instanc" +
- "eIdentifier\022?\n\016normalizedNode\030\002 \001(\0132\'.or" +
- "g.opendaylight.controller.mdsal.Node\"N\n\007" +
- "NodeMap\022C\n\nmapEntries\030\001 \003(\0132/.org.openda" +
- "ylight.controller.mdsal.NodeMapEntryBO\n5" +
- "org.opendaylight.controller.protobuff.me" +
- "ssages.commonB\026NormalizedNodeMessages"
+ "ode\030\014 \003(\t\022\022\n\nbytesValue\030\r \001(\014\"`\n\tContain" +
+ "er\022\022\n\nparentPath\030\001 \002(\t\022?\n\016normalizedNode" +
+ "\030\002 \001(\0132\'.org.opendaylight.controller.mds",
+ "al.Node\"\246\001\n\014NodeMapEntry\022U\n\026instanceIden" +
+ "tifierPath\030\001 \002(\01325.org.opendaylight.cont" +
+ "roller.mdsal.InstanceIdentifier\022?\n\016norma" +
+ "lizedNode\030\002 \001(\0132\'.org.opendaylight.contr" +
+ "oller.mdsal.Node\"N\n\007NodeMap\022C\n\nmapEntrie" +
+ "s\030\001 \003(\0132/.org.opendaylight.controller.md" +
+ "sal.NodeMapEntryBO\n5org.opendaylight.con" +
+ "troller.protobuff.messages.commonB\026Norma" +
+ "lizedNodeMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
internal_static_org_opendaylight_controller_mdsal_Node_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_Node_descriptor,
- new java.lang.String[] { "Path", "Type", "PathArgument", "IntType", "Attributes", "Child", "Value", "ValueType", "IntValueType", "InstanceIdentifierValue", "BitsValue", "Code", });
+ new java.lang.String[] { "Path", "Type", "PathArgument", "IntType", "Attributes", "Child", "Value", "ValueType", "IntValueType", "InstanceIdentifierValue", "BitsValue", "Code", "BytesValue", });
internal_static_org_opendaylight_controller_mdsal_Container_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_org_opendaylight_controller_mdsal_Container_fieldAccessorTable = new
*/
com.google.protobuf.ByteString
getTransactionChainIdBytes();
+
+ // optional int32 messageVersion = 4;
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ boolean hasMessageVersion();
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ int getMessageVersion();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransaction}
transactionChainId_ = input.readBytes();
break;
}
+ case 32: {
+ bitField0_ |= 0x00000008;
+ messageVersion_ = input.readInt32();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
}
}
+ // optional int32 messageVersion = 4;
+ public static final int MESSAGEVERSION_FIELD_NUMBER = 4;
+ private int messageVersion_;
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public boolean hasMessageVersion() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public int getMessageVersion() {
+ return messageVersion_;
+ }
+
private void initFields() {
transactionId_ = "";
transactionType_ = 0;
transactionChainId_ = "";
+ messageVersion_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getTransactionChainIdBytes());
}
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeInt32(4, messageVersion_);
+ }
getUnknownFields().writeTo(output);
}
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getTransactionChainIdBytes());
}
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(4, messageVersion_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
bitField0_ = (bitField0_ & ~0x00000002);
transactionChainId_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
+ messageVersion_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
to_bitField0_ |= 0x00000004;
}
result.transactionChainId_ = transactionChainId_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.messageVersion_ = messageVersion_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
transactionChainId_ = other.transactionChainId_;
onChanged();
}
+ if (other.hasMessageVersion()) {
+ setMessageVersion(other.getMessageVersion());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
return this;
}
+ // optional int32 messageVersion = 4;
+ private int messageVersion_ ;
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public boolean hasMessageVersion() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public int getMessageVersion() {
+ return messageVersion_;
+ }
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public Builder setMessageVersion(int value) {
+ bitField0_ |= 0x00000008;
+ messageVersion_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 messageVersion = 4;</code>
+ */
+ public Builder clearMessageVersion() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ messageVersion_ = 0;
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransaction)
}
*/
com.google.protobuf.ByteString
getTransactionIdBytes();
+
+ // optional int32 messageVersion = 3;
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ boolean hasMessageVersion();
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ int getMessageVersion();
}
/**
* Protobuf type {@code org.opendaylight.controller.mdsal.CreateTransactionReply}
transactionId_ = input.readBytes();
break;
}
+ case 24: {
+ bitField0_ |= 0x00000004;
+ messageVersion_ = input.readInt32();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
}
}
+ // optional int32 messageVersion = 3;
+ public static final int MESSAGEVERSION_FIELD_NUMBER = 3;
+ private int messageVersion_;
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public boolean hasMessageVersion() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public int getMessageVersion() {
+ return messageVersion_;
+ }
+
private void initFields() {
transactionActorPath_ = "";
transactionId_ = "";
+ messageVersion_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getTransactionIdBytes());
}
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeInt32(3, messageVersion_);
+ }
getUnknownFields().writeTo(output);
}
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getTransactionIdBytes());
}
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(3, messageVersion_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
bitField0_ = (bitField0_ & ~0x00000001);
transactionId_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
+ messageVersion_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
to_bitField0_ |= 0x00000002;
}
result.transactionId_ = transactionId_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.messageVersion_ = messageVersion_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
transactionId_ = other.transactionId_;
onChanged();
}
+ if (other.hasMessageVersion()) {
+ setMessageVersion(other.getMessageVersion());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
return this;
}
+ // optional int32 messageVersion = 3;
+ private int messageVersion_ ;
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public boolean hasMessageVersion() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public int getMessageVersion() {
+ return messageVersion_;
+ }
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public Builder setMessageVersion(int value) {
+ bitField0_ |= 0x00000004;
+ messageVersion_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 messageVersion = 3;</code>
+ */
+ public Builder clearMessageVersion() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ messageVersion_ = 0;
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:org.opendaylight.controller.mdsal.CreateTransactionReply)
}
java.lang.String[] descriptorData = {
"\n\026ShardTransaction.proto\022!org.opendaylig" +
"ht.controller.mdsal\032\014Common.proto\"\022\n\020Clo" +
- "seTransaction\"\027\n\025CloseTransactionReply\"_" +
+ "seTransaction\"\027\n\025CloseTransactionReply\"w" +
"\n\021CreateTransaction\022\025\n\rtransactionId\030\001 \002" +
"(\t\022\027\n\017transactionType\030\002 \002(\005\022\032\n\022transacti" +
- "onChainId\030\003 \001(\t\"M\n\026CreateTransactionRepl" +
- "y\022\034\n\024transactionActorPath\030\001 \002(\t\022\025\n\rtrans" +
- "actionId\030\002 \002(\t\"\022\n\020ReadyTransaction\"*\n\025Re" +
- "adyTransactionReply\022\021\n\tactorPath\030\001 \002(\t\"l" +
- "\n\nDeleteData\022^\n\037instanceIdentifierPathAr",
+ "onChainId\030\003 \001(\t\022\026\n\016messageVersion\030\004 \001(\005\"" +
+ "e\n\026CreateTransactionReply\022\034\n\024transaction" +
+ "ActorPath\030\001 \002(\t\022\025\n\rtransactionId\030\002 \002(\t\022\026" +
+ "\n\016messageVersion\030\003 \001(\005\"\022\n\020ReadyTransacti" +
+ "on\"*\n\025ReadyTransactionReply\022\021\n\tactorPath",
+ "\030\001 \002(\t\"l\n\nDeleteData\022^\n\037instanceIdentifi" +
+ "erPathArguments\030\001 \002(\01325.org.opendaylight" +
+ ".controller.mdsal.InstanceIdentifier\"\021\n\017" +
+ "DeleteDataReply\"j\n\010ReadData\022^\n\037instanceI" +
+ "dentifierPathArguments\030\001 \002(\01325.org.opend" +
+ "aylight.controller.mdsal.InstanceIdentif" +
+ "ier\"P\n\rReadDataReply\022?\n\016normalizedNode\030\001" +
+ " \001(\0132\'.org.opendaylight.controller.mdsal" +
+ ".Node\"\254\001\n\tWriteData\022^\n\037instanceIdentifie" +
+ "rPathArguments\030\001 \002(\01325.org.opendaylight.",
+ "controller.mdsal.InstanceIdentifier\022?\n\016n" +
+ "ormalizedNode\030\002 \002(\0132\'.org.opendaylight.c" +
+ "ontroller.mdsal.Node\"\020\n\016WriteDataReply\"\254" +
+ "\001\n\tMergeData\022^\n\037instanceIdentifierPathAr" +
"guments\030\001 \002(\01325.org.opendaylight.control" +
- "ler.mdsal.InstanceIdentifier\"\021\n\017DeleteDa" +
- "taReply\"j\n\010ReadData\022^\n\037instanceIdentifie" +
- "rPathArguments\030\001 \002(\01325.org.opendaylight." +
- "controller.mdsal.InstanceIdentifier\"P\n\rR" +
- "eadDataReply\022?\n\016normalizedNode\030\001 \001(\0132\'.o" +
- "rg.opendaylight.controller.mdsal.Node\"\254\001" +
- "\n\tWriteData\022^\n\037instanceIdentifierPathArg" +
- "uments\030\001 \002(\01325.org.opendaylight.controll" +
- "er.mdsal.InstanceIdentifier\022?\n\016normalize",
- "dNode\030\002 \002(\0132\'.org.opendaylight.controlle" +
- "r.mdsal.Node\"\020\n\016WriteDataReply\"\254\001\n\tMerge" +
- "Data\022^\n\037instanceIdentifierPathArguments\030" +
- "\001 \002(\01325.org.opendaylight.controller.mdsa" +
- "l.InstanceIdentifier\022?\n\016normalizedNode\030\002" +
- " \002(\0132\'.org.opendaylight.controller.mdsal" +
- ".Node\"\020\n\016MergeDataReply\"l\n\nDataExists\022^\n" +
- "\037instanceIdentifierPathArguments\030\001 \002(\01325" +
- ".org.opendaylight.controller.mdsal.Insta" +
- "nceIdentifier\"!\n\017DataExistsReply\022\016\n\006exis",
- "ts\030\001 \002(\010BV\n:org.opendaylight.controller." +
- "protobuff.messages.transactionB\030ShardTra" +
- "nsactionMessages"
+ "ler.mdsal.InstanceIdentifier\022?\n\016normaliz" +
+ "edNode\030\002 \002(\0132\'.org.opendaylight.controll" +
+ "er.mdsal.Node\"\020\n\016MergeDataReply\"l\n\nDataE" +
+ "xists\022^\n\037instanceIdentifierPathArguments" +
+ "\030\001 \002(\01325.org.opendaylight.controller.mds",
+ "al.InstanceIdentifier\"!\n\017DataExistsReply" +
+ "\022\016\n\006exists\030\001 \002(\010BV\n:org.opendaylight.con" +
+ "troller.protobuff.messages.transactionB\030" +
+ "ShardTransactionMessages"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
internal_static_org_opendaylight_controller_mdsal_CreateTransaction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CreateTransaction_descriptor,
- new java.lang.String[] { "TransactionId", "TransactionType", "TransactionChainId", });
+ new java.lang.String[] { "TransactionId", "TransactionType", "TransactionChainId", "MessageVersion", });
internal_static_org_opendaylight_controller_mdsal_CreateTransactionReply_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_org_opendaylight_controller_mdsal_CreateTransactionReply_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_opendaylight_controller_mdsal_CreateTransactionReply_descriptor,
- new java.lang.String[] { "TransactionActorPath", "TransactionId", });
+ new java.lang.String[] { "TransactionActorPath", "TransactionId", "MessageVersion", });
internal_static_org_opendaylight_controller_mdsal_ReadyTransaction_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_org_opendaylight_controller_mdsal_ReadyTransaction_fieldAccessorTable = new
message CanCommitTransaction{
- required string transactionId = 1;
+ optional string transactionId = 1;
}
message CanCommitTransactionReply{
}
message AbortTransaction{
- required string transactionId = 1;
+ optional string transactionId = 1;
}
message AbortTransactionReply {
}
message CommitTransaction{
- required string transactionId = 1;
+ optional string transactionId = 1;
}
message CommitTransactionReply{
repeated string bitsValue = 11; // intValueType = Bits
repeated string code = 12; // A list of string codes which can be used for any repeated strings in the NormalizedNode
+
+ optional bytes bytesValue = 13;
}
message Container{
required string transactionId = 1;
required int32 transactionType =2;
optional string transactionChainId = 3;
+ optional int32 messageVersion = 4;
}
message CreateTransactionReply{
-required string transactionActorPath = 1;
-required string transactionId = 2;
-
+ required string transactionActorPath = 1;
+ required string transactionId = 2;
+ optional int32 messageVersion = 3;
}
message ReadyTransaction{
}
message ReadyTransactionReply{
-required string actorPath = 1;
+ required string actorPath = 1;
}
message DeleteData {
package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+import com.google.common.base.Optional;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.opendaylight.controller.cluster.datastore.util.TestModel;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
+import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
public class NormalizedNodeSerializerTest {
// created by serializing the original node and deSerializing it back.
assertEquals(expectedNode, actualNode);
+ byte[] binaryData = new byte[5];
+ for(byte i=0;i<5;i++){
+ binaryData[i] = i;
+ }
+
+ ContainerNode node1 = TestModel.createBaseTestContainerBuilder()
+ .withChild(ImmutableNodes.leafNode(TestModel.SOME_BINARY_DATE_QNAME, binaryData))
+ .build();
+
+ NormalizedNodeMessages.Node serializedNode1 = NormalizedNodeSerializer
+ .serialize(node1);
+
+ ContainerNode node2 =
+ (ContainerNode) NormalizedNodeSerializer.deSerialize(serializedNode1);
+
+
+ // FIXME: This will not work due to BUG 2326. Once that is fixed we can uncomment this assertion
+ // assertEquals(node1, node2);
+
+ Optional<DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?>> child = node2.getChild(new YangInstanceIdentifier.NodeIdentifier(TestModel.SOME_BINARY_DATE_QNAME));
+
+ Object value = child.get().getValue();
+
+ assertTrue("value should be of type byte[]", value instanceof byte[]);
+
+ byte[] bytesValue = (byte[]) value;
+
+ for(byte i=0;i<5;i++){
+ assertEquals(i, bytesValue[i]);
+ }
+
}
@Test(expected = NullPointerException.class)
--- /dev/null
+package org.opendaylight.controller.cluster.datastore.node.utils.serialization;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Unit test for ValueType.getSerializableType().
+ */
+public class ValueTypeTest {
+
+    @Test
+    public void testGetSerializableType(){
+        // getSerializableType() should classify a byte[] value as BINARY_TYPE.
+        byte[] b = new byte[10];
+        b[0] = 1;
+        b[2] = 2;
+
+        ValueType serializableType = ValueType.getSerializableType(b);
+        assertEquals(ValueType.BINARY_TYPE, serializableType);
+    }
+}
\ No newline at end of file
public static final QName DESC_QNAME = QName.create(TEST_QNAME, "desc");
public static final QName POINTER_QNAME = QName.create(TEST_QNAME, "pointer");
+ public static final QName SOME_BINARY_DATE_QNAME = QName.create(TEST_QNAME, "some-binary-data");
public static final QName SOME_REF_QNAME = QName.create(TEST_QNAME,
"some-ref");
public static final QName MYIDENTITY_QNAME = QName.create(TEST_QNAME,
}
- public static ContainerNode createTestContainer() {
-
-
- // Create a list of shoes
- // This is to test leaf list entry
- final LeafSetEntryNode<Object> nike =
- ImmutableLeafSetEntryNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeWithValue(QName.create(
- TEST_QNAME, "shoe"), "nike")).withValue("nike").build();
-
- final LeafSetEntryNode<Object> puma =
- ImmutableLeafSetEntryNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeWithValue(QName.create(
- TEST_QNAME, "shoe"), "puma")).withValue("puma").build();
-
- final LeafSetNode<Object> shoes =
- ImmutableLeafSetNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(QName.create(
- TEST_QNAME, "shoe"))).withChild(nike).withChild(puma)
- .build();
-
-
- // Test a leaf-list where each entry contains an identity
- final LeafSetEntryNode<Object> cap1 =
- ImmutableLeafSetEntryNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeWithValue(QName.create(
- TEST_QNAME, "capability"), DESC_QNAME))
- .withValue(DESC_QNAME).build();
-
- final LeafSetNode<Object> capabilities =
- ImmutableLeafSetNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(QName.create(
- TEST_QNAME, "capability"))).withChild(cap1).build();
-
- ContainerNode switchFeatures =
- ImmutableContainerNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(SWITCH_FEATURES_QNAME))
- .withChild(capabilities).build();
-
- // Create a leaf list with numbers
- final LeafSetEntryNode<Object> five =
- ImmutableLeafSetEntryNodeBuilder
- .create()
- .withNodeIdentifier(
- (new YangInstanceIdentifier.NodeWithValue(QName.create(
- TEST_QNAME, "number"), 5))).withValue(5).build();
- final LeafSetEntryNode<Object> fifteen =
- ImmutableLeafSetEntryNodeBuilder
- .create()
- .withNodeIdentifier(
- (new YangInstanceIdentifier.NodeWithValue(QName.create(
- TEST_QNAME, "number"), 15))).withValue(15).build();
- final LeafSetNode<Object> numbers =
- ImmutableLeafSetNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(QName.create(
- TEST_QNAME, "number"))).withChild(five).withChild(fifteen)
- .build();
-
-
- // Create augmentations
- MapEntryNode mapEntry = createAugmentedListEntry(1, "First Test");
-
- // Create a bits leaf
+    /**
+     * Creates a builder pre-populated with the standard test-container
+     * children (shoe/number leaf lists, capability identity leaf list,
+     * augmented list entry, bits leaf, outer list) so callers can append
+     * additional children (e.g. a binary leaf) before calling build().
+     */
+    public static DataContainerNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, ContainerNode> createBaseTestContainerBuilder() {
+ // Create a list of shoes
+ // This is to test leaf list entry
+ final LeafSetEntryNode<Object> nike =
+ ImmutableLeafSetEntryNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeWithValue(QName.create(
+ TEST_QNAME, "shoe"), "nike")).withValue("nike").build();
+
+ final LeafSetEntryNode<Object> puma =
+ ImmutableLeafSetEntryNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeWithValue(QName.create(
+ TEST_QNAME, "shoe"), "puma")).withValue("puma").build();
+
+ final LeafSetNode<Object> shoes =
+ ImmutableLeafSetNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(QName.create(
+ TEST_QNAME, "shoe"))).withChild(nike).withChild(puma)
+ .build();
+
+
+ // Test a leaf-list where each entry contains an identity
+ final LeafSetEntryNode<Object> cap1 =
+ ImmutableLeafSetEntryNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeWithValue(QName.create(
+ TEST_QNAME, "capability"), DESC_QNAME))
+ .withValue(DESC_QNAME).build();
+
+ final LeafSetNode<Object> capabilities =
+ ImmutableLeafSetNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(QName.create(
+ TEST_QNAME, "capability"))).withChild(cap1).build();
+
+ ContainerNode switchFeatures =
+ ImmutableContainerNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(SWITCH_FEATURES_QNAME))
+ .withChild(capabilities).build();
+
+ // Create a leaf list with numbers
+ final LeafSetEntryNode<Object> five =
+ ImmutableLeafSetEntryNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ (new YangInstanceIdentifier.NodeWithValue(QName.create(
+ TEST_QNAME, "number"), 5))).withValue(5).build();
+ final LeafSetEntryNode<Object> fifteen =
+ ImmutableLeafSetEntryNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ (new YangInstanceIdentifier.NodeWithValue(QName.create(
+ TEST_QNAME, "number"), 15))).withValue(15).build();
+ final LeafSetNode<Object> numbers =
+ ImmutableLeafSetNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(QName.create(
+ TEST_QNAME, "number"))).withChild(five).withChild(fifteen)
+ .build();
+
+
+ // Create augmentations
+ MapEntryNode mapEntry = createAugmentedListEntry(1, "First Test");
+
+ // Create a bits leaf
NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, Object, LeafNode<Object>>
- myBits = Builders.leafBuilder().withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(
- QName.create(TEST_QNAME, "my-bits"))).withValue(
- ImmutableSet.of("foo", "bar"));
+ myBits = Builders.leafBuilder().withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(
+ QName.create(TEST_QNAME, "my-bits"))).withValue(
+ ImmutableSet.of("foo", "bar"));
// Create the document
- return ImmutableContainerNodeBuilder
- .create()
- .withNodeIdentifier(
- new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME))
- .withChild(myBits.build())
- .withChild(ImmutableNodes.leafNode(DESC_QNAME, DESC))
- .withChild(ImmutableNodes.leafNode(POINTER_QNAME, "pointer"))
- .withChild(
- ImmutableNodes.leafNode(SOME_REF_QNAME, YangInstanceIdentifier
- .builder().build()))
- .withChild(ImmutableNodes.leafNode(MYIDENTITY_QNAME, DESC_QNAME))
-
- // .withChild(augmentationNode)
- .withChild(shoes)
- .withChild(numbers)
- .withChild(switchFeatures)
- .withChild(
- mapNodeBuilder(AUGMENTED_LIST_QNAME).withChild(mapEntry).build())
- .withChild(
- mapNodeBuilder(OUTER_LIST_QNAME)
- .withChild(mapEntry(OUTER_LIST_QNAME, ID_QNAME, ONE_ID))
- .withChild(BAR_NODE).build()).build();
+ return ImmutableContainerNodeBuilder
+ .create()
+ .withNodeIdentifier(
+ new YangInstanceIdentifier.NodeIdentifier(TEST_QNAME))
+ .withChild(myBits.build())
+ .withChild(ImmutableNodes.leafNode(DESC_QNAME, DESC))
+ .withChild(ImmutableNodes.leafNode(POINTER_QNAME, "pointer"))
+ .withChild(
+ ImmutableNodes.leafNode(SOME_REF_QNAME, YangInstanceIdentifier
+ .builder().build()))
+ .withChild(ImmutableNodes.leafNode(MYIDENTITY_QNAME, DESC_QNAME))
+
+ // .withChild(augmentationNode)
+ .withChild(shoes)
+ .withChild(numbers)
+ .withChild(switchFeatures)
+ .withChild(
+ mapNodeBuilder(AUGMENTED_LIST_QNAME).withChild(mapEntry).build())
+ .withChild(
+ mapNodeBuilder(OUTER_LIST_QNAME)
+ .withChild(mapEntry(OUTER_LIST_QNAME, ID_QNAME, ONE_ID))
+ .withChild(BAR_NODE).build());
+ }
+    /** Builds the standard test container with no extra children appended. */
+    public static ContainerNode createTestContainer() {
+        return createBaseTestContainerBuilder().build();
}
public static MapEntryNode createAugmentedListEntry(int id, String name) {
}
}
+ leaf some-binary-data {
+ type binary;
+ }
+
}
}
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-inmemory-datastore</artifactId>
- <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-clustering-commons</artifactId>
- <version>1.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
- <version>${slf4j.version}</version>
<scope>test</scope>
</dependency>
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigObject;
-import org.opendaylight.controller.cluster.datastore.shardstrategy.DefaultShardStrategy;
-import org.opendaylight.controller.cluster.datastore.shardstrategy.ModuleShardStrategy;
-import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategy;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.DefaultShardStrategy;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.ModuleShardStrategy;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class ConfigurationImpl implements Configuration {
// Look up maps to speed things up
// key = memberName, value = list of shardNames
- private Map<String, List<String>> memberShardNames = new HashMap<>();
+ private final Map<String, List<String>> memberShardNames = new HashMap<>();
// key = shardName, value = list of replicaNames (replicaNames are the same as memberNames)
- private Map<String, List<String>> shardReplicaNames = new HashMap<>();
+ private final Map<String, List<String>> shardReplicaNames = new HashMap<>();
- public ConfigurationImpl(String moduleShardsConfigPath,
+ public ConfigurationImpl(final String moduleShardsConfigPath,
- String modulesConfigPath){
+ final String modulesConfigPath){
Preconditions.checkNotNull(moduleShardsConfigPath, "moduleShardsConfigPath should not be null");
Preconditions.checkNotNull(modulesConfigPath, "modulesConfigPath should not be null");
readModules(modulesConfig);
}
- @Override public List<String> getMemberShardNames(String memberName){
+ @Override public List<String> getMemberShardNames(final String memberName){
Preconditions.checkNotNull(memberName, "memberName should not be null");
return memberShardNames.get(memberName);
}
- List<String> shards = new ArrayList();
+ List<String> shards = new ArrayList<>();
for(ModuleShard ms : moduleShards){
for(Shard s : ms.getShards()){
for(String m : s.getReplicas()){
}
- @Override public Optional<String> getModuleNameFromNameSpace(String nameSpace) {
+ @Override public Optional<String> getModuleNameFromNameSpace(final String nameSpace) {
Preconditions.checkNotNull(nameSpace, "nameSpace should not be null");
return map;
}
- @Override public List<String> getShardNamesFromModuleName(String moduleName) {
+ @Override public List<String> getShardNamesFromModuleName(final String moduleName) {
Preconditions.checkNotNull(moduleName, "moduleName should not be null");
}
}
- return Collections.EMPTY_LIST;
+ return Collections.emptyList();
}
- @Override public List<String> getMembersFromShardName(String shardName) {
+ @Override public List<String> getMembersFromShardName(final String shardName) {
Preconditions.checkNotNull(shardName, "shardName should not be null");
}
}
}
- shardReplicaNames.put(shardName, Collections.EMPTY_LIST);
- return Collections.EMPTY_LIST;
+ shardReplicaNames.put(shardName, Collections.<String>emptyList());
+ return Collections.emptyList();
}
@Override public Set<String> getAllShardNames() {
- private void readModules(Config modulesConfig) {
+ private void readModules(final Config modulesConfig) {
List<? extends ConfigObject> modulesConfigObjectList =
modulesConfig.getObjectList("modules");
}
}
- private void readModuleShards(Config moduleShardsConfig) {
+ private void readModuleShards(final Config moduleShardsConfig) {
List<? extends ConfigObject> moduleShardsConfigObjectList =
moduleShardsConfig.getObjectList("module-shards");
private final String moduleName;
private final List<Shard> shards;
- public ModuleShard(String moduleName, List<Shard> shards) {
+ public ModuleShard(final String moduleName, final List<Shard> shards) {
this.moduleName = moduleName;
this.shards = shards;
}
private final String name;
private final List<String> replicas;
- Shard(String name, List<String> replicas) {
+ Shard(final String name, final List<String> replicas) {
this.name = name;
this.replicas = replicas;
}
private final String nameSpace;
private final ShardStrategy shardStrategy;
- Module(String name, String nameSpace, String shardStrategy) {
+ Module(final String name, final String nameSpace, final String shardStrategy) {
this.name = name;
this.nameSpace = nameSpace;
if(ModuleShardStrategy.NAME.equals(shardStrategy)){
private final ConfigObject configObject;
- ConfigObjectWrapper(ConfigObject configObject){
+ ConfigObjectWrapper(final ConfigObject configObject){
this.configObject = configObject;
}
- public String stringValue(String name){
+ public String stringValue(final String name){
return configObject.get(name).unwrapped().toString();
}
}
import org.opendaylight.controller.cluster.common.actor.CommonConfig;
import org.opendaylight.controller.cluster.common.actor.MeteringBehavior;
import org.opendaylight.controller.cluster.datastore.ShardCommitCoordinator.CohortEntry;
+import org.opendaylight.controller.cluster.datastore.compat.BackwardsCompatibleThreePhaseCommitCohort;
import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardTransactionIdentifier;
}
private void handleForwardedReadyTransaction(ForwardedReadyTransaction ready) {
- LOG.debug("Readying transaction {}", ready.getTransactionID());
+ LOG.debug("Readying transaction {}, client version {}", ready.getTransactionID(),
+ ready.getTxnClientVersion());
// This message is forwarded by the ShardTransaction on ready. We cache the cohort in the
// commitCoordinator in preparation for the subsequent three phase commit initiated by
commitCoordinator.transactionReady(ready.getTransactionID(), ready.getCohort(),
ready.getModification());
- // Return our actor path as we'll handle the three phase commit.
- ReadyTransactionReply readyTransactionReply =
- new ReadyTransactionReply(Serialization.serializedActorPath(self()));
- getSender().tell(
- ready.isReturnSerialized() ? readyTransactionReply.toSerializable() : readyTransactionReply,
- getSelf());
+ // Return our actor path as we'll handle the three phase commit, except if the Tx client
+ // version < 1 (Helium-1 version). This means the Tx was initiated by a base Helium version
+ // node. In that case, the subsequent 3-phase commit messages won't contain the
+ // transactionId so to maintain backwards compatibility, we create a separate cohort actor
+ // to provide the compatible behavior.
+ ActorRef replyActorPath = self();
+ if(ready.getTxnClientVersion() < CreateTransaction.HELIUM_1_VERSION) {
+ LOG.debug("Creating BackwardsCompatibleThreePhaseCommitCohort");
+ replyActorPath = getContext().actorOf(BackwardsCompatibleThreePhaseCommitCohort.props(
+ ready.getTransactionID()));
+ }
+
+ ReadyTransactionReply readyTransactionReply = new ReadyTransactionReply(
+ Serialization.serializedActorPath(replyActorPath));
+ getSender().tell(ready.isReturnSerialized() ? readyTransactionReply.toSerializable() :
+ readyTransactionReply, getSelf());
}
private void handleAbortTransaction(AbortTransaction abort) {
}
}
- private ActorRef createTypedTransactionActor(
- int transactionType,
- ShardTransactionIdentifier transactionId,
- String transactionChainId ) {
+ private ActorRef createTypedTransactionActor(int transactionType,
+ ShardTransactionIdentifier transactionId, String transactionChainId, int clientVersion ) {
DOMStoreTransactionFactory factory = store;
return getContext().actorOf(
ShardTransaction.props(factory.newReadOnlyTransaction(), getSelf(),
schemaContext,datastoreContext, shardMBean,
- transactionId.getRemoteTransactionId()), transactionId.toString());
+ transactionId.getRemoteTransactionId(), clientVersion),
+ transactionId.toString());
} else if (transactionType == TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
return getContext().actorOf(
ShardTransaction.props(factory.newReadWriteTransaction(), getSelf(),
schemaContext, datastoreContext, shardMBean,
- transactionId.getRemoteTransactionId()), transactionId.toString());
+ transactionId.getRemoteTransactionId(), clientVersion),
+ transactionId.toString());
} else if (transactionType == TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props(factory.newWriteOnlyTransaction(), getSelf(),
schemaContext, datastoreContext, shardMBean,
- transactionId.getRemoteTransactionId()), transactionId.toString());
+ transactionId.getRemoteTransactionId(), clientVersion),
+ transactionId.toString());
} else {
throw new IllegalArgumentException(
"Shard="+name + ":CreateTransaction message has unidentified transaction type="
private void createTransaction(CreateTransaction createTransaction) {
createTransaction(createTransaction.getTransactionType(),
- createTransaction.getTransactionId(), createTransaction.getTransactionChainId());
+ createTransaction.getTransactionId(), createTransaction.getTransactionChainId(),
+ createTransaction.getVersion());
}
- private ActorRef createTransaction(int transactionType, String remoteTransactionId, String transactionChainId) {
+ private ActorRef createTransaction(int transactionType, String remoteTransactionId,
+ String transactionChainId, int clientVersion) {
ShardTransactionIdentifier transactionId =
ShardTransactionIdentifier.builder()
if(LOG.isDebugEnabled()) {
LOG.debug("Creating transaction : {} ", transactionId);
}
- ActorRef transactionActor =
- createTypedTransactionActor(transactionType, transactionId, transactionChainId);
+ ActorRef transactionActor = createTypedTransactionActor(transactionType, transactionId,
+ transactionChainId, clientVersion);
getSender()
.tell(new CreateTransactionReply(
LOG.debug("registerDataChangeListener sending reply, listenerRegistrationPath = {} ",
listenerRegistration.path());
- getSender().tell(new RegisterChangeListenerReply(listenerRegistration.path()),getSelf());
+ getSender().tell(new RegisterChangeListenerReply(listenerRegistration.path()), getSelf());
}
private ListenerRegistration<AsyncDataChangeListener<YangInstanceIdentifier,
// so that this actor does not get block building the snapshot
createSnapshotTransaction = createTransaction(
TransactionProxy.TransactionType.READ_ONLY.ordinal(),
- "createSnapshot" + ++createSnapshotTransactionCounter, "");
+ "createSnapshot" + ++createSnapshotTransactionCounter, "",
+ CreateTransaction.CURRENT_VERSION);
createSnapshotTransaction.tell(
new ReadData(YangInstanceIdentifier.builder().build()).toSerializable(), self());
private final DOMStoreReadTransaction transaction;
public ShardReadTransaction(DOMStoreReadTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
- super(shardActor, schemaContext, shardStats, transactionID);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID,
+ int txnClientVersion) {
+ super(shardActor, schemaContext, shardStats, transactionID, txnClientVersion);
this.transaction = transaction;
}
private final DOMStoreReadWriteTransaction transaction;
public ShardReadWriteTransaction(DOMStoreReadWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
- super(transaction, shardActor, schemaContext, shardStats, transactionID);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID,
+ int txnClientVersion) {
+ super(transaction, shardActor, schemaContext, shardStats, transactionID, txnClientVersion);
this.transaction = transaction;
}
*/
public abstract class ShardTransaction extends AbstractUntypedActorWithMetering {
+ protected static final boolean SERIALIZED_REPLY = true;
+
private final ActorRef shardActor;
private final SchemaContext schemaContext;
private final ShardStats shardStats;
private final String transactionID;
- protected static final boolean SERIALIZED_REPLY = true;
+ private final int txnClientVersion;
protected ShardTransaction(ActorRef shardActor, SchemaContext schemaContext,
- ShardStats shardStats, String transactionID) {
+ ShardStats shardStats, String transactionID, int txnClientVersion) {
super("shard-tx"); //actor name override used for metering. This does not change the "real" actor name
this.shardActor = shardActor;
this.schemaContext = schemaContext;
this.shardStats = shardStats;
this.transactionID = transactionID;
+ this.txnClientVersion = txnClientVersion;
}
public static Props props(DOMStoreTransaction transaction, ActorRef shardActor,
SchemaContext schemaContext,DatastoreContext datastoreContext, ShardStats shardStats,
- String transactionID) {
+ String transactionID, int txnClientVersion) {
return Props.create(new ShardTransactionCreator(transaction, shardActor, schemaContext,
- datastoreContext, shardStats, transactionID));
+ datastoreContext, shardStats, transactionID, txnClientVersion));
}
protected abstract DOMStoreTransaction getDOMStoreTransaction();
return schemaContext;
}
+ protected int getTxnClientVersion() {
+ return txnClientVersion;
+ }
+
@Override
public void handleReceive(Object message) throws Exception {
if (message.getClass().equals(CloseTransaction.SERIALIZABLE_CLASS)) {
final DatastoreContext datastoreContext;
final ShardStats shardStats;
final String transactionID;
+ final int txnClientVersion;
ShardTransactionCreator(DOMStoreTransaction transaction, ActorRef shardActor,
SchemaContext schemaContext, DatastoreContext datastoreContext,
- ShardStats shardStats, String transactionID) {
+ ShardStats shardStats, String transactionID, int txnClientVersion) {
this.transaction = transaction;
this.shardActor = shardActor;
this.shardStats = shardStats;
this.schemaContext = schemaContext;
this.datastoreContext = datastoreContext;
this.transactionID = transactionID;
+ this.txnClientVersion = txnClientVersion;
}
@Override
ShardTransaction tx;
if(transaction instanceof DOMStoreReadWriteTransaction) {
tx = new ShardReadWriteTransaction((DOMStoreReadWriteTransaction)transaction,
- shardActor, schemaContext, shardStats, transactionID);
+ shardActor, schemaContext, shardStats, transactionID, txnClientVersion);
} else if(transaction instanceof DOMStoreReadTransaction) {
tx = new ShardReadTransaction((DOMStoreReadTransaction)transaction, shardActor,
- schemaContext, shardStats, transactionID);
+ schemaContext, shardStats, transactionID, txnClientVersion);
} else {
tx = new ShardWriteTransaction((DOMStoreWriteTransaction)transaction,
- shardActor, schemaContext, shardStats, transactionID);
+ shardActor, schemaContext, shardStats, transactionID, txnClientVersion);
}
tx.getContext().setReceiveTimeout(datastoreContext.getShardTransactionIdleTimeout());
return getContext().actorOf(
ShardTransaction.props( chain.newReadOnlyTransaction(), getShardActor(),
schemaContext, datastoreContext, shardStats,
- createTransaction.getTransactionId()), transactionName);
+ createTransaction.getTransactionId(),
+ createTransaction.getVersion()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.READ_WRITE.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newReadWriteTransaction(), getShardActor(),
schemaContext, datastoreContext, shardStats,
- createTransaction.getTransactionId()), transactionName);
+ createTransaction.getTransactionId(),
+ createTransaction.getVersion()), transactionName);
} else if (createTransaction.getTransactionType() ==
TransactionProxy.TransactionType.WRITE_ONLY.ordinal()) {
return getContext().actorOf(
ShardTransaction.props( chain.newWriteOnlyTransaction(), getShardActor(),
schemaContext, datastoreContext, shardStats,
- createTransaction.getTransactionId()), transactionName);
+ createTransaction.getTransactionId(),
+ createTransaction.getVersion()), transactionName);
} else {
throw new IllegalArgumentException (
"CreateTransaction message has unidentified transaction type=" +
private final DOMStoreWriteTransaction transaction;
public ShardWriteTransaction(DOMStoreWriteTransaction transaction, ActorRef shardActor,
- SchemaContext schemaContext, ShardStats shardStats, String transactionID) {
- super(shardActor, schemaContext, shardStats, transactionID);
+ SchemaContext schemaContext, ShardStats shardStats, String transactionID,
+ int txnClientVersion) {
+ super(shardActor, schemaContext, shardStats, transactionID, txnClientVersion);
this.transaction = transaction;
}
DOMStoreThreePhaseCommitCohort cohort = transaction.ready();
- getShardActor().forward(new ForwardedReadyTransaction(transactionID, cohort, modification,
- returnSerialized), getContext());
+ getShardActor().forward(new ForwardedReadyTransaction(transactionID, getTxnClientVersion(),
+ cohort, modification, returnSerialized), getContext());
// The shard will handle the commit from here so we're no longer needed - self-destruct.
getSelf().tell(PoisonPill.getInstance(), getSelf());
package org.opendaylight.controller.cluster.datastore;
import akka.actor.ActorSelection;
-import akka.dispatch.Futures;
+import akka.dispatch.OnComplete;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.List;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionChain;
import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransactionChain;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
-import scala.concurrent.Await;
import scala.concurrent.Future;
-
-import java.util.Collections;
-import java.util.List;
+import scala.concurrent.Promise;
/**
* TransactionChainProxy acts as a proxy for a DOMStoreTransactionChain created on a remote shard
public class TransactionChainProxy implements DOMStoreTransactionChain{
private final ActorContext actorContext;
private final String transactionChainId;
- private volatile List<Future<ActorSelection>> cohortFutures = Collections.emptyList();
+ private volatile SimpleEntry<Object, List<Future<ActorSelection>>> previousTxReadyFutures;
public TransactionChainProxy(ActorContext actorContext) {
this.actorContext = actorContext;
@Override
public DOMStoreReadTransaction newReadOnlyTransaction() {
- return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_ONLY, this);
+ return new ChainedTransactionProxy(actorContext, TransactionProxy.TransactionType.READ_ONLY);
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
- return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_WRITE, this);
+ return new ChainedTransactionProxy(actorContext, TransactionProxy.TransactionType.READ_WRITE);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
- return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.WRITE_ONLY, this);
+ return new ChainedTransactionProxy(actorContext, TransactionProxy.TransactionType.WRITE_ONLY);
}
@Override
return transactionChainId;
}
- public void onTransactionReady(List<Future<ActorSelection>> cohortFutures){
- this.cohortFutures = cohortFutures;
- }
+ private class ChainedTransactionProxy extends TransactionProxy {
+
+ ChainedTransactionProxy(ActorContext actorContext, TransactionType transactionType) {
+ super(actorContext, transactionType, transactionChainId);
+ }
+
+ @Override
+ protected void onTransactionReady(List<Future<ActorSelection>> cohortFutures) {
+ if(!cohortFutures.isEmpty()) {
+ previousTxReadyFutures = new SimpleEntry<>(getIdentifier(), cohortFutures);
+ } else {
+ previousTxReadyFutures = null;
+ }
+ }
+
+ /**
+ * This method is overridden to ensure the previous Tx's ready operations complete
+ * before we create the next shard Tx in the chain to avoid creation failures if the
+ * previous Tx's ready operations haven't completed yet.
+ */
+ @Override
+ protected Future<Object> sendCreateTransaction(final ActorSelection shard,
+ final Object serializedCreateMessage) {
+ // Check if there are any previous ready Futures. Also make sure the previous ready
+ // Futures aren't for this Tx as deadlock would occur if tried to wait on our own
+ // Futures. This may happen b/c the shard Tx creates are done async so it's possible
+ // for the client to ready this Tx before we've even attempted to create a shard Tx.
+ if(previousTxReadyFutures == null ||
+ previousTxReadyFutures.getKey().equals(getIdentifier())) {
+ return super.sendCreateTransaction(shard, serializedCreateMessage);
+ }
+
+ // Combine the ready Futures into 1.
+ Future<Iterable<ActorSelection>> combinedFutures = akka.dispatch.Futures.sequence(
+ previousTxReadyFutures.getValue(), actorContext.getActorSystem().dispatcher());
+
+ // Add a callback for completion of the combined Futures.
+ final Promise<Object> createTxPromise = akka.dispatch.Futures.promise();
+ OnComplete<Iterable<ActorSelection>> onComplete = new OnComplete<Iterable<ActorSelection>>() {
+ @Override
+ public void onComplete(Throwable failure, Iterable<ActorSelection> notUsed) {
+ if(failure != null) {
+ // A Ready Future failed so fail the returned Promise.
+ createTxPromise.failure(failure);
+ } else {
+ // Send the CreateTx message and use the resulting Future to complete the
+ // returned Promise.
+ createTxPromise.completeWith(actorContext.executeOperationAsync(shard,
+ serializedCreateMessage));
+ }
+ }
+ };
+
+ combinedFutures.onComplete(onComplete, actorContext.getActorSystem().dispatcher());
- public void waitTillCurrentTransactionReady(){
- try {
- Await.result(Futures
- .sequence(this.cohortFutures, actorContext.getActorSystem().dispatcher()),
- actorContext.getOperationDuration());
- } catch (Exception e) {
- throw new IllegalStateException("Failed when waiting for transaction on a chain to become ready", e);
+ return createTxPromise.future();
}
}
}
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.SettableFuture;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import javax.annotation.concurrent.GuardedBy;
import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
import org.opendaylight.controller.cluster.datastore.identifiers.TransactionIdentifier;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import scala.concurrent.Future;
import scala.concurrent.Promise;
import scala.concurrent.duration.FiniteDuration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import javax.annotation.concurrent.GuardedBy;
/**
* TransactionProxy acts as a proxy for one or more transactions that were created on a remote shard
private final TransactionType transactionType;
private final ActorContext actorContext;
private final TransactionIdentifier identifier;
- private final TransactionChainProxy transactionChainProxy;
+ private final String transactionChainId;
private final SchemaContext schemaContext;
private boolean inReadyState;
public TransactionProxy(ActorContext actorContext, TransactionType transactionType) {
- this(actorContext, transactionType, null);
+ this(actorContext, transactionType, "");
}
public TransactionProxy(ActorContext actorContext, TransactionType transactionType,
- TransactionChainProxy transactionChainProxy) {
+ String transactionChainId) {
this.actorContext = Preconditions.checkNotNull(actorContext,
"actorContext should not be null");
this.transactionType = Preconditions.checkNotNull(transactionType,
"transactionType should not be null");
this.schemaContext = Preconditions.checkNotNull(actorContext.getSchemaContext(),
"schemaContext should not be null");
- this.transactionChainProxy = transactionChainProxy;
+ this.transactionChainId = transactionChainId;
String memberName = actorContext.getCurrentMemberName();
if(memberName == null){
return recordedOperationFutures;
}
+    // Returns true if at least one shard TransactionFutureCallback has produced a
+    // TransactionContext, ie at least one remote shard transaction was actually created.
+    // Intended for unit tests only (see @VisibleForTesting).
+    @VisibleForTesting
+    boolean hasTransactionContext() {
+        for(TransactionFutureCallback txFutureCallback : txFutureCallbackMap.values()) {
+            TransactionContext transactionContext = txFutureCallback.getTransactionContext();
+            if(transactionContext != null) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
@Override
public CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read(
final YangInstanceIdentifier path) {
}
}
- if(transactionChainProxy != null){
- transactionChainProxy.onTransactionReady(cohortFutures);
- }
+ onTransactionReady(cohortFutures);
return new ThreePhaseCommitCohortProxy(actorContext, cohortFutures,
identifier.toString());
}
+    /**
+     * Method for derived classes to be notified when the transaction has been readied.
+     * This default implementation does nothing; subclasses (eg chained transactions)
+     * may override to capture the cohort Futures.
+     *
+     * @param cohortFutures the cohort Futures for each shard transaction.
+     */
+    protected void onTransactionReady(List<Future<ActorSelection>> cohortFutures) {
+    }
+
+    /**
+     * Method called to send a CreateTransaction message to a shard. Subclasses may
+     * override to defer or gate the send (eg until prior chained transactions are ready).
+     *
+     * @param shard the shard actor to send to
+     * @param serializedCreateMessage the serialized message to send
+     * @return the response Future
+     */
+    protected Future<Object> sendCreateTransaction(ActorSelection shard,
+            Object serializedCreateMessage) {
+        return actorContext.executeOperationAsync(shard, serializedCreateMessage);
+    }
+
@Override
public Object getIdentifier() {
return this.identifier;
}
public String getTransactionChainId() {
- if(transactionChainProxy == null){
- return "";
- }
- return transactionChainProxy.getTransactionChainId();
+ return transactionChainId;
}
/**
* Performs a CreateTransaction try async.
*/
private void tryCreateTransaction() {
- Future<Object> createTxFuture = actorContext.executeOperationAsync(primaryShard,
+ Future<Object> createTxFuture = sendCreateTransaction(primaryShard,
new CreateTransaction(identifier.toString(),
TransactionProxy.this.transactionType.ordinal(),
getTransactionChainId()).toSerializable());
// respect to #addTxOperationOnComplete to handle timing issues and ensure no
// TransactionOperation is missed and that they are processed in the order they occurred.
synchronized(txOperationsOnComplete) {
+ // Store the new TransactionContext locally until we've completed invoking the
+ // TransactionOperations. This avoids thread timing issues which could cause
+ // out-of-order TransactionOperations. Eg, on a modification operation, if the
+ // TransactionContext is non-null, then we directly call the TransactionContext.
+ // However, at the same time, the code may be executing the cached
+            // TransactionOperations. So to avoid this timing issue, we don't publish the
+ // TransactionContext until after we've executed all cached TransactionOperations.
+ TransactionContext localTransactionContext;
if(failure != null) {
LOG.debug("Tx {} Creating NoOpTransaction because of error: {}", identifier,
failure.getMessage());
- transactionContext = new NoOpTransactionContext(failure, identifier);
+ localTransactionContext = new NoOpTransactionContext(failure, identifier);
} else if (response.getClass().equals(CreateTransactionReply.SERIALIZABLE_CLASS)) {
- createValidTransactionContext(CreateTransactionReply.fromSerializable(response));
+ localTransactionContext = createValidTransactionContext(
+ CreateTransactionReply.fromSerializable(response));
} else {
IllegalArgumentException exception = new IllegalArgumentException(String.format(
"Invalid reply type %s for CreateTransaction", response.getClass()));
- transactionContext = new NoOpTransactionContext(exception, identifier);
+ localTransactionContext = new NoOpTransactionContext(exception, identifier);
}
for(TransactionOperation oper: txOperationsOnComplete) {
- oper.invoke(transactionContext);
+ oper.invoke(localTransactionContext);
}
txOperationsOnComplete.clear();
+
+ // We're done invoking the TransactionOperations so we can now publish the
+ // TransactionContext.
+ transactionContext = localTransactionContext;
}
}
- private void createValidTransactionContext(CreateTransactionReply reply) {
+ private TransactionContext createValidTransactionContext(CreateTransactionReply reply) {
String transactionPath = reply.getTransactionPath();
LOG.debug("Tx {} Received transaction actor path {}", identifier, transactionPath);
// Check if TxActor is created in the same node
boolean isTxActorLocal = actorContext.isLocalPath(transactionPath);
- transactionContext = new TransactionContextImpl(transactionActor, identifier,
- actorContext, schemaContext, isTxActorLocal);
+ return new TransactionContextImpl(transactionPath, transactionActor, identifier,
+ actorContext, schemaContext, isTxActorLocal, reply.getVersion());
}
}
private final ActorContext actorContext;
private final SchemaContext schemaContext;
+ private final String transactionPath;
private final ActorSelection actor;
private final boolean isTxActorLocal;
+ private final int remoteTransactionVersion;
- private TransactionContextImpl(ActorSelection actor, TransactionIdentifier identifier,
+ private TransactionContextImpl(String transactionPath, ActorSelection actor, TransactionIdentifier identifier,
ActorContext actorContext, SchemaContext schemaContext,
- boolean isTxActorLocal) {
+ boolean isTxActorLocal, int remoteTransactionVersion) {
super(identifier);
+ this.transactionPath = transactionPath;
this.actor = actor;
this.actorContext = actorContext;
this.schemaContext = schemaContext;
this.isTxActorLocal = isTxActorLocal;
+ this.remoteTransactionVersion = remoteTransactionVersion;
}
private ActorSelection getActor() {
} else if(serializedReadyReply.getClass().equals(ReadyTransactionReply.SERIALIZABLE_CLASS)) {
ReadyTransactionReply reply = ReadyTransactionReply.fromSerializable(serializedReadyReply);
- return actorContext.actorSelection(reply.getCohortPath());
+ String cohortPath = reply.getCohortPath();
+
+ // In Helium we used to return the local path of the actor which represented
+ // a remote ThreePhaseCommitCohort. The local path would then be converted to
+ // a remote path using this resolvePath method. To maintain compatibility with
+ // a Helium node we need to continue to do this conversion.
+ // At some point in the future when upgrades from Helium are not supported
+ // we could remove this code to resolvePath and just use the cohortPath as the
+ // resolved cohortPath
+ if(TransactionContextImpl.this.remoteTransactionVersion < CreateTransaction.HELIUM_1_VERSION) {
+ cohortPath = actorContext.resolvePath(transactionPath, cohortPath);
+ }
+
+ return actorContext.actorSelection(cohortPath);
} else {
// Throwing an exception here will fail the Future.
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore.compat;
+
+import org.opendaylight.controller.cluster.common.actor.AbstractUntypedActor;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import akka.event.Logging;
+import akka.event.LoggingAdapter;
+import akka.japi.Creator;
+
+/**
+ * An actor to maintain backwards compatibility for the base Helium version where the 3-phase commit
+ * messages don't contain the transactionId. This actor just forwards a new message containing the
+ * transactionId to the parent Shard.
+ *
+ * @author Thomas Pantelis
+ */
+public class BackwardsCompatibleThreePhaseCommitCohort extends AbstractUntypedActor {
+
+    private final LoggingAdapter LOG = Logging.getLogger(getContext().system(), this);
+
+    // The transactionId to stamp onto the forwarded 3-phase-commit messages, since the
+    // base Helium versions of those messages did not carry it.
+    private final String transactionId;
+
+    private BackwardsCompatibleThreePhaseCommitCohort(String transactionId) {
+        this.transactionId = transactionId;
+    }
+
+    @Override
+    public void handleReceive(Object message) throws Exception {
+        if(message.getClass().equals(CanCommitTransaction.SERIALIZABLE_CLASS)) {
+            LOG.debug("BackwardsCompatibleThreePhaseCommitCohort CanCommitTransaction");
+
+            // Forward preserves the original sender so the parent Shard replies directly.
+            getContext().parent().forward(new CanCommitTransaction(transactionId).toSerializable(),
+                    getContext());
+        } else if(message.getClass().equals(PreCommitTransaction.SERIALIZABLE_CLASS)) {
+            LOG.debug("BackwardsCompatibleThreePhaseCommitCohort PreCommitTransaction");
+
+            // The Shard doesn't need the PreCommitTransaction message so just return the reply here.
+            getSender().tell(new PreCommitTransactionReply().toSerializable(), self());
+        } else if(message.getClass().equals(CommitTransaction.SERIALIZABLE_CLASS)) {
+            LOG.debug("BackwardsCompatibleThreePhaseCommitCohort CommitTransaction");
+
+            getContext().parent().forward(new CommitTransaction(transactionId).toSerializable(),
+                    getContext());
+
+            // We're done now - we can self-destruct
+            self().tell(PoisonPill.getInstance(), self());
+        } else if(message.getClass().equals(AbortTransaction.SERIALIZABLE_CLASS)) {
+            LOG.debug("BackwardsCompatibleThreePhaseCommitCohort AbortTransaction");
+
+            getContext().parent().forward(new AbortTransaction(transactionId).toSerializable(),
+                    getContext());
+            self().tell(PoisonPill.getInstance(), self());
+        }
+        // NOTE(review): messages not matching any of the above are silently dropped here -
+        // confirm AbstractUntypedActor doesn't expect unhandled()/unknownMessage() for them.
+    }
+
+    public static Props props(String transactionId) {
+        return Props.create(new BackwardsCompatibleThreePhaseCommitCohortCreator(transactionId));
+    }
+
+    // Static Creator so Props creation is serializable and holds no enclosing-actor reference.
+    private static class BackwardsCompatibleThreePhaseCommitCohortCreator
+                                  implements Creator<BackwardsCompatibleThreePhaseCommitCohort> {
+        private static final long serialVersionUID = 1L;
+
+        private final String transactionId;
+
+        BackwardsCompatibleThreePhaseCommitCohortCreator(String transactionId) {
+            this.transactionId = transactionId;
+        }
+
+        @Override
+        public BackwardsCompatibleThreePhaseCommitCohort create() throws Exception {
+            return new BackwardsCompatibleThreePhaseCommitCohort(transactionId);
+        }
+    }
+}
import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
public class CanCommitTransactionReply implements SerializableMessage {
- public static Class<ThreePhaseCommitCohortMessages.CanCommitTransactionReply> SERIALIZABLE_CLASS =
+ public static final Class<ThreePhaseCommitCohortMessages.CanCommitTransactionReply> SERIALIZABLE_CLASS =
ThreePhaseCommitCohortMessages.CanCommitTransactionReply.class;
private final Boolean canCommit;
- public CanCommitTransactionReply(Boolean canCommit) {
+ public CanCommitTransactionReply(final Boolean canCommit) {
this.canCommit = canCommit;
}
return ThreePhaseCommitCohortMessages.CanCommitTransactionReply.newBuilder().setCanCommit(canCommit).build();
}
- public static CanCommitTransactionReply fromSerializable(Object message) {
+ public static CanCommitTransactionReply fromSerializable(final Object message) {
return new CanCommitTransactionReply(
((ThreePhaseCommitCohortMessages.CanCommitTransactionReply) message).getCanCommit());
}
import org.opendaylight.controller.protobuff.messages.registration.ListenerRegistrationMessages;
public class CloseDataChangeListenerRegistration implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.CloseDataChangeListenerRegistration.class;
+ public static final Class<ListenerRegistrationMessages.CloseDataChangeListenerRegistration> SERIALIZABLE_CLASS =
+ ListenerRegistrationMessages.CloseDataChangeListenerRegistration.class;
@Override
public Object toSerializable() {
return ListenerRegistrationMessages.CloseDataChangeListenerRegistration.newBuilder().build();
import org.opendaylight.controller.protobuff.messages.registration.ListenerRegistrationMessages;
public class CloseDataChangeListenerRegistrationReply implements SerializableMessage{
- public static Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.CloseDataChangeListenerRegistrationReply.class;
+ public static final Class<ListenerRegistrationMessages.CloseDataChangeListenerRegistrationReply> SERIALIZABLE_CLASS =
+ ListenerRegistrationMessages.CloseDataChangeListenerRegistrationReply.class;
@Override
public Object toSerializable() {
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class CloseTransaction implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.CloseTransaction.class;
+ public static final Class<ShardTransactionMessages.CloseTransaction> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.CloseTransaction.class;
@Override
public Object toSerializable() {
return ShardTransactionMessages.CloseTransaction.newBuilder().build();
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
public class CloseTransactionChain implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS =
+ public static final Class<ShardTransactionChainMessages.CloseTransactionChain> SERIALIZABLE_CLASS =
ShardTransactionChainMessages.CloseTransactionChain.class;
private final String transactionChainId;
- public CloseTransactionChain(String transactionChainId){
+ public CloseTransactionChain(final String transactionChainId){
this.transactionChainId = transactionChainId;
}
.setTransactionChainId(transactionChainId).build();
}
- public static CloseTransactionChain fromSerializable(Object message){
+ public static CloseTransactionChain fromSerializable(final Object message){
ShardTransactionChainMessages.CloseTransactionChain closeTransactionChain
= (ShardTransactionChainMessages.CloseTransactionChain) message;
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionChainMessages;
public class CloseTransactionChainReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionChainMessages.CloseTransactionChainReply.class;
+ public static final Class<ShardTransactionChainMessages.CloseTransactionChainReply> SERIALIZABLE_CLASS =
+ ShardTransactionChainMessages.CloseTransactionChainReply.class;
@Override
public Object toSerializable() {
return ShardTransactionChainMessages.CloseTransactionChainReply.newBuilder().build();
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class CloseTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.CloseTransactionReply.class;
+ public static final Class<ShardTransactionMessages.CloseTransactionReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.CloseTransactionReply.class;
@Override
public Object toSerializable() {
return ShardTransactionMessages.CloseTransactionReply.newBuilder().build();
public class CreateTransaction implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.CreateTransaction.class;
+ public static final Class<ShardTransactionMessages.CreateTransaction> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.CreateTransaction.class;
+
+ public static final int HELIUM_1_VERSION = 1;
+ public static final int CURRENT_VERSION = HELIUM_1_VERSION;
+
private final String transactionId;
private final int transactionType;
private final String transactionChainId;
+ private final int version;
public CreateTransaction(String transactionId, int transactionType) {
this(transactionId, transactionType, "");
}
public CreateTransaction(String transactionId, int transactionType, String transactionChainId) {
+ this(transactionId, transactionType, transactionChainId, CURRENT_VERSION);
+ }
+ private CreateTransaction(String transactionId, int transactionType, String transactionChainId,
+ int version) {
this.transactionId = transactionId;
this.transactionType = transactionType;
this.transactionChainId = transactionChainId;
-
+ this.version = version;
}
-
public String getTransactionId() {
return transactionId;
}
return transactionType;
}
+ public int getVersion() {
+ return version;
+ }
+
@Override
public Object toSerializable() {
return ShardTransactionMessages.CreateTransaction.newBuilder()
.setTransactionId(transactionId)
.setTransactionType(transactionType)
- .setTransactionChainId(transactionChainId).build();
+ .setTransactionChainId(transactionChainId)
+ .setMessageVersion(version).build();
}
public static CreateTransaction fromSerializable(Object message) {
ShardTransactionMessages.CreateTransaction createTransaction =
(ShardTransactionMessages.CreateTransaction) message;
return new CreateTransaction(createTransaction.getTransactionId(),
- createTransaction.getTransactionType(), createTransaction.getTransactionChainId());
+ createTransaction.getTransactionType(), createTransaction.getTransactionChainId(),
+ createTransaction.getMessageVersion());
}
public String getTransactionChainId() {
public class CreateTransactionReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.CreateTransactionReply.class;
+ public static final Class<ShardTransactionMessages.CreateTransactionReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.CreateTransactionReply.class;
private final String transactionPath;
private final String transactionId;
+ private final int version;
- public CreateTransactionReply(String transactionPath,
- String transactionId) {
+ public CreateTransactionReply(final String transactionPath,
+ final String transactionId) {
+ this(transactionPath, transactionId, CreateTransaction.CURRENT_VERSION);
+ }
+
+ public CreateTransactionReply(final String transactionPath,
+ final String transactionId, final int version) {
this.transactionPath = transactionPath;
this.transactionId = transactionId;
+ this.version = version;
}
+
public String getTransactionPath() {
return transactionPath;
}
return transactionId;
}
+ public int getVersion() {
+ return version;
+ }
+
+ @Override
public Object toSerializable(){
return ShardTransactionMessages.CreateTransactionReply.newBuilder()
.setTransactionActorPath(transactionPath)
.setTransactionId(transactionId)
+ .setMessageVersion(version)
.build();
}
- public static CreateTransactionReply fromSerializable(Object serializable){
+ public static CreateTransactionReply fromSerializable(final Object serializable){
ShardTransactionMessages.CreateTransactionReply o = (ShardTransactionMessages.CreateTransactionReply) serializable;
- return new CreateTransactionReply(o.getTransactionActorPath(), o.getTransactionId());
+ return new CreateTransactionReply(o.getTransactionActorPath(), o.getTransactionId(), o.getMessageVersion());
}
}
import org.opendaylight.controller.protobuff.messages.datachange.notification.DataChangeListenerMessages;
public class DataChangedReply implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = DataChangeListenerMessages.DataChangedReply.class;
+ public static final Class<DataChangeListenerMessages.DataChangedReply> SERIALIZABLE_CLASS =
+ DataChangeListenerMessages.DataChangedReply.class;
@Override
public Object toSerializable() {
return DataChangeListenerMessages.DataChangedReply.newBuilder().build();
public class DataExists implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DataExists.class;
+ public static final Class<ShardTransactionMessages.DataExists> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.DataExists.class;
private final YangInstanceIdentifier path;
- public DataExists(YangInstanceIdentifier path) {
+ public DataExists(final YangInstanceIdentifier path) {
this.path = path;
}
InstanceIdentifierUtils.toSerializable(path)).build();
}
- public static DataExists fromSerializable(Object serializable){
+ public static DataExists fromSerializable(final Object serializable){
ShardTransactionMessages.DataExists o = (ShardTransactionMessages.DataExists) serializable;
return new DataExists(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
}
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class DataExistsReply implements SerializableMessage{
-
-
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DataExistsReply.class;
+ public static final Class<ShardTransactionMessages.DataExistsReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.DataExistsReply.class;
private final boolean exists;
- public DataExistsReply(boolean exists) {
+ public DataExistsReply(final boolean exists) {
this.exists = exists;
}
.setExists(exists).build();
}
- public static DataExistsReply fromSerializable(Object serializable){
+ public static DataExistsReply fromSerializable(final Object serializable){
ShardTransactionMessages.DataExistsReply o = (ShardTransactionMessages.DataExistsReply) serializable;
return new DataExistsReply(o.getExists());
}
public class DeleteData implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DeleteData.class;
+ public static final Class<ShardTransactionMessages.DeleteData> SERIALIZABLE_CLASS = ShardTransactionMessages.DeleteData.class;
private final YangInstanceIdentifier path;
- public DeleteData(YangInstanceIdentifier path) {
+ public DeleteData(final YangInstanceIdentifier path) {
this.path = path;
}
.setInstanceIdentifierPathArguments(InstanceIdentifierUtils.toSerializable(path)).build();
}
- public static DeleteData fromSerializable(Object serializable){
+ public static DeleteData fromSerializable(final Object serializable){
ShardTransactionMessages.DeleteData o = (ShardTransactionMessages.DeleteData) serializable;
return new DeleteData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
}
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class DeleteDataReply implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.DeleteDataReply.class;
+ public static final Class<ShardTransactionMessages.DeleteDataReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.DeleteDataReply.class;
@Override
public Object toSerializable() {
return ShardTransactionMessages.DeleteDataReply.newBuilder().build();
private final DOMStoreThreePhaseCommitCohort cohort;
private final Modification modification;
private final boolean returnSerialized;
+ private final int txnClientVersion;
- public ForwardedReadyTransaction(String transactionID, DOMStoreThreePhaseCommitCohort cohort,
- Modification modification, boolean returnSerialized) {
+ public ForwardedReadyTransaction(String transactionID, int txnClientVersion,
+ DOMStoreThreePhaseCommitCohort cohort, Modification modification,
+ boolean returnSerialized) {
this.transactionID = transactionID;
this.cohort = cohort;
this.modification = modification;
this.returnSerialized = returnSerialized;
-
+ this.txnClientVersion = txnClientVersion;
}
public String getTransactionID() {
public boolean isReturnSerialized() {
return returnSerialized;
}
+
+ public int getTxnClientVersion() {
+ return txnClientVersion;
+ }
}
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class MergeDataReply implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.MergeDataReply.class;
+ public static final Class<ShardTransactionMessages.MergeDataReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.MergeDataReply.class;
@Override
public Object toSerializable() {
public class PreCommitTransaction implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransaction.class;
+ public static final Class<ThreePhaseCommitCohortMessages.PreCommitTransaction> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.PreCommitTransaction.class;
@Override
public Object toSerializable() {
public class PreCommitTransactionReply implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ThreePhaseCommitCohortMessages.PreCommitTransactionReply.class;
+ public static final Class<ThreePhaseCommitCohortMessages.PreCommitTransactionReply> SERIALIZABLE_CLASS =
+ ThreePhaseCommitCohortMessages.PreCommitTransactionReply.class;
@Override
public Object toSerializable() {
public class PrimaryFound implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = PrimaryFound.class;
+ public static final Class<PrimaryFound> SERIALIZABLE_CLASS = PrimaryFound.class;
private final String primaryPath;
- public PrimaryFound(String primaryPath) {
+ public PrimaryFound(final String primaryPath) {
this.primaryPath = primaryPath;
}
}
@Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
PrimaryFound that = (PrimaryFound) o;
- if (!primaryPath.equals(that.primaryPath)) return false;
+ if (!primaryPath.equals(that.primaryPath)) {
+ return false;
+ }
return true;
}
return this;
}
- public static PrimaryFound fromSerializable(Object message){
+ public static PrimaryFound fromSerializable(final Object message){
return (PrimaryFound) message;
}
}
import com.google.common.base.Preconditions;
public class PrimaryNotFound implements SerializableMessage {
- public static final Class SERIALIZABLE_CLASS = PrimaryNotFound.class;
+ public static final Class<PrimaryNotFound> SERIALIZABLE_CLASS = PrimaryNotFound.class;
private final String shardName;
- public PrimaryNotFound(String shardName){
+ public PrimaryNotFound(final String shardName){
Preconditions.checkNotNull(shardName, "shardName should not be null");
}
@Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
PrimaryNotFound that = (PrimaryNotFound) o;
- if (shardName != null ? !shardName.equals(that.shardName) : that.shardName != null) return false;
+ if (shardName != null ? !shardName.equals(that.shardName) : that.shardName != null) {
+ return false;
+ }
return true;
}
return this;
}
- public static PrimaryNotFound fromSerializable(Object message){
+ public static PrimaryNotFound fromSerializable(final Object message){
return (PrimaryNotFound) message;
}
}
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
public class ReadData {
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.ReadData.class;
+ public static final Class<ShardTransactionMessages.ReadData> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.ReadData.class;
private final YangInstanceIdentifier path;
- public ReadData(YangInstanceIdentifier path) {
+ public ReadData(final YangInstanceIdentifier path) {
this.path = path;
}
.build();
}
- public static ReadData fromSerializable(Object serializable){
+ public static ReadData fromSerializable(final Object serializable){
ShardTransactionMessages.ReadData o = (ShardTransactionMessages.ReadData) serializable;
return new ReadData(InstanceIdentifierUtils.fromSerializable(o.getInstanceIdentifierPathArguments()));
}
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class ReadyTransaction implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.ReadyTransaction.class;
+ public static final Class<ShardTransactionMessages.ReadyTransaction> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.ReadyTransaction.class;
@Override
public Object toSerializable() {
private final String cohortPath;
public ReadyTransactionReply(String cohortPath) {
-
this.cohortPath = cohortPath;
}
@Override
public ShardTransactionMessages.ReadyTransactionReply toSerializable() {
- return ShardTransactionMessages.ReadyTransactionReply.newBuilder().
- setActorPath(cohortPath).build();
+ return ShardTransactionMessages.ReadyTransactionReply.newBuilder()
+ .setActorPath(cohortPath)
+ .build();
}
public static ReadyTransactionReply fromSerializable(Object serializable) {
import org.opendaylight.controller.protobuff.messages.registration.ListenerRegistrationMessages;
public class RegisterChangeListenerReply implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ListenerRegistrationMessages.RegisterChangeListenerReply.class;
+ public static final Class<ListenerRegistrationMessages.RegisterChangeListenerReply> SERIALIZABLE_CLASS =
+ ListenerRegistrationMessages.RegisterChangeListenerReply.class;
private final ActorPath listenerRegistrationPath;
- public RegisterChangeListenerReply(ActorPath listenerRegistrationPath) {
+ public RegisterChangeListenerReply(final ActorPath listenerRegistrationPath) {
this.listenerRegistrationPath = listenerRegistrationPath;
}
.setListenerRegistrationPath(listenerRegistrationPath.toString()).build();
}
- public static RegisterChangeListenerReply fromSerializable(ActorSystem actorSystem,Object serializable){
+ public static RegisterChangeListenerReply fromSerializable(final ActorSystem actorSystem,final Object serializable){
ListenerRegistrationMessages.RegisterChangeListenerReply o = (ListenerRegistrationMessages.RegisterChangeListenerReply) serializable;
return new RegisterChangeListenerReply(
actorSystem.actorFor(o.getListenerRegistrationPath()).path()
import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
public class WriteDataReply implements SerializableMessage{
- public static final Class SERIALIZABLE_CLASS = ShardTransactionMessages.WriteDataReply.class;
+ public static final Class<ShardTransactionMessages.WriteDataReply> SERIALIZABLE_CLASS =
+ ShardTransactionMessages.WriteDataReply.class;
@Override
public Object toSerializable() {
return ShardTransactionMessages.WriteDataReply.newBuilder().build();
* MergeModification stores all the parameters required to merge data into the specified path
*/
public class MergeModification extends WriteModification {
+ private static final long serialVersionUID = 1L;
- public MergeModification(YangInstanceIdentifier path, NormalizedNode data,
- SchemaContext schemaContext) {
+ public MergeModification(final YangInstanceIdentifier path, final NormalizedNode<?, ?> data,
+ final SchemaContext schemaContext) {
super(path, data, schemaContext);
}
@Override
- public void apply(DOMStoreWriteTransaction transaction) {
+ public void apply(final DOMStoreWriteTransaction transaction) {
transaction.merge(path, data);
}
- public static MergeModification fromSerializable(Object serializable, SchemaContext schemaContext) {
+ public static MergeModification fromSerializable(final Object serializable, final SchemaContext schemaContext) {
PersistentMessages.Modification o = (PersistentMessages.Modification) serializable;
Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(o.getPath(), o.getData());
return new MergeModification(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
* WriteModification stores all the parameters required to write data to the specified path
*/
public class WriteModification extends AbstractModification {
-
- protected final NormalizedNode data;
+ private static final long serialVersionUID = 1L;
+ protected final NormalizedNode<?, ?> data;
private final SchemaContext schemaContext;
- public WriteModification(YangInstanceIdentifier path, NormalizedNode data, SchemaContext schemaContext) {
+ public WriteModification(final YangInstanceIdentifier path, final NormalizedNode<?, ?> data, final SchemaContext schemaContext) {
super(path);
this.data = data;
this.schemaContext = schemaContext;
}
@Override
- public void apply(DOMStoreWriteTransaction transaction) {
+ public void apply(final DOMStoreWriteTransaction transaction) {
transaction.write(path, data);
}
- public NormalizedNode getData() {
+ public NormalizedNode<?, ?> getData() {
return data;
}
.build();
}
- public static WriteModification fromSerializable(Object serializable, SchemaContext schemaContext) {
+ public static WriteModification fromSerializable(final Object serializable, final SchemaContext schemaContext) {
PersistentMessages.Modification o = (PersistentMessages.Modification) serializable;
Decoded decoded = new NormalizedNodeToNodeCodec(schemaContext).decode(o.getPath(), o.getData());
return new WriteModification(decoded.getDecodedPath(), decoded.getDecodedNode(), schemaContext);
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
-import org.opendaylight.controller.cluster.datastore.Configuration;
-import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import org.opendaylight.controller.cluster.datastore.Configuration;
+import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
public class ShardStrategyFactory {
private static Map<String, ShardStrategy> moduleNameToStrategyMap =
- new ConcurrentHashMap();
+ new ConcurrentHashMap<>();
private static final String UNKNOWN_MODULE_NAME = "unknown";
private static Configuration configuration;
- public static void setConfiguration(Configuration configuration){
+ public static void setConfiguration(final Configuration configuration){
ShardStrategyFactory.configuration = configuration;
moduleNameToStrategyMap = configuration.getModuleNameToShardStrategyMap();
}
- public static ShardStrategy getStrategy(YangInstanceIdentifier path) {
+ public static ShardStrategy getStrategy(final YangInstanceIdentifier path) {
Preconditions.checkState(configuration != null, "configuration should not be missing");
Preconditions.checkNotNull(path, "path should not be null");
}
- private static String getModuleName(YangInstanceIdentifier path) {
+ private static String getModuleName(final YangInstanceIdentifier path) {
String namespace = path.getPathArguments().iterator().next().getNodeType().getNamespace().toASCIIString();
Optional<String> optional =
return hostPort1.equals(hostPort2);
}
+
+    /**
+     * Resolves the local path of a remote actor against the given primary path,
+     * keeping the primary's address portion (protocol, actor system, host:port)
+     * and appending the remote actor's path elements.
+     *
+     * @deprecated This method is present only to support backward compatibility
+     *             with Helium and should not be used any further.
+     *
+     * @param primaryPath the full path of the primary actor, including its address
+     * @param localPathOfRemoteActor the local path of the remote actor
+     * @return the remote actor's path resolved against the primary's address
+     */
+ @Deprecated
+ public String resolvePath(final String primaryPath,
+ final String localPathOfRemoteActor) {
+ StringBuilder builder = new StringBuilder();
+ String[] primaryPathElements = primaryPath.split("/");
+ builder.append(primaryPathElements[0]).append("//")
+ .append(primaryPathElements[1]).append(primaryPathElements[2]);
+ String[] remotePathElements = localPathOfRemoteActor.split("/");
+ for (int i = 3; i < remotePathElements.length; i++) {
+ builder.append("/").append(remotePathElements[i]);
+ }
+
+ return builder.toString();
+ }
}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import com.typesafe.config.ConfigFactory;
-import junit.framework.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
import java.io.File;
import java.util.List;
import java.util.Set;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
public class ConfigurationImplTest {
import akka.actor.ActorRef;
import akka.actor.Props;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
public class DataChangeListenerProxyTest extends AbstractActorTest {
private static class MockDataChangedEvent implements AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> createdData = new HashMap();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> updatedData = new HashMap();
- Map<YangInstanceIdentifier,NormalizedNode<?,?>> originalData = new HashMap();
+ Map<YangInstanceIdentifier,NormalizedNode<?,?>> createdData = new HashMap<>();
+ Map<YangInstanceIdentifier,NormalizedNode<?,?>> updatedData = new HashMap<>();
+ Map<YangInstanceIdentifier,NormalizedNode<?,?>> originalData = new HashMap<>();
Assert.assertTrue(messages instanceof List);
- List<Object> listMessages = (List<Object>) messages;
+ List<?> listMessages = (List<?>) messages;
Assert.assertEquals(1, listMessages.size());
import akka.actor.DeadLetter;
import akka.actor.Props;
import akka.testkit.JavaTestKit;
+import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.opendaylight.controller.cluster.datastore.messages.DataChanged;
final Props props = DataChangeListener.props(mockListener);
final ActorRef subject = getSystem().actorOf(props, "testDataChangedWithNoSender");
- // Let the DataChangeListener know that notifications should be enabled
- subject.tell(new EnableNotification(true), ActorRef.noSender());
+ getSystem().eventStream().subscribe(getRef(), DeadLetter.class);
subject.tell(new DataChanged(CompositeModel.createTestContext(), mockChangeEvent),
ActorRef.noSender());
- getSystem().eventStream().subscribe(getRef(), DeadLetter.class);
- new Within(duration("1 seconds")) {
- @Override
- protected void run() {
- expectNoMsg();
+ // Make sure no DataChangedReply is sent to DeadLetters.
+ while(true) {
+ DeadLetter deadLetter;
+ try {
+ deadLetter = expectMsgClass(duration("1 seconds"), DeadLetter.class);
+ } catch (AssertionError e) {
+ // Timed out - got no DeadLetter - this is good
+ break;
}
- };
+
+ // We may get DeadLetters for other messages we don't care about.
+ Assert.assertFalse("Unexpected DataChangedReply",
+ deadLetter.message() instanceof DataChangedReply);
+ }
}};
}
}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.PoisonPill;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.Uninterruptibles;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
import org.opendaylight.controller.cluster.datastore.exceptions.NotInitializedException;
import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
+import org.opendaylight.yangtools.yang.data.api.schema.MapNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
public class DistributedDataStoreIntegrationTest extends AbstractActorTest {
DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
- Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
- assertEquals("canCommit", true, canCommit);
- cohort.preCommit().get(5, TimeUnit.SECONDS);
- cohort.commit().get(5, TimeUnit.SECONDS);
+ doCommit(cohort);
// Verify the data in the store
// 5. Commit the Tx
- Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
- assertEquals("canCommit", true, canCommit);
- cohort.preCommit().get(5, TimeUnit.SECONDS);
- cohort.commit().get(5, TimeUnit.SECONDS);
+ doCommit(cohort);
// 6. Verify the data in the store
// Wait for the Tx commit to complete.
- assertEquals("canCommit", true, txCohort.get().canCommit().get(5, TimeUnit.SECONDS));
- txCohort.get().preCommit().get(5, TimeUnit.SECONDS);
- txCohort.get().commit().get(5, TimeUnit.SECONDS);
+ doCommit(txCohort.get());
// Verify the data in the store
@Test
public void testTransactionChain() throws Exception{
- System.setProperty("shard.persistent", "true");
new IntegrationTestKit(getSystem()) {{
- DistributedDataStore dataStore =
- setupDistributedDataStore("transactionChainIntegrationTest", "test-1");
+ DistributedDataStore dataStore = setupDistributedDataStore("testTransactionChain", "test-1");
// 1. Create a Tx chain and write-only Tx
// 2. Write some data
- NormalizedNode<?, ?> containerNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
- writeTx.write(TestModel.TEST_PATH, containerNode);
+ NormalizedNode<?, ?> testNode = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+ writeTx.write(TestModel.TEST_PATH, testNode);
// 3. Ready the Tx for commit
- DOMStoreThreePhaseCommitCohort cohort = writeTx.ready();
+ final DOMStoreThreePhaseCommitCohort cohort1 = writeTx.ready();
- // 4. Commit the Tx
+ // 4. Commit the Tx on another thread that first waits for the second read Tx.
- Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
- assertEquals("canCommit", true, canCommit);
- cohort.preCommit().get(5, TimeUnit.SECONDS);
- cohort.commit().get(5, TimeUnit.SECONDS);
+ final CountDownLatch continueCommit1 = new CountDownLatch(1);
+ final CountDownLatch commit1Done = new CountDownLatch(1);
+ final AtomicReference<Exception> commit1Error = new AtomicReference<>();
+ new Thread() {
+ @Override
+ public void run() {
+ try {
+ continueCommit1.await();
+ doCommit(cohort1);
+ } catch (Exception e) {
+ commit1Error.set(e);
+ } finally {
+ commit1Done.countDown();
+ }
+ }
+ }.start();
- // 5. Verify the data in the store
+ // 5. Create a new read Tx from the chain to read and verify the data from the first
+ // Tx is visible after being readied.
DOMStoreReadTransaction readTx = txChain.newReadOnlyTransaction();
-
Optional<NormalizedNode<?, ?>> optional = readTx.read(TestModel.TEST_PATH).get(5, TimeUnit.SECONDS);
assertEquals("isPresent", true, optional.isPresent());
- assertEquals("Data node", containerNode, optional.get());
+ assertEquals("Data node", testNode, optional.get());
+
+ // 6. Create a new RW Tx from the chain, write more data, and ready it
+
+ DOMStoreReadWriteTransaction rwTx = txChain.newReadWriteTransaction();
+ MapNode outerNode = ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build();
+ rwTx.write(TestModel.OUTER_LIST_PATH, outerNode);
+
+ DOMStoreThreePhaseCommitCohort cohort2 = rwTx.ready();
+
+ // 7. Create a new read Tx from the chain to read the data from the last RW Tx to
+ // verify it is visible.
+
+ readTx = txChain.newReadOnlyTransaction();
+ optional = readTx.read(TestModel.OUTER_LIST_PATH).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", outerNode, optional.get());
+
+ // 8. Wait for the 2 commits to complete and close the chain.
+
+ continueCommit1.countDown();
+ Uninterruptibles.awaitUninterruptibly(commit1Done, 5, TimeUnit.SECONDS);
+
+ if(commit1Error.get() != null) {
+ throw commit1Error.get();
+ }
+
+ doCommit(cohort2);
txChain.close();
+ // 9. Create a new read Tx from the data store and verify committed data.
+
+ readTx = dataStore.newReadOnlyTransaction();
+ optional = readTx.read(TestModel.OUTER_LIST_PATH).get(5, TimeUnit.SECONDS);
+ assertEquals("isPresent", true, optional.isPresent());
+ assertEquals("Data node", outerNode, optional.get());
+
cleanup(dataStore);
}};
}
DistributedDataStore dataStore =
setupDistributedDataStore("testChangeListenerRegistration", "test-1");
- MockDataChangeListener listener = new MockDataChangeListener(3);
+ testWriteTransaction(dataStore, TestModel.TEST_PATH,
+ ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+
+ MockDataChangeListener listener = new MockDataChangeListener(1);
ListenerRegistration<MockDataChangeListener>
listenerReg = dataStore.registerChangeListener(TestModel.TEST_PATH, listener,
assertNotNull("registerChangeListener returned null", listenerReg);
- testWriteTransaction(dataStore, TestModel.TEST_PATH,
- ImmutableNodes.containerNode(TestModel.TEST_QNAME));
+ // Wait for the initial notification
+
+ listener.waitForChangeEvents(TestModel.TEST_PATH);
+
+ listener.reset(2);
+
+ // Write 2 updates.
testWriteTransaction(dataStore, TestModel.OUTER_LIST_PATH,
ImmutableNodes.mapNodeBuilder(TestModel.OUTER_LIST_QNAME).build());
testWriteTransaction(dataStore, listPath,
ImmutableNodes.mapEntry(TestModel.OUTER_LIST_QNAME, TestModel.ID_QNAME, 1));
- listener.waitForChangeEvents(TestModel.TEST_PATH, TestModel.OUTER_LIST_PATH, listPath );
+ // Wait for the 2 updates.
+
+ listener.waitForChangeEvents(TestModel.OUTER_LIST_PATH, listPath);
listenerReg.close();
// 4. Commit the Tx
- Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
- assertEquals("canCommit", true, canCommit);
- cohort.preCommit().get(5, TimeUnit.SECONDS);
- cohort.commit().get(5, TimeUnit.SECONDS);
+ doCommit(cohort);
// 5. Verify the data in the store
assertEquals("Data node", nodeToWrite, optional.get());
}
+ void doCommit(final DOMStoreThreePhaseCommitCohort cohort) throws Exception {
+ Boolean canCommit = cohort.canCommit().get(5, TimeUnit.SECONDS);
+ assertEquals("canCommit", true, canCommit);
+ cohort.preCommit().get(5, TimeUnit.SECONDS);
+ cohort.commit().get(5, TimeUnit.SECONDS);
+ }
+
void cleanup(DistributedDataStore dataStore) {
dataStore.getActorContext().getShardManager().tell(PoisonPill.getInstance(), null);
}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.mock;
+import static org.opendaylight.controller.cluster.datastore.messages.CreateTransaction.CURRENT_VERSION;
import akka.actor.ActorRef;
import akka.actor.PoisonPill;
import akka.actor.Props;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Uninterruptibles;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.inOrder;
-import static org.mockito.Mockito.mock;
-
public class ShardTest extends AbstractActorTest {
}};
}
+ @SuppressWarnings("serial")
@Test
public void testPeerAddressResolved() throws Exception {
new ShardTestKit(getSystem()) {{
shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
}
- private CompositeModificationPayload newPayload(Modification... mods) {
+ private CompositeModificationPayload newPayload(final Modification... mods) {
MutableCompositeModification compMod = new MutableCompositeModification();
for(Modification mod: mods) {
compMod.addModification(mod);
return new CompositeModificationPayload(compMod.toSerializable());
}
- private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
- InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
- MutableCompositeModification modification) {
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(final String cohortName,
+ final InMemoryDOMDataStore dataStore, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data,
+ final MutableCompositeModification modification) {
return setupMockWriteTransaction(cohortName, dataStore, path, data, modification, null);
}
- private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(String cohortName,
- InMemoryDOMDataStore dataStore, YangInstanceIdentifier path, NormalizedNode data,
- MutableCompositeModification modification,
+ private DOMStoreThreePhaseCommitCohort setupMockWriteTransaction(final String cohortName,
+ final InMemoryDOMDataStore dataStore, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data,
+ final MutableCompositeModification modification,
final Function<DOMStoreThreePhaseCommitCohort,ListenableFuture<Void>> preCommit) {
DOMStoreWriteTransaction tx = dataStore.newWriteOnlyTransaction();
doAnswer(new Answer<ListenableFuture<Boolean>>() {
@Override
- public ListenableFuture<Boolean> answer(InvocationOnMock invocation) {
+ public ListenableFuture<Boolean> answer(final InvocationOnMock invocation) {
return realCohort.canCommit();
}
}).when(cohort).canCommit();
doAnswer(new Answer<ListenableFuture<Void>>() {
@Override
- public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
if(preCommit != null) {
return preCommit.apply(realCohort);
} else {
doAnswer(new Answer<ListenableFuture<Void>>() {
@Override
- public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
return realCohort.commit();
}
}).when(cohort).commit();
doAnswer(new Answer<ListenableFuture<Void>>() {
@Override
- public ListenableFuture<Void> answer(InvocationOnMock invocation) throws Throwable {
+ public ListenableFuture<Void> answer(final InvocationOnMock invocation) throws Throwable {
return realCohort.abort();
}
}).when(cohort).abort();
// Simulate the ForwardedReadyTransaction message for the first Tx that would be sent
// by the ShardTransaction.
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
+ cohort1, modification1, true), getRef());
ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
assertEquals("Cohort path", shard.path().toString(), readyReply.getCohortPath());
// Send the ForwardedReadyTransaction for the next 2 Tx's.
- shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
+ cohort2, modification2, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID3, CURRENT_VERSION,
+ cohort3, modification3, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// Send the CanCommitTransaction message for the next 2 Tx's. These should get queued and
class OnFutureComplete extends OnComplete<Object> {
private final Class<?> expRespType;
- OnFutureComplete(Class<?> expRespType) {
+ OnFutureComplete(final Class<?> expRespType) {
this.expRespType = expRespType;
}
@Override
- public void onComplete(Throwable error, Object resp) {
+ public void onComplete(final Throwable error, final Object resp) {
if(error != null) {
caughtEx.set(new AssertionError(getClass().getSimpleName() + " failure", error));
} else {
}
}
- void onSuccess(Object resp) throws Exception {
+ void onSuccess(final Object resp) throws Exception {
}
}
}
@Override
- public void onComplete(Throwable error, Object resp) {
+ public void onComplete(final Throwable error, final Object resp) {
super.onComplete(error, resp);
commitLatch.countDown();
}
class OnCanCommitFutureComplete extends OnFutureComplete {
private final String transactionID;
- OnCanCommitFutureComplete(String transactionID) {
+ OnCanCommitFutureComplete(final String transactionID) {
super(CanCommitTransactionReply.SERIALIZABLE_CLASS);
this.transactionID = transactionID;
}
@Override
- void onSuccess(Object resp) throws Exception {
+ void onSuccess(final Object resp) throws Exception {
CanCommitTransactionReply canCommitReply =
CanCommitTransactionReply.fromSerializable(resp);
assertEquals("Can commit", true, canCommitReply.getCanCommit());
// Simulate the ForwardedReadyTransaction messages that would be sent
// by the ShardTransaction.
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
+ cohort1, modification1, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
+ cohort2, modification2, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// Send the CanCommitTransaction message for the first Tx.
final CountDownLatch latch = new CountDownLatch(1);
canCommitFuture.onComplete(new OnComplete<Object>() {
@Override
- public void onComplete(Throwable t, Object resp) {
+ public void onComplete(final Throwable t, final Object resp) {
latch.countDown();
}
}, getSystem().dispatcher());
// Simulate the ForwardedReadyTransaction messages that would be sent
// by the ShardTransaction.
- shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// Send the CanCommitTransaction message.
// Simulate the ForwardedReadyTransaction messages that would be sent
// by the ShardTransaction.
- shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// Send the CanCommitTransaction message.
new AbortTransaction(transactionID).toSerializable(), timeout);
abortFuture.onComplete(new OnComplete<Object>() {
@Override
- public void onComplete(Throwable e, Object resp) {
+ public void onComplete(final Throwable e, final Object resp) {
abortComplete.countDown();
}
}, getSystem().dispatcher());
TestModel.TEST_PATH, ImmutableNodes.containerNode(TestModel.TEST_QNAME),
modification, preCommit);
- shard.tell(new ForwardedReadyTransaction(transactionID, cohort, modification, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID, CURRENT_VERSION,
+ cohort, modification, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
shard.tell(new CanCommitTransaction(transactionID).toSerializable(), getRef());
// Ready the Tx's
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
+ cohort1, modification1, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
+ cohort2, modification2, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// canCommit 1st Tx. We don't send the commit so it should timeout.
// Ready the Tx's
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
+ cohort1, modification1, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
+ cohort2, modification2, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID3, cohort3, modification3, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID3, CURRENT_VERSION,
+ cohort3, modification3, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// canCommit 1st Tx.
// Simulate the ForwardedReadyTransaction messages that would be sent
// by the ShardTransaction.
- shard.tell(new ForwardedReadyTransaction(transactionID1, cohort1, modification1, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID1, CURRENT_VERSION,
+ cohort1, modification1, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
- shard.tell(new ForwardedReadyTransaction(transactionID2, cohort2, modification2, true), getRef());
+ shard.tell(new ForwardedReadyTransaction(transactionID2, CURRENT_VERSION,
+ cohort2, modification2, true), getRef());
expectMsgClass(duration, ReadyTransactionReply.SERIALIZABLE_CLASS);
// Send the CanCommitTransaction message for the first Tx.
final CountDownLatch latch = new CountDownLatch(1);
canCommitFuture.onComplete(new OnComplete<Object>() {
@Override
- public void onComplete(Throwable t, Object resp) {
+ public void onComplete(final Throwable t, final Object resp) {
latch.countDown();
}
}, getSystem().dispatcher());
testCreateSnapshot(false, "testCreateSnapshotWithNonPersistentData");
}
- public void testCreateSnapshot(boolean persistent, final String shardActorName) throws IOException, InterruptedException {
+ @SuppressWarnings("serial")
+ public void testCreateSnapshot(final boolean persistent, final String shardActorName) throws IOException, InterruptedException {
final DatastoreContext dataStoreContext = DatastoreContext.newBuilder().
shardJournalRecoveryLogBatchSize(3).shardSnapshotBatchCount(5000).persistent(persistent).build();
return new Shard(shardID, Collections.<ShardIdentifier,String>emptyMap(),
dataStoreContext, SCHEMA_CONTEXT) {
@Override
- protected void commitSnapshot(long sequenceNumber) {
+ protected void commitSnapshot(final long sequenceNumber) {
super.commitSnapshot(sequenceNumber);
latch.get().countDown();
}
commitTransaction(putTransaction);
- NormalizedNode expected = readStore(store);
+ NormalizedNode<?, ?> expected = readStore(store);
DOMStoreWriteTransaction writeTransaction = store.newWriteOnlyTransaction();
commitTransaction(writeTransaction);
- NormalizedNode actual = readStore(store);
+ NormalizedNode<?, ?> actual = readStore(store);
assertEquals(expected, actual);
}
- private NormalizedNode readStore(InMemoryDOMDataStore store) throws ReadFailedException {
+ private NormalizedNode<?, ?> readStore(final InMemoryDOMDataStore store) throws ReadFailedException {
DOMStoreReadTransaction transaction = store.newReadOnlyTransaction();
CheckedFuture<Optional<NormalizedNode<?, ?>>, ReadFailedException> read =
transaction.read(YangInstanceIdentifier.builder().build());
return normalizedNode;
}
- private void commitTransaction(DOMStoreWriteTransaction transaction) {
+ private void commitTransaction(final DOMStoreWriteTransaction transaction) {
DOMStoreThreePhaseCommitCohort commitCohort = transaction.ready();
ListenableFuture<Void> future =
commitCohort.preCommit();
return new AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>>() {
@Override
public void onDataChanged(
- AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
+ final AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>> change) {
}
};
}
- private NormalizedNode<?,?> readStore(TestActorRef<Shard> shard, YangInstanceIdentifier id)
+ static NormalizedNode<?,?> readStore(final TestActorRef<Shard> shard, final YangInstanceIdentifier id)
throws ExecutionException, InterruptedException {
DOMStoreReadTransaction transaction = shard.underlyingActor().getDataStore().newReadOnlyTransaction();
return node;
}
- private void writeToStore(TestActorRef<Shard> shard, YangInstanceIdentifier id, NormalizedNode<?,?> node)
+ private void writeToStore(final TestActorRef<Shard> shard, final YangInstanceIdentifier id, final NormalizedNode<?,?> node)
throws ExecutionException, InterruptedException {
DOMStoreWriteTransaction transaction = shard.underlyingActor().getDataStore().newWriteOnlyTransaction();
commitCohort.commit().get();
}
+ @SuppressWarnings("serial")
private static final class DelegatingShardCreator implements Creator<Shard> {
private final Creator<Shard> delegate;
- DelegatingShardCreator(Creator<Shard> delegate) {
+ DelegatingShardCreator(final Creator<Shard> delegate) {
this.delegate = delegate;
}
import akka.testkit.TestActorRef;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.node.utils.serialization.NormalizedNodeSerializer;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
-import java.util.Collections;
-import java.util.concurrent.TimeUnit;
/**
* Covers negative test cases
}
private ActorRef createShard(){
- return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.EMPTY_MAP, datastoreContext,
+ return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER, Collections.<ShardIdentifier, String>emptyMap(), datastoreContext,
TestModel.createTestContext()));
}
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props, "testNegativeMergeTransactionReady");
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> subject = TestActorRef
.create(getSystem(), props,
--- /dev/null
+/*
+ * Copyright (c) 2014 Brocade Communications Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.cluster.datastore;
+
+import java.util.Collections;
+import org.junit.Assert;
+import org.junit.Test;
+import org.opendaylight.controller.cluster.datastore.identifiers.ShardIdentifier;
+import org.opendaylight.controller.cluster.datastore.messages.AbortTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CanCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.PreCommitTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransaction;
+import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.WriteData;
+import org.opendaylight.controller.cluster.datastore.messages.WriteDataReply;
+import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
+import org.opendaylight.controller.protobuff.messages.cohort3pc.ThreePhaseCommitCohortMessages;
+import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages;
+import org.opendaylight.controller.protobuff.messages.transaction.ShardTransactionMessages.CreateTransactionReply;
+import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
+import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+import scala.concurrent.duration.FiniteDuration;
+import akka.actor.ActorRef;
+import akka.actor.ActorSelection;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import akka.dispatch.Dispatchers;
+import akka.testkit.TestActorRef;
+
+/**
+ * Tests backwards compatibility support from Helium-1 to Helium.
+ *
+ * In Helium-1, the 3-phase commit support was moved from the ThreePhaseCommitCohort actor to the
+ * Shard. As a consequence, a new transactionId field was added to the CanCommitTransaction,
+ * CommitTransaction and AbortTransaction messages. With a base Helium version node, these messages
+ * would be sans transactionId so this test verifies the Shard handles that properly.
+ *
+ * Both tests drive a real Shard actor through the write/ready/3PC message exchange using raw
+ * protobuf messages (no wrapper classes), exactly as an older remote node would send them.
+ *
+ * @author Thomas Pantelis
+ */
+public class ShardTransactionHeliumBackwardsCompatibilityTest extends AbstractActorTest {
+
+    /**
+     * Verifies a full commit sequence (can-commit, pre-commit, commit) succeeds when the
+     * 3PC messages omit the transactionId, and that the written data is visible afterwards.
+     */
+    @Test
+    public void testTransactionCommit() throws Exception {
+        new ShardTestKit(getSystem()) {{
+            SchemaContext schemaContext = TestModel.createTestContext();
+            Props shardProps = Shard.props(ShardIdentifier.builder().memberName("member-1").
+                    shardName("inventory").type("config").build(),
+                    Collections.<ShardIdentifier,String>emptyMap(),
+                    DatastoreContext.newBuilder().shardHeartbeatIntervalInMillis(100).build(),
+                    schemaContext).withDispatcher(Dispatchers.DefaultDispatcherId());
+
+            final TestActorRef<Shard> shard = TestActorRef.create(getSystem(), shardProps,
+                    "testTransactionCommit");
+
+            waitUntilLeader(shard);
+
+            // Send CreateTransaction message with no message version set, as a base
+            // Helium node would (the version field was introduced later).
+
+            String transactionID = "txn-1";
+            shard.tell(ShardTransactionMessages.CreateTransaction.newBuilder()
+                    .setTransactionId(transactionID)
+                    .setTransactionType(TransactionProxy.TransactionType.WRITE_ONLY.ordinal())
+                    .setTransactionChainId("").build(), getRef());
+
+            final FiniteDuration duration = duration("5 seconds");
+
+            CreateTransactionReply reply = expectMsgClass(duration, CreateTransactionReply.class);
+
+            ActorSelection txActor = getSystem().actorSelection(reply.getTransactionActorPath());
+
+            // Write data to the Tx
+
+            txActor.tell(new WriteData(TestModel.TEST_PATH,
+                    ImmutableNodes.containerNode(TestModel.TEST_QNAME), schemaContext), getRef());
+
+            expectMsgClass(duration, WriteDataReply.class);
+
+            // Ready the Tx
+
+            txActor.tell(new ReadyTransaction().toSerializable(), getRef());
+
+            ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(expectMsgClass(
+                    duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
+
+            ActorSelection cohortActor = getSystem().actorSelection(readyReply.getCohortPath());
+
+            // Send the CanCommitTransaction message with no transactionId.
+
+            cohortActor.tell(ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().build(),
+                    getRef());
+
+            expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+            // Send the PreCommitTransaction message with no transactionId.
+
+            cohortActor.tell(ThreePhaseCommitCohortMessages.PreCommitTransaction.newBuilder().build(),
+                    getRef());
+
+            expectMsgClass(duration, PreCommitTransactionReply.SERIALIZABLE_CLASS);
+
+            // Send the CommitTransaction message with no transactionId.
+
+            cohortActor.tell(ThreePhaseCommitCohortMessages.CommitTransaction.newBuilder().build(),
+                    getRef());
+
+            expectMsgClass(duration, CommitTransactionReply.SERIALIZABLE_CLASS);
+
+            // Verify the write above was actually committed to the shard's store.
+
+            NormalizedNode<?, ?> node = ShardTest.readStore(shard, TestModel.TEST_PATH);
+            Assert.assertNotNull("Data not found in store", node);
+
+            // Clean up the shard actor.
+            shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+        }};
+    }
+
+    /**
+     * Verifies the abort path: after a can-commit with no transactionId, an
+     * AbortTransaction message sans transactionId is handled and acknowledged.
+     */
+    @Test
+    public void testTransactionAbort() throws Exception {
+        new ShardTestKit(getSystem()) {{
+            SchemaContext schemaContext = TestModel.createTestContext();
+            Props shardProps = Shard.props(ShardIdentifier.builder().memberName("member-1").
+                    shardName("inventory").type("config").build(),
+                    Collections.<ShardIdentifier,String>emptyMap(),
+                    DatastoreContext.newBuilder().shardHeartbeatIntervalInMillis(100).build(),
+                    schemaContext).withDispatcher(Dispatchers.DefaultDispatcherId());
+
+            final TestActorRef<Shard> shard = TestActorRef.create(getSystem(), shardProps,
+                    "testTransactionAbort");
+
+            waitUntilLeader(shard);
+
+            // Send CreateTransaction message with no message version set, as a base
+            // Helium node would (the version field was introduced later).
+
+            String transactionID = "txn-1";
+            shard.tell(ShardTransactionMessages.CreateTransaction.newBuilder()
+                    .setTransactionId(transactionID)
+                    .setTransactionType(TransactionProxy.TransactionType.WRITE_ONLY.ordinal())
+                    .setTransactionChainId("").build(), getRef());
+
+            final FiniteDuration duration = duration("5 seconds");
+
+            CreateTransactionReply reply = expectMsgClass(duration, CreateTransactionReply.class);
+
+            ActorSelection txActor = getSystem().actorSelection(reply.getTransactionActorPath());
+
+            // Write data to the Tx
+
+            txActor.tell(new WriteData(TestModel.TEST_PATH,
+                    ImmutableNodes.containerNode(TestModel.TEST_QNAME), schemaContext), getRef());
+
+            expectMsgClass(duration, WriteDataReply.class);
+
+            // Ready the Tx
+
+            txActor.tell(new ReadyTransaction().toSerializable(), getRef());
+
+            ReadyTransactionReply readyReply = ReadyTransactionReply.fromSerializable(expectMsgClass(
+                    duration, ReadyTransactionReply.SERIALIZABLE_CLASS));
+
+            ActorSelection cohortActor = getSystem().actorSelection(readyReply.getCohortPath());
+
+            // Send the CanCommitTransaction message with no transactionId.
+
+            cohortActor.tell(ThreePhaseCommitCohortMessages.CanCommitTransaction.newBuilder().build(),
+                    getRef());
+
+            expectMsgClass(duration, CanCommitTransactionReply.SERIALIZABLE_CLASS);
+
+            // Send the AbortTransaction message with no transactionId.
+
+            cohortActor.tell(ThreePhaseCommitCohortMessages.AbortTransaction.newBuilder().build(),
+                    getRef());
+
+            expectMsgClass(duration, AbortTransactionReply.SERIALIZABLE_CLASS);
+
+            // Clean up the shard actor.
+            shard.tell(PoisonPill.getInstance(), ActorRef.noSender());
+        }};
+    }
+}
package org.opendaylight.controller.cluster.datastore;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import akka.actor.ActorRef;
import akka.actor.Props;
import akka.actor.Terminated;
import akka.testkit.TestActorRef;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
+import java.util.Collections;
+import java.util.concurrent.TimeUnit;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.ShardWriteTransaction.GetCompositeModificationReply;
import org.opendaylight.controller.cluster.datastore.jmx.mbeans.shard.ShardStats;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransaction;
import org.opendaylight.controller.cluster.datastore.messages.CloseTransactionReply;
+import org.opendaylight.controller.cluster.datastore.messages.CreateTransaction;
import org.opendaylight.controller.cluster.datastore.messages.DataExists;
import org.opendaylight.controller.cluster.datastore.messages.DataExistsReply;
import org.opendaylight.controller.cluster.datastore.messages.DeleteData;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import scala.concurrent.duration.Duration;
-import java.util.Collections;
-import java.util.concurrent.TimeUnit;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
public class ShardTransactionTest extends AbstractActorTest {
private static ListeningExecutorService storeExecutor =
private ActorRef createShard(){
return getSystem().actorOf(Shard.props(SHARD_IDENTIFIER,
- Collections.EMPTY_MAP, datastoreContext, TestModel.createTestContext()));
+ Collections.<ShardIdentifier, String>emptyMap(), datastoreContext, TestModel.createTestContext()));
}
@Test
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveReadData(getSystem().actorOf(props, "testReadDataRO"));
props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveReadData(getSystem().actorOf(props, "testReadDataRW"));
}
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
Props props = ShardTransaction.props( store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveReadDataWhenDataNotFound(getSystem().actorOf(
props, "testReadDataWhenDataNotFoundRO"));
props = ShardTransaction.props( store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveReadDataWhenDataNotFound(getSystem().actorOf(
props, "testReadDataWhenDataNotFoundRW"));
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveDataExistsPositive(getSystem().actorOf(props, "testDataExistsPositiveRO"));
props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveDataExistsPositive(getSystem().actorOf(props, "testDataExistsPositiveRW"));
}
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveDataExistsNegative(getSystem().actorOf(props, "testDataExistsNegativeRO"));
props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
testOnReceiveDataExistsNegative(getSystem().actorOf(props, "testDataExistsNegativeRW"));
}
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testWriteData");
transaction.tell(new WriteData(TestModel.TEST_PATH,
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testMergeData");
transaction.tell(new MergeData(TestModel.TEST_PATH,
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newWriteOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testDeleteData");
transaction.tell(new DeleteData(TestModel.TEST_PATH).toSerializable(), getRef());
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testReadyTransaction");
watch(transaction);
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props( store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testReadyTransaction2");
watch(transaction);
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction = getSystem().actorOf(props, "testCloseTransaction");
watch(transaction);
public void testNegativePerformingWriteOperationOnReadTransaction() throws Exception {
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadOnlyTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final TestActorRef<ShardTransaction> transaction = TestActorRef.apply(props,getSystem());
transaction.receive(new DeleteData(TestModel.TEST_PATH).toSerializable(), ActorRef.noSender());
new JavaTestKit(getSystem()) {{
final ActorRef shard = createShard();
final Props props = ShardTransaction.props(store.newReadWriteTransaction(), shard,
- testSchemaContext, datastoreContext, shardStats, "txn");
+ testSchemaContext, datastoreContext, shardStats, "txn",
+ CreateTransaction.CURRENT_VERSION);
final ActorRef transaction =
getSystem().actorOf(props, "testShardTransactionInactivity");
return getSystem().actorSelection(actorRef.path());
}
- private CreateTransactionReply createTransactionReply(ActorRef actorRef){
+ private CreateTransactionReply createTransactionReply(ActorRef actorRef, int transactionVersion){
return CreateTransactionReply.newBuilder()
.setTransactionActorPath(actorRef.path().toString())
- .setTransactionId("txn-1").build();
+ .setTransactionId("txn-1")
+ .setMessageVersion(transactionVersion)
+ .build();
}
- private ActorRef setupActorContextWithInitialCreateTransaction(ActorSystem actorSystem, TransactionType type) {
+ private ActorRef setupActorContextWithInitialCreateTransaction(ActorSystem actorSystem, TransactionType type, int transactionVersion) {
ActorRef actorRef = actorSystem.actorOf(Props.create(DoNothingActor.class));
doReturn(actorSystem.actorSelection(actorRef.path())).
when(mockActorContext).actorSelection(actorRef.path().toString());
doReturn(Futures.successful(actorSystem.actorSelection(actorRef.path()))).
when(mockActorContext).findPrimaryShardAsync(eq(DefaultShardStrategy.DEFAULT_SHARD));
- doReturn(Futures.successful(createTransactionReply(actorRef))).when(mockActorContext).
+ doReturn(Futures.successful(createTransactionReply(actorRef, transactionVersion))).when(mockActorContext).
executeOperationAsync(eq(actorSystem.actorSelection(actorRef.path())),
eqCreateTransaction(memberName, type));
return actorRef;
}
+ private ActorRef setupActorContextWithInitialCreateTransaction(ActorSystem actorSystem, TransactionType type) {
+ return setupActorContextWithInitialCreateTransaction(actorSystem, type, CreateTransaction.CURRENT_VERSION);
+ }
+
+
private void propagateReadFailedExceptionCause(CheckedFuture<?, ReadFailedException> future)
throws Throwable {
verifyCohortFutures(proxy, getSystem().actorSelection(actorRef.path()));
}
+ @SuppressWarnings("unchecked")
+ @Test
+ public void testReadyForwardCompatibility() throws Exception {
+ ActorRef actorRef = setupActorContextWithInitialCreateTransaction(getSystem(), READ_WRITE, 0);
+
+ NormalizedNode<?, ?> nodeToWrite = ImmutableNodes.containerNode(TestModel.TEST_QNAME);
+
+ doReturn(readSerializedDataReply(null)).when(mockActorContext).executeOperationAsync(
+ eq(actorSelection(actorRef)), eqSerializedReadData());
+
+ doReturn(writeSerializedDataReply()).when(mockActorContext).executeOperationAsync(
+ eq(actorSelection(actorRef)), eqSerializedWriteData(nodeToWrite));
+
+ doReturn(readySerializedTxReply(actorRef.path().toString())).when(mockActorContext).executeOperationAsync(
+ eq(actorSelection(actorRef)), isA(ReadyTransaction.SERIALIZABLE_CLASS));
+
+ doReturn(actorRef.path().toString()).when(mockActorContext).resolvePath(eq(actorRef.path().toString()),
+ eq(actorRef.path().toString()));
+
+ TransactionProxy transactionProxy = new TransactionProxy(mockActorContext,
+ READ_WRITE);
+
+ transactionProxy.read(TestModel.TEST_PATH);
+
+ transactionProxy.write(TestModel.TEST_PATH, nodeToWrite);
+
+ DOMStoreThreePhaseCommitCohort ready = transactionProxy.ready();
+
+ assertTrue(ready instanceof ThreePhaseCommitCohortProxy);
+
+ ThreePhaseCommitCohortProxy proxy = (ThreePhaseCommitCohortProxy) ready;
+
+ verifyRecordingOperationFutures(transactionProxy.getRecordedOperationFutures(),
+ WriteDataReply.SERIALIZABLE_CLASS);
+
+ verifyCohortFutures(proxy, getSystem().actorSelection(actorRef.path()));
+
+ verify(mockActorContext).resolvePath(eq(actorRef.path().toString()),
+ eq(actorRef.path().toString()));
+ }
+
@SuppressWarnings("unchecked")
@Test
public void testReadyWithRecordingOperationFailure() throws Exception {
package org.opendaylight.controller.cluster.datastore.shardstrategy;
-import junit.framework.Assert;
+import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
package org.opendaylight.controller.cluster.datastore.shardstrategy;
+import static org.junit.Assert.assertEquals;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import static junit.framework.Assert.assertEquals;
-
public class ModuleShardStrategyTest {
@Rule
public ExpectedException expectedEx = ExpectedException.none();
package org.opendaylight.controller.cluster.datastore.shardstrategy;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
-import static junit.framework.Assert.assertNotNull;
-import static junit.framework.Assert.assertTrue;
-
public class ShardStrategyFactoryTest {
@Rule
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
+import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.japi.Creator;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.Duration;
+
import java.util.concurrent.TimeUnit;
+
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
clusterWrapper.setSelfAddress("akka.tcp://system@127.0.0.1:2551/");
assertEquals(false, actorContext.isLocalPath("akka.tcp://system@127.0.0.1:2550/"));
}
+
+    @Test
+    public void testResolvePathForRemoteActor() {
+        // A local (akka://) child path resolved against a remote (akka.tcp://) primary
+        // path should be rebased onto the remote address.
+        ActorContext actorContext =
+            new ActorContext(mock(ActorSystem.class), mock(ActorRef.class), mock(
+                ClusterWrapper.class),
+                mock(Configuration.class));
+
+        String actual = actorContext.resolvePath(
+            "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard",
+            "akka://system/user/shardmanager/shard/transaction");
+
+        String expected = "akka.tcp://system@127.0.0.1:2550/user/shardmanager/shard/transaction";
+
+        assertEquals(expected, actual);
+    }
+
+    @Test
+    public void testResolvePathForLocalActor() {
+        // When the primary path is itself local (akka://), the child path should be
+        // returned unchanged.
+        ActorContext actorContext =
+            new ActorContext(getSystem(), mock(ActorRef.class), mock(ClusterWrapper.class),
+                mock(Configuration.class));
+
+        String actual = actorContext.resolvePath(
+            "akka://system/user/shardmanager/shard",
+            "akka://system/user/shardmanager/shard/transaction");
+
+        String expected = "akka://system/user/shardmanager/shard/transaction";
+
+        assertEquals(expected, actual);
+    }
+
+    @Test
+    public void testResolvePathForRemoteActorWithProperRemoteAddress() {
+        // When the child path already carries the correct remote address, resolution
+        // should leave it as-is.
+        ActorContext actorContext =
+            new ActorContext(getSystem(), mock(ActorRef.class), mock(ClusterWrapper.class),
+                mock(Configuration.class));
+
+        String actual = actorContext.resolvePath(
+            "akka.tcp://system@7.0.0.1:2550/user/shardmanager/shard",
+            "akka.tcp://system@7.0.0.1:2550/user/shardmanager/shard/transaction");
+
+        String expected = "akka.tcp://system@7.0.0.1:2550/user/shardmanager/shard/transaction";
+
+        assertEquals(expected, actual);
+    }
+
}
package org.opendaylight.controller.cluster.datastore.utils;
import com.google.common.base.Optional;
-import org.opendaylight.controller.cluster.datastore.Configuration;
-import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategy;
-
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.opendaylight.controller.cluster.datastore.Configuration;
+import org.opendaylight.controller.cluster.datastore.shardstrategy.ShardStrategy;
public class MockConfiguration implements Configuration{
- @Override public List<String> getMemberShardNames(String memberName) {
+ @Override public List<String> getMemberShardNames(final String memberName) {
return Arrays.asList("default");
}
@Override public Optional<String> getModuleNameFromNameSpace(
- String nameSpace) {
+ final String nameSpace) {
return Optional.absent();
}
@Override
public Map<String, ShardStrategy> getModuleNameToShardStrategyMap() {
- return Collections.EMPTY_MAP;
+ return Collections.emptyMap();
}
@Override public List<String> getShardNamesFromModuleName(
- String moduleName) {
- return Collections.EMPTY_LIST;
+ final String moduleName) {
+ return Collections.emptyList();
}
- @Override public List<String> getMembersFromShardName(String shardName) {
+ @Override public List<String> getMembersFromShardName(final String shardName) {
if("default".equals(shardName)) {
return Arrays.asList("member-1", "member-2");
} else if("astronauts".equals(shardName)){
return Arrays.asList("member-2", "member-3");
}
- return Collections.EMPTY_LIST;
+ return Collections.emptyList();
}
@Override public Set<String> getAllShardNames() {
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.Uninterruptibles;
+import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.opendaylight.controller.md.sal.common.api.data.AsyncDataChangeListener;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Uninterruptibles;
/**
* A mock DataChangeListener implementation.
public class MockDataChangeListener implements
AsyncDataChangeListener<YangInstanceIdentifier, NormalizedNode<?, ?>> {
- private final List<AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>>>
- changeList = Lists.newArrayList();
- private final CountDownLatch changeLatch;
- private final int expChangeEventCount;
+ private final List<AsyncDataChangeEvent<YangInstanceIdentifier, NormalizedNode<?, ?>>> changeList =
+ Collections.synchronizedList(Lists.<AsyncDataChangeEvent<YangInstanceIdentifier,
+ NormalizedNode<?, ?>>>newArrayList());
+
+ private volatile CountDownLatch changeLatch;
+ private int expChangeEventCount;
public MockDataChangeListener(int expChangeEventCount) {
+ reset(expChangeEventCount);
+ }
+
+ public void reset(int expChangeEventCount) {
changeLatch = new CountDownLatch(expChangeEventCount);
this.expChangeEventCount = expChangeEventCount;
+ changeList.clear();
}
@Override
}
public void waitForChangeEvents(YangInstanceIdentifier... expPaths) {
- assertEquals("Change notifications complete", true,
- Uninterruptibles.awaitUninterruptibly(changeLatch, 5, TimeUnit.SECONDS));
+ boolean done = Uninterruptibles.awaitUninterruptibly(changeLatch, 5, TimeUnit.SECONDS);
+ if(!done) {
+ fail(String.format("Missing change notifications. Expected: %d. Actual: %d",
+ expChangeEventCount, (expChangeEventCount - changeLatch.getCount())));
+ }
+
+ assertEquals("Change notifications complete", true, done);
for(int i = 0; i < expPaths.length; i++) {
assertTrue(String.format("Change %d does not contain %s", (i+1), expPaths[i]),
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
-import junit.framework.Assert;
-
import java.util.List;
+import org.junit.Assert;
public class TestUtils {
- public static void assertFirstSentMessage(ActorSystem actorSystem, ActorRef actorRef, Class clazz){
+ public static void assertFirstSentMessage(final ActorSystem actorSystem, final ActorRef actorRef, final Class<?> clazz){
ActorContext testContext = new ActorContext(actorSystem, actorSystem.actorOf(
Props.create(DoNothingActor.class)), new MockClusterWrapper(), new MockConfiguration());
Object messages = testContext
Assert.assertTrue(messages instanceof List);
- List<Object> listMessages = (List<Object>) messages;
+ List<?> listMessages = (List<?>) messages;
Assert.assertEquals(1, listMessages.size());
public static final QName CAR_PRICE_QNAME = QName.create(CAR_QNAME, "price");
- public static NormalizedNode create(){
+ public static NormalizedNode<?, ?> create(){
// Create a list builder
CollectionNodeBuilder<MapEntryNode, MapNode> cars =
}
- public static NormalizedNode emptyContainer(){
+ public static NormalizedNode<?, ?> emptyContainer(){
return ImmutableContainerNodeBuilder.create()
.withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(BASE_QNAME))
.build();
- public static NormalizedNode create(){
+ public static NormalizedNode<?, ?> create(){
// Create a list builder
CollectionNodeBuilder<MapEntryNode, MapNode> cars =
}
- public static NormalizedNode emptyContainer(){
+ public static NormalizedNode<?, ?> emptyContainer(){
return ImmutableContainerNodeBuilder.create()
.withNodeIdentifier(
new YangInstanceIdentifier.NodeIdentifier(BASE_QNAME))
package org.opendaylight.controller.md.cluster.datastore.model;
-import junit.framework.Assert;
+import org.junit.Assert;
import org.junit.Test;
import org.opendaylight.controller.cluster.datastore.node.NormalizedNodeToNodeCodec;
import org.opendaylight.controller.protobuff.messages.common.NormalizedNodeMessages;
<version>1.2.0-SNAPSHOT</version>\r
</parent>\r
<artifactId>sal-dom-xsql-config</artifactId>\r
- <groupId>org.opendaylight.controller</groupId>\r
<description>Configuration files for md-sal</description>\r
<packaging>jar</packaging>\r
<properties>\r
import org.opendaylight.yangtools.yang.data.impl.util.CompositeNodeBuilder;
import org.opendaylight.yangtools.yang.model.api.DataNodeContainer;
import org.opendaylight.yangtools.yang.model.api.DataSchemaNode;
+import org.opendaylight.yangtools.yang.model.api.RpcDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
Preconditions.checkNotNull(rpcName);
Preconditions.checkNotNull(schemaContext);
- final NodeContainerProxy rpcBodyProxy = new NodeContainerProxy(rpcName, schemaContext.getChildNodes());
- return new NodeContainerProxy(NETCONF_RPC_QNAME, Sets.<DataSchemaNode>newHashSet(rpcBodyProxy));
+ for (final RpcDefinition rpcDefinition : schemaContext.getOperations()) {
+ if(rpcDefinition.getQName().equals(rpcName)) {
+ final NodeContainerProxy rpcBodyProxy = new NodeContainerProxy(rpcName, rpcDefinition.getInput().getChildNodes());
+ return new NodeContainerProxy(NETCONF_RPC_QNAME, Sets.<DataSchemaNode>newHashSet(rpcBodyProxy));
+ }
+ }
+ throw new IllegalArgumentException("Rpc " + rpcName + " not found in schema context " + schemaContext + ". Unable to invoke Rpc");
}
public static CompositeNodeTOImpl wrap(final QName name, final Node<?> node) {
assertEquals(streamName.getLocalName(), "stream-name");
}
- @Test
- public void testNoSchemaContextToRpcRequest() throws Exception {
- final String exampleNamespace = "http://example.net/me/my-own/1.0";
- final String exampleRevision = "2014-07-22";
- final QName myOwnMethodRpcQName = QName.create(exampleNamespace, exampleRevision, "my-own-method");
-
- final CompositeNodeBuilder<ImmutableCompositeNode> rootBuilder = ImmutableCompositeNode.builder();
- rootBuilder.setQName(myOwnMethodRpcQName);
-
- final CompositeNodeBuilder<ImmutableCompositeNode> inputBuilder = ImmutableCompositeNode.builder();
- inputBuilder.setQName(QName.create(exampleNamespace, exampleRevision, "input"));
- inputBuilder.addLeaf(QName.create(exampleNamespace, exampleRevision, "my-first-parameter"), "14");
- inputBuilder.addLeaf(QName.create(exampleNamespace, exampleRevision, "another-parameter"), "fred");
-
- rootBuilder.add(inputBuilder.toInstance());
- final ImmutableCompositeNode root = rootBuilder.toInstance();
-
- final NetconfMessage message = messageTransformer.toRpcRequest(myOwnMethodRpcQName, root);
- assertNotNull(message);
-
- final Document xmlDoc = message.getDocument();
- final org.w3c.dom.Node rpcChild = xmlDoc.getFirstChild();
- assertEquals(rpcChild.getLocalName(), "rpc");
-
- final org.w3c.dom.Node myOwnMethodNode = rpcChild.getFirstChild();
- assertEquals(myOwnMethodNode.getLocalName(), "my-own-method");
-
- final org.w3c.dom.Node firstParamNode = myOwnMethodNode.getFirstChild();
- assertEquals(firstParamNode.getLocalName(), "my-first-parameter");
-
- final org.w3c.dom.Node secParamNode = firstParamNode.getNextSibling();
- assertEquals(secParamNode.getLocalName(), "another-parameter");
- }
}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.opendaylight.controller.samples</groupId>
- <artifactId>sal-samples</artifactId>
- <version>1.2.0-SNAPSHOT</version>
- <relativePath>../..</relativePath>
- </parent>
- <groupId>org.opendaylight.controller.samples.l2switch.md</groupId>
- <artifactId>l2switch-impl</artifactId>
- <packaging>bundle</packaging>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-binding-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller.model</groupId>
- <artifactId>model-flow-service</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.opendaylight.controller.model</groupId>
- <artifactId>model-inventory</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller.model</groupId>
- <artifactId>model-topology</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller.samples.l2switch.md</groupId>
- <artifactId>l2switch-model</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.controller.thirdparty</groupId>
- <artifactId>net.sf.jung2</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-common</artifactId>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <extensions>true</extensions>
- <configuration>
-
- <instructions>
- <Bundle-Activator>org.opendaylight.controller.sample.l2switch.md.L2SwitchProvider</Bundle-Activator>
- </instructions>
- <manifestLocation>${project.build.directory}/META-INF</manifestLocation>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md;
-
-import org.opendaylight.controller.sample.l2switch.md.addresstracker.AddressTracker;
-import org.opendaylight.controller.sample.l2switch.md.flow.FlowWriterService;
-import org.opendaylight.controller.sample.l2switch.md.flow.FlowWriterServiceImpl;
-import org.opendaylight.controller.sample.l2switch.md.inventory.InventoryService;
-import org.opendaylight.controller.sample.l2switch.md.packet.PacketHandler;
-import org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphDijkstra;
-import org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService;
-import org.opendaylight.controller.sample.l2switch.md.topology.TopologyLinkDataChangeHandler;
-import org.opendaylight.controller.sal.binding.api.AbstractBindingAwareConsumer;
-import org.opendaylight.controller.sal.binding.api.BindingAwareBroker;
-import org.opendaylight.controller.sal.binding.api.NotificationService;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.PacketProcessingService;
-import org.opendaylight.yangtools.concepts.ListenerRegistration;
-import org.opendaylight.yangtools.yang.binding.NotificationListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * L2SwitchProvider serves as the Activator for our L2Switch OSGI bundle.
- */
-public class L2SwitchProvider extends AbstractBindingAwareConsumer
- implements AutoCloseable {
-
- private final static Logger _logger = LoggerFactory.getLogger(L2SwitchProvider.class);
-
- private ListenerRegistration<NotificationListener> listenerRegistration;
- private AddressTracker addressTracker;
- private TopologyLinkDataChangeHandler topologyLinkDataChangeHandler;
-
-
- /**
- * Setup the L2Switch.
- * @param consumerContext The context of the L2Switch.
- */
- @Override
- public void onSessionInitialized(BindingAwareBroker.ConsumerContext consumerContext) {
- DataBrokerService dataService = consumerContext.<DataBrokerService>getSALService(DataBrokerService.class);
- addressTracker = new AddressTracker(dataService);
-
- NetworkGraphService networkGraphService = new NetworkGraphDijkstra();
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataService, networkGraphService);
-
- NotificationService notificationService =
- consumerContext.<NotificationService>getSALService(NotificationService.class);
- PacketProcessingService packetProcessingService =
- consumerContext.<PacketProcessingService>getRpcService(PacketProcessingService.class);
- PacketHandler packetHandler = new PacketHandler();
- packetHandler.setAddressTracker(addressTracker);
- packetHandler.setFlowWriterService(flowWriterService);
- packetHandler.setPacketProcessingService(packetProcessingService);
- packetHandler.setInventoryService(new InventoryService(dataService));
-
- this.listenerRegistration = notificationService.registerNotificationListener(packetHandler);
- this.topologyLinkDataChangeHandler = new TopologyLinkDataChangeHandler(dataService, networkGraphService);
- topologyLinkDataChangeHandler.registerAsDataChangeListener();
- }
-
- /**
- * Cleanup the L2Switch.
- * @throws Exception occurs when the NotificationListener is closed
- */
- @Override
- public void close() throws Exception {
- if (listenerRegistration != null)
- listenerRegistration.close();
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.addresstracker;
-
-import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev100924.MacAddress;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.address.tracker.rev140402.L2Addresses;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.address.tracker.rev140402.l2.addresses.L2Address;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.address.tracker.rev140402.l2.addresses.L2AddressBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.address.tracker.rev140402.l2.addresses.L2AddressKey;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.common.RpcResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.concurrent.Future;
-
-/**
- * AddressTracker manages the MD-SAL data tree for L2Address (mac, node connector pairings) information.
- */
-public class AddressTracker {
-
- private final static Logger _logger = LoggerFactory.getLogger(AddressTracker.class);
- private DataBrokerService dataService;
-
- /**
- * Construct an AddressTracker with the specified inputs
- * @param dataService The DataBrokerService for the AddressTracker
- */
- public AddressTracker(DataBrokerService dataService) {
- this.dataService = dataService;
- }
-
- /**
- * Get all the L2 Addresses in the MD-SAL data tree
- * @return All the L2 Addresses in the MD-SAL data tree
- */
- public L2Addresses getAddresses() {
- return (L2Addresses)dataService.readOperationalData(InstanceIdentifier.<L2Addresses>builder(L2Addresses.class).toInstance());
- }
-
- /**
- * Get a specific L2 Address in the MD-SAL data tree
- * @param macAddress A MacAddress associated with an L2 Address object
- * @return The L2 Address corresponding to the specified macAddress
- */
- public L2Address getAddress(MacAddress macAddress) {
- return (L2Address) dataService.readOperationalData(createPath(macAddress));
- }
-
- /**
- * Add L2 Address into the MD-SAL data tree
- * @param macAddress The MacAddress of the new L2Address object
- * @param nodeConnectorRef The NodeConnectorRef of the new L2Address object
- * @return Future containing the result of the add operation
- */
- public Future<RpcResult<TransactionStatus>> addAddress(MacAddress macAddress, NodeConnectorRef nodeConnectorRef) {
- if(macAddress == null || nodeConnectorRef == null) {
- return null;
- }
-
- // Create L2Address
- final L2AddressBuilder builder = new L2AddressBuilder();
- builder.setKey(new L2AddressKey(macAddress))
- .setMac(macAddress)
- .setNodeConnectorRef(nodeConnectorRef);
-
- // Add L2Address to MD-SAL data tree
- final DataModificationTransaction it = dataService.beginTransaction();
- it.putOperationalData(createPath(macAddress), builder.build());
- return it.commit();
- }
-
- /**
- * Remove L2Address from the MD-SAL data tree
- * @param macAddress The MacAddress of an L2Address object
- * @return Future containing the result of the remove operation
- */
- public Future<RpcResult<TransactionStatus>> removeHost(MacAddress macAddress) {
- final DataModificationTransaction it = dataService.beginTransaction();
- it.removeOperationalData(createPath(macAddress));
- return it.commit();
- }
-
- /**
- * Create InstanceIdentifier path for an L2Address in the MD-SAL data tree
- * @param macAddress The MacAddress of an L2Address object
- * @return InstanceIdentifier of the L2Address corresponding to the specified macAddress
- */
- private InstanceIdentifier<L2Address> createPath(MacAddress macAddress) {
- return InstanceIdentifier.<L2Addresses>builder(L2Addresses.class)
- .<L2Address, L2AddressKey>child(L2Address.class, new L2AddressKey(macAddress)).toInstance();
- }
-}
\ No newline at end of file
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.flow;
-
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev100924.MacAddress;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-
-/**
- * Service that adds packet forwarding flows to configuration data store.
- */
-public interface FlowWriterService {
-
- /**
- * Writes a flow that forwards packets to destPort if destination mac in packet is destMac and
- * source Mac in packet is sourceMac. If sourceMac is null then flow would not set any source mac,
- * resulting in all packets with destMac being forwarded to destPort.
- *
- * @param sourceMac
- * @param destMac
- * @param destNodeConnectorRef
- */
- public void addMacToMacFlow(MacAddress sourceMac, MacAddress destMac, NodeConnectorRef destNodeConnectorRef);
-
- /**
- * Writes mac-to-mac flow on all ports that are in the path between given source and destination ports.
- * It uses path provided by NetworkGraphService{@link org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService} to find a links{@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link}
- * between given ports. And then writes appropriate flow on each port that is covered in that path.
- *
- * @param sourceMac
- * @param sourceNodeConnectorRef
- * @param destMac
- * @param destNodeConnectorRef
- */
- public void addMacToMacFlowsUsingShortestPath(MacAddress sourceMac, NodeConnectorRef sourceNodeConnectorRef, MacAddress destMac, NodeConnectorRef destNodeConnectorRef);
-
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.flow;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-
-import org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService;
-import org.opendaylight.controller.sample.l2switch.md.util.InstanceIdentifierUtils;
-import org.opendaylight.controller.md.sal.common.api.TransactionStatus;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev100924.MacAddress;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.OutputActionCaseBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.action.output.action._case.OutputActionBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.list.Action;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.action.types.rev131112.action.list.ActionBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowCookie;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.FlowModFlags;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.InstructionsBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.Match;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.flow.MatchBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.ApplyActionsCaseBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.apply.actions._case.ApplyActions;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.instruction.apply.actions._case.ApplyActionsBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.Instruction;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.InstructionBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.model.match.types.rev131026.ethernet.match.fields.EthernetDestinationBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.model.match.types.rev131026.ethernet.match.fields.EthernetSourceBuilder;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.model.match.types.rev131026.match.EthernetMatch;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.model.match.types.rev131026.match.EthernetMatchBuilder;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.opendaylight.yangtools.yang.common.RpcResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.math.BigInteger;
-import java.util.List;
-import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicLong;
-
-/**
- * Implementation of FlowWriterService{@link org.opendaylight.controller.sample.l2switch.md.flow.FlowWriterService},
- * that builds required flow and writes to configuration data store using provided DataBrokerService
- * {@link org.opendaylight.controller.sal.binding.api.data.DataBrokerService}
- */
-public class FlowWriterServiceImpl implements FlowWriterService {
- private static final Logger _logger = LoggerFactory.getLogger(FlowWriterServiceImpl.class);
- private final DataBrokerService dataBrokerService;
- private final NetworkGraphService networkGraphService;
- private AtomicLong flowIdInc = new AtomicLong();
- private AtomicLong flowCookieInc = new AtomicLong(0x2a00000000000000L);
-
-
- public FlowWriterServiceImpl(DataBrokerService dataBrokerService, NetworkGraphService networkGraphService) {
- Preconditions.checkNotNull(dataBrokerService, "dataBrokerService should not be null.");
- Preconditions.checkNotNull(networkGraphService, "networkGraphService should not be null.");
- this.dataBrokerService = dataBrokerService;
- this.networkGraphService = networkGraphService;
- }
-
- /**
- * Writes a flow that forwards packets to destPort if destination mac in packet is destMac and
- * source Mac in packet is sourceMac. If sourceMac is null then flow would not set any source mac,
- * resulting in all packets with destMac being forwarded to destPort.
- *
- * @param sourceMac
- * @param destMac
- * @param destNodeConnectorRef
- */
- @Override
- public void addMacToMacFlow(MacAddress sourceMac, MacAddress destMac, NodeConnectorRef destNodeConnectorRef) {
-
- Preconditions.checkNotNull(destMac, "Destination mac address should not be null.");
- Preconditions.checkNotNull(destNodeConnectorRef, "Destination port should not be null.");
-
-
- // do not add flow if both macs are same.
- if(sourceMac != null && destMac.equals(sourceMac)) {
- _logger.info("In addMacToMacFlow: No flows added. Source and Destination mac are same.");
- return;
- }
-
- // get flow table key
- TableKey flowTableKey = new TableKey((short) 0); //TODO: Hard coded Table Id 0, need to get it from Configuration data.
-
- //build a flow path based on node connector to program flow
- InstanceIdentifier<Flow> flowPath = buildFlowPath(destNodeConnectorRef, flowTableKey);
-
- // build a flow that target given mac id
- Flow flowBody = createMacToMacFlow(flowTableKey.getId(), 0, sourceMac, destMac, destNodeConnectorRef);
-
- // commit the flow in config data
- writeFlowToConfigData(flowPath, flowBody);
- }
-
- /**
- * Writes mac-to-mac flow on all ports that are in the path between given source and destination ports.
- * It uses path provided by NetworkGraphService
- * {@link org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService} to find a links
- * {@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link}
- * between given ports. And then writes appropriate flow on each port that is covered in that path.
- *
- * @param sourceMac
- * @param sourceNodeConnectorRef
- * @param destMac
- * @param destNodeConnectorRef
- */
- @Override
- public void addMacToMacFlowsUsingShortestPath(MacAddress sourceMac,
- NodeConnectorRef sourceNodeConnectorRef,
- MacAddress destMac,
- NodeConnectorRef destNodeConnectorRef) {
- Preconditions.checkNotNull(sourceMac, "Source mac address should not be null.");
- Preconditions.checkNotNull(sourceNodeConnectorRef, "Source port should not be null.");
- Preconditions.checkNotNull(destMac, "Destination mac address should not be null.");
- Preconditions.checkNotNull(destNodeConnectorRef, "Destination port should not be null.");
-
- if(sourceNodeConnectorRef.equals(destNodeConnectorRef)) {
- _logger.info("In addMacToMacFlowsUsingShortestPath: No flows added. Source and Destination ports are same.");
- return;
-
- }
- NodeId sourceNodeId = new NodeId(sourceNodeConnectorRef.getValue().firstKeyOf(Node.class, NodeKey.class).getId().getValue());
- NodeId destNodeId = new NodeId(destNodeConnectorRef.getValue().firstKeyOf(Node.class, NodeKey.class).getId().getValue());
-
- // add destMac-To-sourceMac flow on source port
- addMacToMacFlow(destMac, sourceMac, sourceNodeConnectorRef);
-
- // add sourceMac-To-destMac flow on destination port
- addMacToMacFlow(sourceMac, destMac, destNodeConnectorRef);
-
- if(!sourceNodeId.equals(destNodeId)) {
- List<Link> linksInBeween = networkGraphService.getPath(sourceNodeId, destNodeId);
-
- if(linksInBeween != null) {
- // assumes the list order is maintained and starts with link that has source as source node
- for(Link link : linksInBeween) {
- // add sourceMac-To-destMac flow on source port
- addMacToMacFlow(sourceMac, destMac, getSourceNodeConnectorRef(link));
-
- // add destMac-To-sourceMac flow on destination port
- addMacToMacFlow(destMac, sourceMac, getDestNodeConnectorRef(link));
- }
- }
- }
- }
-
- private NodeConnectorRef getSourceNodeConnectorRef(Link link) {
- InstanceIdentifier<NodeConnector> nodeConnectorInstanceIdentifier
- = InstanceIdentifierUtils.createNodeConnectorIdentifier(
- link.getSource().getSourceNode().getValue(),
- link.getSource().getSourceTp().getValue());
- return new NodeConnectorRef(nodeConnectorInstanceIdentifier);
- }
-
- private NodeConnectorRef getDestNodeConnectorRef(Link link) {
- InstanceIdentifier<NodeConnector> nodeConnectorInstanceIdentifier
- = InstanceIdentifierUtils.createNodeConnectorIdentifier(
- link.getDestination().getDestNode().getValue(),
- link.getDestination().getDestTp().getValue());
-
- return new NodeConnectorRef(nodeConnectorInstanceIdentifier);
- }
-
- /**
- * @param nodeConnectorRef
- * @return
- */
- private InstanceIdentifier<Flow> buildFlowPath(NodeConnectorRef nodeConnectorRef, TableKey flowTableKey) {
-
- // generate unique flow key
- FlowId flowId = new FlowId(String.valueOf(flowIdInc.getAndIncrement()));
- FlowKey flowKey = new FlowKey(flowId);
-
- return InstanceIdentifierUtils.generateFlowInstanceIdentifier(nodeConnectorRef, flowTableKey, flowKey);
- }
-
- /**
- * @param tableId
- * @param priority
- * @param sourceMac
- * @param destMac
- * @param destPort
- * @return {@link org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowBuilder}
- * builds flow that forwards all packets with destMac to given port
- */
- private Flow createMacToMacFlow(Short tableId, int priority,
- MacAddress sourceMac, MacAddress destMac, NodeConnectorRef destPort) {
-
- // start building flow
- FlowBuilder macToMacFlow = new FlowBuilder() //
- .setTableId(tableId) //
- .setFlowName("mac2mac");
-
- // use its own hash code for id.
- macToMacFlow.setId(new FlowId(Long.toString(macToMacFlow.hashCode())));
-
- // create a match that has mac to mac ethernet match
- EthernetMatchBuilder ethernetMatchBuilder = new EthernetMatchBuilder() //
- .setEthernetDestination(new EthernetDestinationBuilder() //
- .setAddress(destMac) //
- .build());
- // set source in the match only if present
- if(sourceMac != null) {
- ethernetMatchBuilder.setEthernetSource(new EthernetSourceBuilder()
- .setAddress(sourceMac)
- .build());
- }
- EthernetMatch ethernetMatch = ethernetMatchBuilder.build();
- Match match = new MatchBuilder()
- .setEthernetMatch(ethernetMatch)
- .build();
-
-
- Uri destPortUri = destPort.getValue().firstKeyOf(NodeConnector.class, NodeConnectorKey.class).getId();
-
- Action outputToControllerAction = new ActionBuilder() //
- .setOrder(0)
- .setAction(new OutputActionCaseBuilder() //
- .setOutputAction(new OutputActionBuilder() //
- .setMaxLength(new Integer(0xffff)) //
- .setOutputNodeConnector(destPortUri) //
- .build()) //
- .build()) //
- .build();
-
- // Create an Apply Action
- ApplyActions applyActions = new ApplyActionsBuilder().setAction(ImmutableList.of(outputToControllerAction))
- .build();
-
- // Wrap our Apply Action in an Instruction
- Instruction applyActionsInstruction = new InstructionBuilder() //
- .setOrder(0)
- .setInstruction(new ApplyActionsCaseBuilder()//
- .setApplyActions(applyActions) //
- .build()) //
- .build();
-
- // Put our Instruction in a list of Instructions
- macToMacFlow
- .setMatch(match) //
- .setInstructions(new InstructionsBuilder() //
- .setInstruction(ImmutableList.of(applyActionsInstruction)) //
- .build()) //
- .setPriority(priority) //
- .setBufferId(0L) //
- .setHardTimeout(0) //
- .setIdleTimeout(0) //
- .setCookie(new FlowCookie(BigInteger.valueOf(flowCookieInc.getAndIncrement())))
- .setFlags(new FlowModFlags(false, false, false, false, false));
-
- return macToMacFlow.build();
- }
-
- /**
- * Starts and commits data change transaction which
- * modifies provided flow path with supplied body.
- *
- * @param flowPath
- * @param flowBody
- * @return transaction commit
- */
- private Future<RpcResult<TransactionStatus>> writeFlowToConfigData(InstanceIdentifier<Flow> flowPath,
- Flow flowBody) {
- DataModificationTransaction addFlowTransaction = dataBrokerService.beginTransaction();
- addFlowTransaction.putConfigurationData(flowPath, flowBody);
- return addFlowTransaction.commit();
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.inventory;
-
-import org.opendaylight.controller.sample.l2switch.md.util.InstanceIdentifierUtils;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNodeConnector;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-/**
- * InventoryService provides functions related to Nodes & NodeConnectors.
- */
-public class InventoryService {
- private DataBrokerService dataService;
- // Key: SwitchId, Value: NodeConnectorRef that corresponds to NC between controller & switch
- private HashMap<String, NodeConnectorRef> controllerSwitchConnectors;
-
- /**
- * Construct an InventoryService object with the specified inputs.
- * @param dataService The DataBrokerService associated with the InventoryService.
- */
- public InventoryService(DataBrokerService dataService) {
- this.dataService = dataService;
- controllerSwitchConnectors = new HashMap<String, NodeConnectorRef>();
- }
-
- public HashMap<String, NodeConnectorRef> getControllerSwitchConnectors() {
- return controllerSwitchConnectors;
- }
-
- // ToDo: Improve performance for thousands of switch ports
- /**
- * Get the External NodeConnectors of the network, which are the NodeConnectors connected to hosts.
- * @return The list of external node connectors.
- */
- public List<NodeConnectorRef> getExternalNodeConnectors() {
- // External NodeConnectors = All - Internal
- ArrayList<NodeConnectorRef> externalNodeConnectors = new ArrayList<NodeConnectorRef>();
- Set<String> internalNodeConnectors = new HashSet<>();
-
- // Read Topology -- find list of switch-to-switch internal node connectors
- NetworkTopology networkTopology =
- (NetworkTopology)dataService.readOperationalData(
- InstanceIdentifier.<NetworkTopology>builder(NetworkTopology.class).toInstance());
-
- for (Topology topology : networkTopology.getTopology()) {
- Topology completeTopology =
- (Topology)dataService.readOperationalData(
- InstanceIdentifierUtils.generateTopologyInstanceIdentifier(
- topology.getTopologyId().getValue()));
-
- for (Link link : completeTopology.getLink()) {
- internalNodeConnectors.add(link.getDestination().getDestTp().getValue());
- internalNodeConnectors.add(link.getSource().getSourceTp().getValue());
- }
- }
-
- // Read Inventory -- contains list of all nodeConnectors
- InstanceIdentifier.InstanceIdentifierBuilder<Nodes> nodesInsIdBuilder = InstanceIdentifier.<Nodes>builder(Nodes.class);
- Nodes nodes = (Nodes)dataService.readOperationalData(nodesInsIdBuilder.toInstance());
- if (nodes != null) {
- for (Node node : nodes.getNode()) {
- Node completeNode = (Node)dataService.readOperationalData(InstanceIdentifierUtils.createNodePath(node.getId()));
- for (NodeConnector nodeConnector : completeNode.getNodeConnector()) {
- // NodeConnector isn't switch-to-switch, so it must be controller-to-switch (internal) or external
- if (!internalNodeConnectors.contains(nodeConnector.getId().getValue())) {
- NodeConnectorRef ncRef = new NodeConnectorRef(
- InstanceIdentifier.<Nodes>builder(Nodes.class).<Node, NodeKey>child(Node.class, node.getKey())
- .<NodeConnector, NodeConnectorKey>child(NodeConnector.class, nodeConnector.getKey()).toInstance());
-
- // External node connectors have "-" in their name for mininet, i.e. "s1-eth1"
- if (nodeConnector.getAugmentation(FlowCapableNodeConnector.class).getName().contains("-")) {
- externalNodeConnectors.add(ncRef);
- }
- // Controller-to-switch internal node connectors
- else {
- controllerSwitchConnectors.put(node.getId().getValue(), ncRef);
- }
- }
- }
- }
- }
-
- return externalNodeConnectors;
- }
-}
\ No newline at end of file
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.packet;
-
-import org.opendaylight.controller.sample.l2switch.md.addresstracker.AddressTracker;
-import org.opendaylight.controller.sample.l2switch.md.flow.FlowWriterService;
-import org.opendaylight.controller.sample.l2switch.md.inventory.InventoryService;
-import org.opendaylight.controller.sample.l2switch.md.util.InstanceIdentifierUtils;
-import org.opendaylight.controller.sal.packet.Ethernet;
-import org.opendaylight.controller.sal.packet.LLDP;
-import org.opendaylight.controller.sal.packet.LinkEncap;
-import org.opendaylight.controller.sal.packet.Packet;
-import org.opendaylight.controller.sal.packet.RawPacket;
-import org.opendaylight.controller.sal.utils.HexEncode;
-import org.opendaylight.controller.sal.utils.NetUtils;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev100924.MacAddress;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.l2.address.tracker.rev140402.l2.addresses.L2Address;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.PacketProcessingListener;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.PacketProcessingService;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.PacketReceived;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.TransmitPacketInput;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.packet.service.rev130709.TransmitPacketInputBuilder;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * PacketHandler examines Ethernet packets to find L2Addresses (mac, nodeConnector) pairings
- * of the sender and learns them.
- * It also forwards the data packets appropriately dependending upon whether it knows about the
- * target or not.
- */
-public class PacketHandler implements PacketProcessingListener {
-
- private final static Logger _logger = LoggerFactory.getLogger(PacketHandler.class);
-
- private PacketProcessingService packetProcessingService;
- private AddressTracker addressTracker;
- private FlowWriterService flowWriterService;
- private InventoryService inventoryService;
-
- public void setAddressTracker(AddressTracker addressTracker) {
- this.addressTracker = addressTracker;
- }
-
- public void setPacketProcessingService(PacketProcessingService packetProcessingService) {
- this.packetProcessingService = packetProcessingService;
- }
-
- public void setFlowWriterService(FlowWriterService flowWriterService) {
- this.flowWriterService = flowWriterService;
- }
-
- public void setInventoryService(InventoryService inventoryService) {
- this.inventoryService = inventoryService;
- }
-
- /**
- * The handler function for all incoming packets.
- * @param packetReceived The incoming packet.
- */
- @Override
- public void onPacketReceived(PacketReceived packetReceived) {
-
- if(packetReceived == null) return;
-
- try {
- byte[] payload = packetReceived.getPayload();
- RawPacket rawPacket = new RawPacket(payload);
- NodeConnectorRef ingress = packetReceived.getIngress();
-
- Packet packet = decodeDataPacket(rawPacket);
-
- if(!(packet instanceof Ethernet)) return;
-
- handleEthernetPacket(packet, ingress);
-
- } catch(Exception e) {
- _logger.error("Failed to handle packet {}", packetReceived, e);
- }
- }
-
- /**
- * The handler function for Ethernet packets.
- * @param packet The incoming Ethernet packet.
- * @param ingress The NodeConnector where the Ethernet packet came from.
- */
- private void handleEthernetPacket(Packet packet, NodeConnectorRef ingress) {
- byte[] srcMac = ((Ethernet) packet).getSourceMACAddress();
- byte[] destMac = ((Ethernet) packet).getDestinationMACAddress();
-
- if (srcMac == null || srcMac.length == 0) return;
-
- Object enclosedPacket = packet.getPayload();
-
- if (enclosedPacket instanceof LLDP)
- return; // LLDP packets are handled by OpenFlowPlugin
-
- // get l2address by src mac
- // if unknown, add l2address
- MacAddress srcMacAddress = toMacAddress(srcMac);
- L2Address src = addressTracker.getAddress(srcMacAddress);
- boolean isSrcKnown = (src != null);
- if (!isSrcKnown) {
- addressTracker.addAddress(srcMacAddress, ingress);
- }
-
- // get host by dest mac
- // if known set dest known to true
- MacAddress destMacAddress = toMacAddress(destMac);
- L2Address dest = addressTracker.getAddress(destMacAddress);
- boolean isDestKnown = (dest != null);
-
- byte[] payload = packet.getRawPayload();
- // if (src and dest known)
- // sendpacket to dest and add src<->dest flow
- if(isSrcKnown & isDestKnown) {
- flowWriterService.addMacToMacFlowsUsingShortestPath(srcMacAddress, src.getNodeConnectorRef(),
- destMacAddress, dest.getNodeConnectorRef());
- sendPacketOut(payload, getControllerNodeConnector(dest.getNodeConnectorRef()), dest.getNodeConnectorRef());
- } else {
- // if (dest unknown)
- // sendpacket to external links minus ingress
- floodExternalPorts(payload, ingress);
- }
- }
-
- /**
- * Floods the specified payload on external ports, which are ports not connected to switches.
- * @param payload The payload to be flooded.
- * @param ingress The NodeConnector where the payload came from.
- */
- private void floodExternalPorts(byte[] payload, NodeConnectorRef ingress) {
- List<NodeConnectorRef> externalPorts = inventoryService.getExternalNodeConnectors();
- externalPorts.remove(ingress);
-
- for (NodeConnectorRef egress : externalPorts) {
- sendPacketOut(payload, getControllerNodeConnector(egress), egress);
- }
- }
-
- /**
- * Sends the specified packet on the specified port.
- * @param payload The payload to be sent.
- * @param ingress The NodeConnector where the payload came from.
- * @param egress The NodeConnector where the payload will go.
- */
- private void sendPacketOut(byte[] payload, NodeConnectorRef ingress, NodeConnectorRef egress) {
- if (ingress == null || egress == null) return;
- InstanceIdentifier<Node> egressNodePath = InstanceIdentifierUtils.getNodePath(egress.getValue());
- TransmitPacketInput input = new TransmitPacketInputBuilder() //
- .setPayload(payload) //
- .setNode(new NodeRef(egressNodePath)) //
- .setEgress(egress) //
- .setIngress(ingress) //
- .build();
- packetProcessingService.transmitPacket(input);
- }
-
- /**
- * Decodes an incoming packet.
- * @param raw The raw packet to be decoded.
- * @return The decoded form of the raw packet.
- */
- private Packet decodeDataPacket(RawPacket raw) {
- if(raw == null) {
- return null;
- }
- byte[] data = raw.getPacketData();
- if(data.length <= 0) {
- return null;
- }
- if(raw.getEncap().equals(LinkEncap.ETHERNET)) {
- Ethernet res = new Ethernet();
- try {
- res.deserialize(data, 0, data.length * NetUtils.NumBitsInAByte);
- res.setRawPayload(raw.getPacketData());
- } catch(Exception e) {
- _logger.warn("Failed to decode packet: {}", e.getMessage());
- }
- return res;
- }
- return null;
- }
-
- /**
- * Creates a MacAddress object out of a byte array.
- * @param dataLinkAddress The byte-array form of a MacAddress
- * @return MacAddress of the specified dataLinkAddress.
- */
- private MacAddress toMacAddress(byte[] dataLinkAddress) {
- return new MacAddress(HexEncode.bytesToHexStringFormat(dataLinkAddress));
- }
-
- /**
- * Gets the NodeConnector that connects the controller & switch for a specified switch port/node connector.
- * @param nodeConnectorRef The nodeConnector of a switch.
- * @return The NodeConnector that that connects the controller & switch.
- */
- private NodeConnectorRef getControllerNodeConnector(NodeConnectorRef nodeConnectorRef) {
- NodeConnectorRef controllerSwitchNodeConnector = null;
- HashMap<String, NodeConnectorRef> controllerSwitchConnectors = inventoryService.getControllerSwitchConnectors();
- InstanceIdentifier<Node> nodePath = InstanceIdentifierUtils.getNodePath(nodeConnectorRef.getValue());
- if (nodePath != null) {
- NodeKey nodeKey = InstanceIdentifierUtils.getNodeKey(nodePath);
- if (nodeKey != null) {
- controllerSwitchNodeConnector = controllerSwitchConnectors.get(nodeKey.getId().getValue());
- }
- }
- return controllerSwitchNodeConnector;
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.topology;
-
-import com.google.common.base.Preconditions;
-import edu.uci.ics.jung.algorithms.shortestpath.DijkstraShortestPath;
-import edu.uci.ics.jung.graph.DirectedSparseGraph;
-import edu.uci.ics.jung.graph.Graph;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-/**
- * Implementation of NetworkGraphService{@link org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService}.
- * It uses Jung graph library internally to maintain a graph and optimum way to return shortest path using
- * Dijkstra algorithm.
- */
-public class NetworkGraphDijkstra implements NetworkGraphService {
-
- private static final Logger _logger = LoggerFactory.getLogger(NetworkGraphDijkstra.class);
-
- DijkstraShortestPath<NodeId, Link> shortestPath = null;
- Graph<NodeId, Link> networkGraph = null;
-
- /**
- * Adds links to existing graph or creates new directed graph with given links if graph was not initialized.
- * @param links
- */
- @Override
- public synchronized void addLinks(List<Link> links) {
- if(links == null || links.isEmpty()) {
- _logger.info("In addLinks: No link added as links is null or empty.");
- return;
- }
-
- if(networkGraph == null) {
- networkGraph = new DirectedSparseGraph<>();
- }
-
- for(Link link : links) {
- NodeId sourceNodeId = link.getSource().getSourceNode();
- NodeId destinationNodeId = link.getDestination().getDestNode();
- networkGraph.addVertex(sourceNodeId);
- networkGraph.addVertex(destinationNodeId);
- networkGraph.addEdge(link, sourceNodeId, destinationNodeId);
- }
- if(shortestPath == null) {
- shortestPath = new DijkstraShortestPath<>(networkGraph);
- } else {
- shortestPath.reset();
- }
- }
-
- /**
- * removes links from existing graph.
- * @param links
- */
- @Override
- public synchronized void removeLinks(List<Link> links) {
- Preconditions.checkNotNull(networkGraph, "Graph is not initialized, add links first.");
-
- if(links == null || links.isEmpty()) {
- _logger.info("In removeLinks: No link removed as links is null or empty.");
- return;
- }
-
- for(Link link : links) {
- networkGraph.removeEdge(link);
- }
-
- if(shortestPath == null) {
- shortestPath = new DijkstraShortestPath<>(networkGraph);
- } else {
- shortestPath.reset();
- }
- }
-
- /**
- * returns a path between 2 nodes. Uses Dijkstra's algorithm to return shortest path.
- * @param sourceNodeId
- * @param destinationNodeId
- * @return
- */
- @Override
- public synchronized List<Link> getPath(NodeId sourceNodeId, NodeId destinationNodeId) {
- Preconditions.checkNotNull(shortestPath, "Graph is not initialized, add links first.");
-
- if(sourceNodeId == null || destinationNodeId == null) {
- _logger.info("In getPath: returning null, as sourceNodeId or destinationNodeId is null.");
- return null;
- }
-
- return shortestPath.getPath(sourceNodeId, destinationNodeId);
- }
-
- /**
- * Clears the prebuilt graph, in case same service instance is required to process a new graph.
- */
- @Override
- public synchronized void clear() {
- networkGraph = null;
- shortestPath = null;
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.topology;
-
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-
-import java.util.List;
-
-/**
- * Service that allows to build a network graph using Topology links
- * {@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link}
- * and exposes operation that can be performed on such graph.
- */
-public interface NetworkGraphService {
-
- /**
- * Adds links to existing graph or creates new graph with given links if graph was not initialized.
- * @param links
- */
- public void addLinks(List<Link> links);
-
- /**
- * removes links from existing graph.
- * @param links
- */
- public void removeLinks(List<Link> links);
-
- /**
- * returns a path between 2 nodes. Implementation should ideally return shortest path.
- * @param sourceNodeId
- * @param destinationNodeId
- * @return
- */
- public List<Link> getPath(NodeId sourceNodeId, NodeId destinationNodeId);
-
- /**
- * Clears the prebuilt graph, in case same service instance is required to process a new graph.
- */
- public void clear();
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.topology;
-
-import com.google.common.base.Preconditions;
-import org.opendaylight.controller.sample.l2switch.md.util.InstanceIdentifierUtils;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TopologyId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-import org.opendaylight.yangtools.yang.binding.DataObject;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Listens to data change events on topology links
- * {@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link}
- * and maintains a topology graph using provided NetworkGraphService
- * {@link org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService}.
- * It refreshes the graph after a delay(default 10 sec) to accommodate burst of change events if they come in bulk.
- * This is to avoid continuous refresh of graph on a series of change events in short time.
- */
-public class TopologyLinkDataChangeHandler implements DataChangeListener {
- private static final Logger _logger = LoggerFactory.getLogger(TopologyLinkDataChangeHandler.class);
- private static final String DEFAULT_TOPOLOGY_ID = "flow:1";
-
- private boolean networkGraphRefreshScheduled = false;
- private final ScheduledExecutorService networkGraphRefreshScheduler = Executors.newScheduledThreadPool(1);
- private final long DEFAULT_GRAPH_REFRESH_DELAY = 10;
- private final long graphRefreshDelayInSec;
-
- private final NetworkGraphService networkGraphService;
- private final DataBrokerService dataBrokerService;
-
- /**
- * Uses default delay to refresh topology graph if this constructor is used.
- * @param dataBrokerService
- * @param networkGraphService
- */
- public TopologyLinkDataChangeHandler(DataBrokerService dataBrokerService, NetworkGraphService networkGraphService) {
- Preconditions.checkNotNull(dataBrokerService, "dataBrokerService should not be null.");
- Preconditions.checkNotNull(networkGraphService, "networkGraphService should not be null.");
- this.dataBrokerService = dataBrokerService;
- this.networkGraphService = networkGraphService;
- this.graphRefreshDelayInSec = DEFAULT_GRAPH_REFRESH_DELAY;
- }
-
- /**
- *
- * @param dataBrokerService
- * @param networkGraphService
- * @param graphRefreshDelayInSec
- */
- public TopologyLinkDataChangeHandler(DataBrokerService dataBrokerService, NetworkGraphService networkGraphService,
- long graphRefreshDelayInSec) {
- Preconditions.checkNotNull(dataBrokerService, "dataBrokerService should not be null.");
- Preconditions.checkNotNull(networkGraphService, "networkGraphService should not be null.");
- this.dataBrokerService = dataBrokerService;
- this.networkGraphService = networkGraphService;
- this.graphRefreshDelayInSec = graphRefreshDelayInSec;
- }
-
- /**
- * Based on if links have been added or removed in topology data store, schedules a refresh of network graph.
- * @param dataChangeEvent
- */
- @Override
- public void onDataChanged(DataChangeEvent<InstanceIdentifier<?>, DataObject> dataChangeEvent) {
- if(dataChangeEvent == null) {
- _logger.info("In onDataChanged: No Processing done as dataChangeEvent is null.");
- }
- Map<InstanceIdentifier<?>, DataObject> linkOriginalData = dataChangeEvent.getOriginalOperationalData();
- Map<InstanceIdentifier<?>, DataObject> linkUpdatedData = dataChangeEvent.getUpdatedOperationalData();
- // change this logic, once MD-SAL start populating DeletedOperationData Set
- if(linkOriginalData != null && linkUpdatedData != null
- && (linkOriginalData.size() != 0 || linkUpdatedData.size() != 0)
- && !networkGraphRefreshScheduled) {
- networkGraphRefreshScheduled = linkOriginalData.size() != linkUpdatedData.size();
- if(networkGraphRefreshScheduled) {
- networkGraphRefreshScheduler.schedule(new NetworkGraphRefresher(), graphRefreshDelayInSec, TimeUnit.SECONDS);
- }
- }
-
- }
-
- /**
- * Registers as a data listener to receive changes done to
- * {@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link}
- * under {@link org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology}
- * operation data root.
- */
-
- public void registerAsDataChangeListener() {
- InstanceIdentifier<Link> linkInstance = InstanceIdentifier.builder(NetworkTopology.class)
- .child(Topology.class, new TopologyKey(new TopologyId(DEFAULT_TOPOLOGY_ID))).child(Link.class).toInstance();
- dataBrokerService.registerDataChangeListener(linkInstance, this);
- }
-
- /**
- *
- */
- private class NetworkGraphRefresher implements Runnable {
- /**
- *
- */
- @Override
- public void run() {
- networkGraphRefreshScheduled = false;
- //TODO: it should refer to changed links only from DataChangeEvent above.
- List<Link> links = getLinksFromTopology(DEFAULT_TOPOLOGY_ID);
- networkGraphService.clear();// can remove this once changed links are addressed
- if(links != null && !links.isEmpty()) {
- networkGraphService.addLinks(links);
- }
- }
-
- /**
- * @param topologyId
- * @return
- */
- private List<Link> getLinksFromTopology(String topologyId) {
- InstanceIdentifier<Topology> topologyInstanceIdentifier = InstanceIdentifierUtils.generateTopologyInstanceIdentifier(topologyId);
- Topology topology = (Topology) dataBrokerService.readOperationalData(topologyInstanceIdentifier);
- return topology.getLink();
- }
- }
-}
+++ /dev/null
-/*
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.util;
-
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowCapableNode;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.Table;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.TableKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.Flow;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.tables.table.FlowKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TopologyId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-/* InstanceIdentifierUtils provides utility functions related to InstanceIdentifiers.
- */
-public final class InstanceIdentifierUtils {
-
- private InstanceIdentifierUtils() {
- throw new UnsupportedOperationException("Utility class should never be instantiated");
- }
-
- /**
- * Creates an Instance Identifier (path) for node with specified id
- *
- * @param nodeId
- * @return
- */
- public static final InstanceIdentifier<Node> createNodePath(final NodeId nodeId) {
- return InstanceIdentifier.builder(Nodes.class) //
- .child(Node.class, new NodeKey(nodeId)) //
- .build();
- }
-
- /**
- * Shorten's node child path to node path.
- *
- * @param nodeChild child of node, from which we want node path.
- * @return
- */
- public static final InstanceIdentifier<Node> getNodePath(final InstanceIdentifier<?> nodeChild) {
- return nodeChild.firstIdentifierOf(Node.class);
- }
-
-
- /**
- * Creates a table path by appending table specific location to node path
- *
- * @param nodePath
- * @param tableKey
- * @return
- */
- public static final InstanceIdentifier<Table> createTablePath(final InstanceIdentifier<Node> nodePath, final TableKey tableKey) {
- return nodePath.builder()
- .augmentation(FlowCapableNode.class)
- .child(Table.class, tableKey)
- .build();
- }
-
- /**
- * Creates a path for particular flow, by appending flow-specific information
- * to table path.
- *
- * @param table
- * @param flowKey
- * @return
- */
- public static InstanceIdentifier<Flow> createFlowPath(final InstanceIdentifier<Table> table, final FlowKey flowKey) {
- return table.child(Flow.class, flowKey);
- }
-
- /**
- * Extract table id from table path.
- *
- * @param tablePath
- * @return
- */
- public static Short getTableId(final InstanceIdentifier<Table> tablePath) {
- return tablePath.firstKeyOf(Table.class, TableKey.class).getId();
- }
-
- /**
- * Extracts NodeConnectorKey from node connector path.
- */
- public static NodeConnectorKey getNodeConnectorKey(final InstanceIdentifier<?> nodeConnectorPath) {
- return nodeConnectorPath.firstKeyOf(NodeConnector.class, NodeConnectorKey.class);
- }
-
- /**
- * Extracts NodeKey from node path.
- */
- public static NodeKey getNodeKey(final InstanceIdentifier<?> nodePath) {
- return nodePath.firstKeyOf(Node.class, NodeKey.class);
- }
-
-
- //
- public static final InstanceIdentifier<NodeConnector> createNodeConnectorIdentifier(final String nodeIdValue,
- final String nodeConnectorIdValue) {
- return createNodePath(new NodeId(nodeIdValue))
- .child(NodeConnector.class, new NodeConnectorKey(new NodeConnectorId(nodeConnectorIdValue)));
- }
-
- /**
- * @param nodeConnectorRef
- * @return
- */
- public static InstanceIdentifier<Node> generateNodeInstanceIdentifier(final NodeConnectorRef nodeConnectorRef) {
- return nodeConnectorRef.getValue().firstIdentifierOf(Node.class);
- }
-
- /**
- * @param nodeConnectorRef
- * @param flowTableKey
- * @return
- */
- public static InstanceIdentifier<Table> generateFlowTableInstanceIdentifier(final NodeConnectorRef nodeConnectorRef, final TableKey flowTableKey) {
- return generateNodeInstanceIdentifier(nodeConnectorRef).builder()
- .augmentation(FlowCapableNode.class)
- .child(Table.class, flowTableKey)
- .build();
- }
-
- /**
- * @param nodeConnectorRef
- * @param flowTableKey
- * @param flowKey
- * @return
- */
- public static InstanceIdentifier<Flow> generateFlowInstanceIdentifier(final NodeConnectorRef nodeConnectorRef,
- final TableKey flowTableKey,
- final FlowKey flowKey) {
- return generateFlowTableInstanceIdentifier(nodeConnectorRef, flowTableKey).child(Flow.class, flowKey);
- }
-
- public static InstanceIdentifier<Topology> generateTopologyInstanceIdentifier(final String topologyId) {
- return InstanceIdentifier.builder(NetworkTopology.class)
- .child(Topology.class, new TopologyKey(new TopologyId(topologyId)))
- .build();
- }
-}
-
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.flow;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.fail;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
-import org.opendaylight.controller.sample.l2switch.md.topology.NetworkGraphService;
-import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev100924.MacAddress;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorRef;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeId;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.Nodes;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnector;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.node.NodeConnectorKey;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.Node;
-import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.nodes.NodeKey;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-/**
- */
-public class FlowWriterServiceImplTest {
- private DataBrokerService dataBrokerService;
- private NodeConnectorRef srcNodeConnectorRef;
- private NodeConnectorRef destNodeConnectorRef;
- private MacAddress destMacAddress;
- private MacAddress srcMacAddress;
- private DataModificationTransaction dataModificationTransaction;
- private NetworkGraphService networkGraphService;
-
- @Before
- public void init() {
- dataBrokerService = mock(DataBrokerService.class);
- networkGraphService = mock(NetworkGraphService.class);
- //build source node connector ref
- InstanceIdentifier<Nodes> srcNodesInstanceIdentifier = InstanceIdentifier.create(Nodes.class);
- InstanceIdentifier<Node> srcNodeInstanceIdentifier = srcNodesInstanceIdentifier
- .child(Node.class, new NodeKey(new NodeId("openflow:1")));
- InstanceIdentifier<NodeConnector> srcNodeConnectorInstanceIdentifier = srcNodeInstanceIdentifier
- .child(NodeConnector.class, new NodeConnectorKey(new NodeConnectorId("openflow:1:2")));
- srcNodeConnectorRef = new NodeConnectorRef(srcNodeConnectorInstanceIdentifier);
-
- //build dest node connector ref
- InstanceIdentifier<Nodes> nodesInstanceIdentifier
- = InstanceIdentifier.builder(Nodes.class)
- .build();
- InstanceIdentifier<Node> nodeInstanceIdentifier =
- nodesInstanceIdentifier.child(Node.class, new NodeKey(new NodeId("openflow:2")));
- InstanceIdentifier<NodeConnector> nodeConnectorInstanceIdentifier =
- nodeInstanceIdentifier.child(NodeConnector.class, new NodeConnectorKey(new NodeConnectorId("openflow:2:2")));
- destNodeConnectorRef = new NodeConnectorRef(nodeConnectorInstanceIdentifier);
- destMacAddress = new MacAddress("00:0a:95:9d:68:16");
- srcMacAddress = new MacAddress("00:0a:95:8c:97:24");
- dataModificationTransaction = mock(DataModificationTransaction.class);
- when(dataBrokerService.beginTransaction()).thenReturn(dataModificationTransaction);
- }
-
- @Test
- public void testFlowWriterServiceImpl_NPEWhenDataBrokerServiceIsNull() throws Exception {
- try {
- new FlowWriterServiceImpl(null, networkGraphService);
- fail("Expected null pointer exception.");
- } catch(NullPointerException npe) {
- assertEquals("dataBrokerService should not be null.", npe.getMessage());
- }
- }
-
- @Test
- public void testAddMacToMacFlow_NPEWhenNullSourceMacDestMacAndNodeConnectorRef() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- try {
- flowWriterService.addMacToMacFlow(null, null, null);
- fail("Expected null pointer exception.");
- } catch(NullPointerException npe) {
- assertEquals("Destination mac address should not be null.", npe.getMessage());
- }
- }
-
- @Test
- public void testAddMacToMacFlow_NPEWhenSourceMacNullMac() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- try {
- flowWriterService.addMacToMacFlow(null, null, destNodeConnectorRef);
- fail("Expected null pointer exception.");
- } catch(NullPointerException npe) {
- assertEquals("Destination mac address should not be null.", npe.getMessage());
- }
- }
-
- @Test
- public void testAddMacToMacFlow_NPEWhenNullSourceMacNodeConnectorRef() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- try {
- flowWriterService.addMacToMacFlow(null, destMacAddress, null);
- fail("Expected null pointer exception.");
- } catch(NullPointerException npe) {
- assertEquals("Destination port should not be null.", npe.getMessage());
- }
- }
-
- @Test
- public void testAddMacToMacFlow_WhenNullSourceMac() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- flowWriterService.addMacToMacFlow(null, destMacAddress, destNodeConnectorRef);
- verify(dataBrokerService, times(1)).beginTransaction();
- verify(dataModificationTransaction, times(1)).commit();
- }
-
- @Test
- public void testAddMacToMacFlow_WhenSrcAndDestMacAreSame() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- flowWriterService.addMacToMacFlow(new MacAddress(destMacAddress.getValue()), destMacAddress, destNodeConnectorRef);
- verify(dataBrokerService, never()).beginTransaction();
- verify(dataModificationTransaction, never()).commit();
-
- }
-
- @Test
- public void testAddMacToMacFlow_SunnyDay() throws Exception {
- FlowWriterService flowWriterService = new FlowWriterServiceImpl(dataBrokerService, networkGraphService);
- flowWriterService.addMacToMacFlow(srcMacAddress, destMacAddress, destNodeConnectorRef);
- verify(dataBrokerService, times(1)).beginTransaction();
- verify(dataModificationTransaction, times(1)).commit();
- }
-
-}
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.topology;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.link.attributes.Destination;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.link.attributes.Source;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static junit.framework.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-/**
- */
-public class NetworkGraphDijkstraTest {
- Link link1, link2, link3, link4, link5, link6, link7, link8, link9, link10,link11,link12;
- Destination dest1, dest2, dest3, dest4, dest5, dest6,dest7,dest8,dest9,dest10,dest11,dest12;
- Source src1, src2, src3, src4, src5, src6,src7,src8,src9,src10,src11,src12;
- NodeId nodeId1 = new NodeId("openflow:1");
- NodeId nodeId2 = new NodeId("openflow:2");
- NodeId nodeId3 = new NodeId("openflow:3");
- NodeId nodeId4 = new NodeId("openflow:4");
- NodeId nodeId5 = new NodeId("openflow:5");
- NodeId nodeId6 = new NodeId("openflow:6");
- NodeId nodeId7 = new NodeId("openflow:7");
- List<Link> links = new ArrayList<>();
-
- @Before
- public void init() {
- link1 = mock(Link.class);
- link2 = mock(Link.class);
- link3 = mock(Link.class);
- link4 = mock(Link.class);
- link5 = mock(Link.class);
- link6 = mock(Link.class);
- link7 = mock(Link.class);
- link8 = mock(Link.class);
- link9 = mock(Link.class);
- link10 = mock(Link.class);
- link11 = mock(Link.class);
- link12 = mock(Link.class);
- dest1 = mock(Destination.class);
- dest2 = mock(Destination.class);
- dest3 = mock(Destination.class);
- dest4 = mock(Destination.class);
- dest5 = mock(Destination.class);
- dest6 = mock(Destination.class);
- dest7 = mock(Destination.class);
- dest8 = mock(Destination.class);
- dest9 = mock(Destination.class);
- dest10 = mock(Destination.class);
- dest11 = mock(Destination.class);
- dest12 = mock(Destination.class);
- src1 = mock(Source.class);
- src2 = mock(Source.class);
- src3 = mock(Source.class);
- src4 = mock(Source.class);
- src5 = mock(Source.class);
- src6 = mock(Source.class);
- src7 = mock(Source.class);
- src8 = mock(Source.class);
- src9 = mock(Source.class);
- src10 = mock(Source.class);
- src11 = mock(Source.class);
- src12 = mock(Source.class);
- when(link1.getSource()).thenReturn(src1);
- when(link2.getSource()).thenReturn(src2);
- when(link3.getSource()).thenReturn(src3);
- when(link4.getSource()).thenReturn(src4);
- when(link5.getSource()).thenReturn(src5);
- when(link6.getSource()).thenReturn(src6);
- when(link7.getSource()).thenReturn(src7);
- when(link8.getSource()).thenReturn(src8);
- when(link9.getSource()).thenReturn(src9);
- when(link10.getSource()).thenReturn(src10);
- when(link11.getSource()).thenReturn(src11);
- when(link12.getSource()).thenReturn(src12);
- when(link1.getDestination()).thenReturn(dest1);
- when(link2.getDestination()).thenReturn(dest2);
- when(link3.getDestination()).thenReturn(dest3);
- when(link4.getDestination()).thenReturn(dest4);
- when(link5.getDestination()).thenReturn(dest5);
- when(link6.getDestination()).thenReturn(dest6);
- when(link7.getDestination()).thenReturn(dest7);
- when(link8.getDestination()).thenReturn(dest8);
- when(link9.getDestination()).thenReturn(dest9);
- when(link10.getDestination()).thenReturn(dest10);
- when(link11.getDestination()).thenReturn(dest11);
- when(link12.getDestination()).thenReturn(dest12);
- when(src1.getSourceNode()).thenReturn(nodeId1);
- when(dest1.getDestNode()).thenReturn(nodeId2);
- when(src2.getSourceNode()).thenReturn(nodeId2);
- when(dest2.getDestNode()).thenReturn(nodeId1);
- when(src3.getSourceNode()).thenReturn(nodeId1);
- when(dest3.getDestNode()).thenReturn(nodeId3);
- when(src4.getSourceNode()).thenReturn(nodeId3);
- when(dest4.getDestNode()).thenReturn(nodeId1);
- when(src5.getSourceNode()).thenReturn(nodeId2);
- when(dest5.getDestNode()).thenReturn(nodeId4);
- when(src6.getSourceNode()).thenReturn(nodeId4);
- when(dest6.getDestNode()).thenReturn(nodeId2);
- when(src7.getSourceNode()).thenReturn(nodeId2);
- when(dest7.getDestNode()).thenReturn(nodeId5);
- when(src8.getSourceNode()).thenReturn(nodeId5);
- when(dest8.getDestNode()).thenReturn(nodeId2);
- when(src9.getSourceNode()).thenReturn(nodeId6);
- when(dest9.getDestNode()).thenReturn(nodeId3);
- when(src10.getSourceNode()).thenReturn(nodeId3);
- when(dest10.getDestNode()).thenReturn(nodeId6);
- when(src11.getSourceNode()).thenReturn(nodeId7);
- when(dest11.getDestNode()).thenReturn(nodeId3);
- when(src12.getSourceNode()).thenReturn(nodeId3);
- when(dest12.getDestNode()).thenReturn(nodeId7);
- links.add(link1);
- links.add(link2);
- links.add(link3);
- links.add(link4);
- links.add(link5);
- links.add(link6);
- links.add(link7);
- links.add(link8);
- links.add(link9);
- links.add(link10);
- links.add(link11);
- links.add(link12);
-
- }
-
- @Test
- public void testAddLinksAndGetPath() throws Exception {
- NetworkGraphService networkGraphService = new NetworkGraphDijkstra();
- networkGraphService.addLinks(links);
- List<Link> path = networkGraphService.getPath(nodeId2, nodeId3);
- assertEquals("path size is not as expected.", 2, path.size());
- assertEquals("link source is not as expected.", nodeId2, path.get(0).getSource().getSourceNode());
- assertEquals("link destination is not as expected.", nodeId1, path.get(0).getDestination().getDestNode());
- path = networkGraphService.getPath(nodeId3, nodeId2);
- assertEquals("path size is not as expected.", 2, path.size());
- assertEquals("link source is not as expected.", nodeId3, path.get(0).getSource().getSourceNode());
- assertEquals("link destination is not as expected.", nodeId1, path.get(0).getDestination().getDestNode());
-
- path = networkGraphService.getPath(nodeId4, nodeId6);
- assertEquals("path size is not as expected.", 4, path.size());
- assertEquals("link source is not as expected.", nodeId4, path.get(0).getSource().getSourceNode());
- assertEquals("link destination is not as expected.", nodeId2, path.get(0).getDestination().getDestNode());
- }
-}
+++ /dev/null
-/**
- * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.sample.l2switch.md.topology;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.opendaylight.controller.sample.l2switch.md.util.InstanceIdentifierUtils;
-import org.opendaylight.controller.md.sal.common.api.data.DataChangeEvent;
-import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
-import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Link;
-import org.opendaylight.yangtools.yang.binding.DataObject;
-import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-/**
- */
-public class TopologyLinkDataChangeHandlerTest {
- NetworkGraphService networkGraphService;
- DataBrokerService dataBrokerService;
- DataChangeEvent dataChangeEvent;
- Topology topology;
- Link link;
-
- @Before
- public void init() {
- networkGraphService = mock(NetworkGraphService.class);
- dataBrokerService = mock(DataBrokerService.class);
- dataChangeEvent = mock(DataChangeEvent.class);
- link = mock(Link.class);
- topology = mock(Topology.class);
- }
-
- @Test
- public void testOnDataChange() throws Exception {
- TopologyLinkDataChangeHandler topologyLinkDataChangeHandler = new TopologyLinkDataChangeHandler(dataBrokerService, networkGraphService, 2);
- Map<InstanceIdentifier<?>, DataObject> original = new HashMap<InstanceIdentifier<?>, DataObject>();
- InstanceIdentifier<?> instanceIdentifier = InstanceIdentifierUtils.generateTopologyInstanceIdentifier("flow:1");
- DataObject dataObject = mock(DataObject.class);
- Map<InstanceIdentifier<?>, DataObject> updated = new HashMap<InstanceIdentifier<?>, DataObject>();
- updated.put(instanceIdentifier, dataObject);
- when(dataChangeEvent.getUpdatedOperationalData()).thenReturn(updated);
- when(dataChangeEvent.getOriginalOperationalData()).thenReturn(original);
- List<Link> links = new ArrayList<>();
- links.add(link);
- when(dataBrokerService.readOperationalData(instanceIdentifier)).thenReturn(topology);
- when(topology.getLink()).thenReturn(links);
-
- topologyLinkDataChangeHandler.onDataChanged(dataChangeEvent);
- Thread.sleep(2100);
- verify(networkGraphService, times(1)).addLinks(links);
- }
-}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.opendaylight.controller.samples</groupId>
- <artifactId>sal-samples</artifactId>
- <version>1.2.0-SNAPSHOT</version>
- <relativePath>../..</relativePath>
- </parent>
- <groupId>org.opendaylight.controller.samples.l2switch.md</groupId>
- <artifactId>l2switch-model</artifactId>
- <packaging>bundle</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.controller.model</groupId>
- <artifactId>model-inventory</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools.model</groupId>
- <artifactId>ietf-yang-types</artifactId>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <extensions>true</extensions>
- <configuration>
- <instructions>
- <Bundle-Name>${project.groupId}.${project.artifactId}</Bundle-Name>
- <Import-Package>org.opendaylight.yangtools.yang.binding.annotations, *</Import-Package>
- <manifestLocation>${project.basedir}/META-INF</manifestLocation>
- </instructions>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-maven-plugin</artifactId>
- <version>${yangtools.version}</version>
- <dependencies>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>maven-sal-api-gen-plugin</artifactId>
- <version>${yangtools.version}</version>
- <type>jar</type>
- </dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-binding</artifactId>
- <version>${yangtools.version}</version>
- <type>jar</type>
- </dependency>
- </dependencies>
- <executions>
- <execution>
- <goals>
- <goal>generate-sources</goal>
- </goals>
- <configuration>
- <yangFilesRootDir>src/main/yang</yangFilesRootDir>
- <codeGenerators>
- <generator>
- <codeGeneratorClass>org.opendaylight.yangtools.maven.sal.api.gen.plugin.CodeGeneratorImpl</codeGeneratorClass>
- <outputBaseDir>${salGeneratorPath}</outputBaseDir>
- </generator>
- <generator>
- <codeGeneratorClass>org.opendaylight.yangtools.yang.unified.doc.generator.maven.DocumentationGeneratorImpl</codeGeneratorClass>
- <outputBaseDir>target/site/models</outputBaseDir>
- </generator>
- <generator>
- <codeGeneratorClass>org.opendaylight.yangtools.yang.wadl.generator.maven.WadlGenerator</codeGeneratorClass>
- <outputBaseDir>target/site/models</outputBaseDir>
- </generator>
- </codeGenerators>
- <inspectDependencies>true</inspectDependencies>
- </configuration>
- </execution>
- </executions>
-
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-module l2-address-tracker {
- yang-version 1;
- namespace "urn:opendaylight:l2-address-tracker";
- prefix l2-address-tracker;
-
- import ietf-yang-types {
- prefix yang;
- revision-date 2010-09-24;
- }
- import opendaylight-inventory {
- prefix inv;
- revision-date 2013-08-19;
- }
-
- organization "Cisco Systems Inc";
- contact
- "Alex Fan <alefan@cisco.com>";
- description
- "YANG version of the L2 Address Tracker Data Model";
-
- revision 2014-04-02 {
- description
- "L2 Address Tracker module draft.";
- }
-
- grouping l2-address {
- leaf mac {
- type yang:mac-address;
- mandatory true;
- description
- "the mac address of the host.";
- }
- leaf node-connector-ref {
- type inv:node-connector-ref;
- }
- }
-
- container l2-addresses {
- config false;
- list l2-address {
- key "mac";
- uses l2-address;
- }
- }
-}
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.opendaylight.controller.samples</groupId>
- <artifactId>sal-samples</artifactId>
- <version>1.2.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>l2switch.aggregator</artifactId>
- <groupId>org.opendaylight.controller.samples.l2switch</groupId>
- <version>1.1.0-SNAPSHOT</version>
- <packaging>pom</packaging>
-
- <modules>
- <module>model</module>
- <module>implementation</module>
- </modules>
-
-</project>
<module>toaster-consumer</module>
<module>toaster-provider</module>
<module>toaster-config</module>
- <module>l2switch</module>
<module>clustering-test-app</module>
</modules>
<scm>
org.opendaylight.controller.netconf.confignetconfconnector.util,
org.opendaylight.controller.netconf.confignetconfconnector.osgi,
org.opendaylight.controller.netconf.confignetconfconnector.exception,</Private-Package>
- <Import-Package>com.google.common.base,
- com.google.common.collect,
- javax.annotation,
- javax.management,
- javax.management.openmbean,
- org.opendaylight.controller.config.api,
- org.opendaylight.controller.config.api.jmx,
- org.opendaylight.controller.config.yangjmxgenerator,
- org.opendaylight.controller.config.yangjmxgenerator.attribute,
- org.opendaylight.controller.netconf.api,
- org.opendaylight.controller.netconf.mapping.api,
- org.opendaylight.controller.netconf.util.mapping,
- org.opendaylight.controller.netconf.util.xml,
- org.opendaylight.controller.netconf.util.exception,
- org.opendaylight.yangtools.yang.common,
- org.opendaylight.yangtools.yang.model.api,
- org.osgi.framework,
- org.osgi.util.tracker,
- org.slf4j,
- org.w3c.dom,
- com.google.common.io,
- org.opendaylight.yangtools.yang.model.api.type,
- org.opendaylight.yangtools.sal.binding.generator.spi,
- org.opendaylight.yangtools.sal.binding.yang.types,
- org.opendaylight.controller.config.util
- </Import-Package>
+ <Import-Package>*</Import-Package>
<Export-Package></Export-Package>
</instructions>
</configuration>
package org.opendaylight.controller.netconf.confignetconfconnector.operations.runtimerpc;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.Maps;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.management.ObjectName;
import org.opendaylight.controller.config.api.jmx.ObjectNameUtil;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.confignetconfconnector.mapping.rpc.ModuleRpcs;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.config.rev130405.Modules;
+import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.config.rev130405.modules.Module;
-import javax.management.ObjectName;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.Maps;
/**
* Represents parsed xpath to runtime bean instance
return moduleName;
}
+ @VisibleForTesting
+ Map<String, String> getAdditionalAttributes() {
+ return additionalAttributes;
+ }
+
public String getInstanceName() {
return instanceName;
}
return ObjectNameUtil.createRuntimeBeanName(moduleName, instanceName, additionalAttributesJavaNames);
}
+ /**
+ * Pattern for an absolute instance identifier xpath pointing to a runtime bean instance e.g:
+ * <pre>
+ * /modules/module[name=instanceName][type=moduleType]
+ * </pre>
+ * or
+ * <pre>
+ * /a:modules/a:module[a:name=instanceName][a:type=moduleType]
+ * </pre>
+ */
private static final String xpathPatternBlueprint =
- "/" + XmlNetconfConstants.MODULES_KEY
- + "/" + XmlNetconfConstants.MODULE_KEY
- + "\\["
-
- + "(?<key1>type|name)"
- + "='(?<value1>[^']+)'"
- + "( and |\\]\\[)"
- + "(?<key2>type|name)"
- + "='(?<value2>[^']+)'"
-
- + "\\]"
- + "(?<additional>.*)";
+ "/" + getRegExForPrefixedName(Modules.QNAME.getLocalName())+ "/" + getRegExForPrefixedName(Module.QNAME.getLocalName())
+
+ + "\\["
+ + "(?<key1>" + getRegExForPrefixedName(XmlNetconfConstants.TYPE_KEY) + "|" + getRegExForPrefixedName(XmlNetconfConstants.NAME_KEY) + ")"
+ + "=('|\")?(?<value1>[^'\"\\]]+)('|\")?"
+ + "( and |\\]\\[)"
+ + "(?<key2>" + getRegExForPrefixedName(XmlNetconfConstants.TYPE_KEY) + "|" + getRegExForPrefixedName(XmlNetconfConstants.NAME_KEY) + ")"
+ + "=('|\")?(?<value2>[^'\"\\]]+)('|\")?"
+ + "\\]"
+
+ + "(?<additional>.*)";
+
+ /**
+ * Return reg ex that matches either the name with or without a prefix
+ */
+ private static String getRegExForPrefixedName(final String name) {
+ return "([^:]+:)?" + name;
+ }
private static final Pattern xpathPattern = Pattern.compile(xpathPatternBlueprint);
- private static final String additionalPatternBlueprint = "(?<additionalKey>.+)\\[(.+)='(?<additionalValue>.+)'\\]";
+
+ /**
+ * Pattern for additional path elements inside xpath for instance identifier pointing to an inner runtime bean e.g:
+ * <pre>
+ * /modules/module[name=instanceName and type=moduleType]/inner[key=b]
+ * </pre>
+ */
+ private static final String additionalPatternBlueprint = getRegExForPrefixedName("(?<additionalKey>.+)") + "\\[(?<prefixedKey>" + getRegExForPrefixedName("(.+)") + ")=('|\")?(?<additionalValue>[^'\"\\]]+)('|\")?\\]";
private static final Pattern additionalPattern = Pattern.compile(additionalPatternBlueprint);
public static RuntimeRpcElementResolved fromXpath(String xpath, String elementName, String namespace) {
PatternGroupResolver(String key1, String value1, String value2, String additional) {
this.key1 = Preconditions.checkNotNull(key1);
this.value1 = Preconditions.checkNotNull(value1);
-
this.value2 = Preconditions.checkNotNull(value2);
-
this.additional = Preconditions.checkNotNull(additional);
}
String getModuleName() {
- return key1.equals(XmlNetconfConstants.TYPE_KEY) ? value1 : value2;
+ return key1.contains(XmlNetconfConstants.TYPE_KEY) ? value1 : value2;
}
String getInstanceName() {
- return key1.equals(XmlNetconfConstants.NAME_KEY) ? value1 : value2;
+ return key1.contains(XmlNetconfConstants.NAME_KEY) ? value1 : value2;
}
+
Map<String, String> getAdditionalKeys(String elementName, String moduleName) {
HashMap<String, String> additionalAttributes = Maps.newHashMap();
Preconditions
.checkState(
matcher.matches(),
- "Attribute %s not in required form on rpc element %s, required format for additional attributes is %s",
+ "Attribute %s not in required form on rpc element %s, required format for additional attributes is: %s",
additionalKeyValue, elementName, additionalPatternBlueprint);
String name = matcher.group("additionalKey");
runtimeBeanYangName = name;
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.netconf.confignetconfconnector.operations.runtimerpc;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+@RunWith(Parameterized.class)
+public class RuntimeRpcElementResolvedTest {
+
+ private static final String MODULE_TYPE = "moduleType";
+ private static final String INSTANCE_NAME = "instanceName";
+ @Parameterized.Parameter(0)
+ public String xpath;
+ @Parameterized.Parameter(1)
+ public Map<String, String> additional;
+
+ @Parameterized.Parameters(name = "{index}: parsed({0}) contains moduleName:{1} and instanceName:{2}")
+ public static Collection<Object[]> data() {
+ return Arrays.asList(new Object[][] {
+ // With namespaces
+ { "/a:modules/a:module[a:name='instanceName'][a:type='moduleType']/b:listener-state[b:peer-id='127.0.0.1']",
+ new HashMap<String, String>() {{
+ put("listener-state", "127.0.0.1");
+ }}},
+ { "/a:modules/a:module[a:name='instanceName'][a:type='moduleType']",
+ null},
+
+ // Without namespaces
+ { "/modules/module[name=instanceName][type=moduleType]", null},
+ { "/modules/module[type=moduleType][name='instanceName']", null},
+ { "/modules/module[name=\'instanceName\'][type=\"moduleType\"]", null},
+ { "/modules/module[type=moduleType and name=instanceName]", null},
+ { "/modules/module[name=\"instanceName\" and type=moduleType]", null},
+ { "/modules/module[type=\"moduleType\" and name=instanceName]", null},
+ { "/modules/module[name=\'instanceName\' and type=\"moduleType\"]", null},
+
+ // With inner beans
+ { "/modules/module[name=instanceName and type=\"moduleType\"]/inner[key=b]", Collections.singletonMap("inner", "b")},
+ { "/modules/module[name=instanceName and type=moduleType]/inner[key=b]", Collections.singletonMap("inner", "b")},
+ { "/modules/module[name=instanceName and type=moduleType]/inner[key=\'b\']", Collections.singletonMap("inner", "b")},
+ { "/modules/module[name=instanceName and type=moduleType]/inner[key=\"b\"]", Collections.singletonMap("inner", "b")},
+
+ { "/modules/module[name=instanceName and type=\"moduleType\"]/inner[key2=a]/inner2[key=b]",
+ new HashMap<String, String>() {{
+ put("inner", "a");
+ put("inner2", "b");
+ }}
+ },
+ });
+ }
+
+ @Test
+ public void testFromXpath() throws Exception {
+ final RuntimeRpcElementResolved resolved = RuntimeRpcElementResolved.fromXpath(xpath, "element", "namespace");
+ assertEquals(MODULE_TYPE, resolved.getModuleName());
+ assertEquals(INSTANCE_NAME, resolved.getInstanceName());
+ if (additional != null) {
+ assertEquals(additional, resolved.getAdditionalAttributes());
+ }
+ }
+}
package org.opendaylight.controller.netconf.monitoring;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.TestCase.fail;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
-
import java.util.Collections;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
package org.opendaylight.controller.netconf.monitoring.osgi;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.*;
-
public class NetconfMonitoringActivatorTest {
@Mock
MockitoAnnotations.initMocks(this);
doReturn(filter).when(context).createFilter(anyString());
doNothing().when(context).addServiceListener(any(ServiceListener.class), anyString());
- ServiceReference[] refs = new ServiceReference[2];
+ ServiceReference<?>[] refs = new ServiceReference[2];
doReturn(Arrays.asList(refs)).when(context).getServiceReferences(any(Class.class), anyString());
doReturn(refs).when(context).getServiceReferences(anyString(), anyString());
}
package org.opendaylight.controller.netconf.monitoring.osgi;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
import java.util.Hashtable;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyCollection;
-import static org.mockito.Mockito.*;
-
public class NetconfMonitoringServiceTrackerTest {
@Mock
- private ServiceReference reference;
+ private ServiceReference<NetconfMonitoringService> reference;
@Mock
private BundleContext context;
@Mock
- private ServiceRegistration serviceRegistration;
+ private ServiceRegistration<?> serviceRegistration;
@Mock
private Filter filter;
@Mock
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.doAnswer;
-
import com.google.common.collect.Lists;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
@Test
public void testEncode() throws Exception {
final List<ByteBuf> chunks = Lists.newArrayList();
- doAnswer(new Answer() {
+ doAnswer(new Answer<Object>() {
@Override
public Object answer(final InvocationOnMock invocation) throws Throwable {
chunks.add((ByteBuf) invocation.getArguments()[0]);
package org.opendaylight.controller.netconf.nettyutil.handler;
-import static org.junit.Assert.*;
-
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
import com.google.common.collect.Lists;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.custommonkey.xmlunit.XMLUnit;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
-
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.SettableFuture;
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.Unpooled;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelPromise;
import java.io.IOException;
import java.net.SocketAddress;
-
import org.apache.sshd.ClientChannel;
import org.apache.sshd.ClientSession;
import org.apache.sshd.SshClient;
import org.mockito.stubbing.Answer;
import org.opendaylight.controller.netconf.nettyutil.handler.ssh.authentication.AuthenticationHandler;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.SettableFuture;
-
-import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
-import io.netty.channel.Channel;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.channel.ChannelPromise;
-
public class AsyncSshHandlerTest {
@Mock
private <T extends SshFuture<T>> ListenableFuture<SshFutureListener<T>> stubAddListener(final T future) {
final SettableFuture<SshFutureListener<T>> listenerSettableFuture = SettableFuture.create();
- doAnswer(new Answer() {
+ doAnswer(new Answer<Object>() {
@Override
public Object answer(final InvocationOnMock invocation) throws Throwable {
listenerSettableFuture.set((SshFutureListener<T>) invocation.getArguments()[0]);
package org.opendaylight.controller.netconf.util.mapping;
+import com.google.common.base.Optional;
import java.util.Map;
-
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants;
import org.opendaylight.controller.netconf.mapping.api.HandlingPriority;
import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution;
import org.opendaylight.controller.netconf.util.xml.XmlElement;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
-import com.google.common.base.Optional;
-
public abstract class AbstractNetconfOperation implements NetconfOperation {
private final String netconfSessionIdForReporting;
- private static final Logger logger = LoggerFactory.getLogger(AbstractNetconfOperation.class);
- protected AbstractNetconfOperation(String netconfSessionIdForReporting) {
+ protected AbstractNetconfOperation(final String netconfSessionIdForReporting) {
this.netconfSessionIdForReporting = netconfSessionIdForReporting;
}
}
@Override
- public HandlingPriority canHandle(Document message) throws NetconfDocumentedException {
+ public HandlingPriority canHandle(final Document message) throws NetconfDocumentedException {
OperationNameAndNamespace operationNameAndNamespace = null;
operationNameAndNamespace = new OperationNameAndNamespace(message);
return canHandle(operationNameAndNamespace.getOperationName(), operationNameAndNamespace.getNamespace());
private final String operationName, namespace;
private final XmlElement operationElement;
- public OperationNameAndNamespace(Document message) throws NetconfDocumentedException {
+ public OperationNameAndNamespace(final Document message) throws NetconfDocumentedException {
XmlElement requestElement = null;
requestElement = getRequestElementWithCheck(message);
operationElement = requestElement.getOnlyChildElement();
}
}
- protected static XmlElement getRequestElementWithCheck(Document message) throws NetconfDocumentedException {
+ protected static XmlElement getRequestElementWithCheck(final Document message) throws NetconfDocumentedException {
return XmlElement.fromDomElementWithExpected(message.getDocumentElement(), XmlNetconfConstants.RPC_KEY,
XmlNetconfConstants.URN_IETF_PARAMS_XML_NS_NETCONF_BASE_1_0);
}
- protected HandlingPriority canHandle(String operationName, String operationNamespace) {
+ protected HandlingPriority canHandle(final String operationName, final String operationNamespace) {
return operationName.equals(getOperationName()) && operationNamespace.equals(getOperationNamespace())
? getHandlingPriority()
: HandlingPriority.CANNOT_HANDLE;
protected abstract String getOperationName();
@Override
- public Document handle(Document requestMessage,
- NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
+ public Document handle(final Document requestMessage,
+ final NetconfOperationChainedExecution subsequentOperation) throws NetconfDocumentedException {
XmlElement requestElement = getRequestElementWithCheck(requestMessage);
*/
package org.opendaylight.controller.netconf.util;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
import org.junit.Test;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.w3c.dom.Document;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.fail;
-import static org.junit.matchers.JUnitMatchers.containsString;
-
public class NetconfUtilTest {
@Test
package org.opendaylight.controller.netconf.util.messages;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import com.google.common.base.Optional;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.util.collections.Sets;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
public class NetconfHelloMessageTest {
Set<String> caps;
@Before
- public void setUp() throws Exception {
+ public void setUp() {
caps = Sets.newSet("cap1");
}
@Test
- public void testConstructor() throws Exception {
+ public void testConstructor() throws NetconfDocumentedException {
NetconfHelloMessageAdditionalHeader additionalHeader = new NetconfHelloMessageAdditionalHeader("name","host","1","transp","id");
NetconfHelloMessage message = NetconfHelloMessage.createClientHello(caps, Optional.of(additionalHeader));
- assertTrue(message.isHelloMessage(message));
+ assertTrue(NetconfHelloMessage.isHelloMessage(message));
assertEquals(Optional.of(additionalHeader), message.getAdditionalHeader());
NetconfHelloMessage serverMessage = NetconfHelloMessage.createServerHello(caps, 100L);
- assertTrue(serverMessage.isHelloMessage(serverMessage));
+ assertTrue(NetconfHelloMessage.isHelloMessage(serverMessage));
}
}
package org.opendaylight.controller.netconf.util.messages;
-import com.google.common.base.Charsets;
-import java.util.Arrays;
-import org.junit.Test;
-
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+import com.google.common.base.Charsets;
+import org.junit.Test;
+@Deprecated
public class NetconfMessageHeaderTest {
@Test
public void testGet() throws Exception {
package org.opendaylight.controller.netconf.util.osgi;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
import com.google.common.base.Optional;
import io.netty.channel.local.LocalAddress;
import java.net.InetSocketAddress;
import org.junit.Before;
import org.junit.Test;
-import org.opendaylight.controller.netconf.util.NetconfUtil;
import org.osgi.framework.BundleContext;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-
public class NetconfConfigUtilTest {
private BundleContext bundleContext;
package org.opendaylight.controller.networkconfig.neutron;
import java.io.Serializable;
+import java.net.InetAddress;
+import java.net.Inet6Address;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
* a new subnet)
*/
public boolean isValidCIDR() {
- try {
- SubnetUtils util = new SubnetUtils(cidr);
- SubnetInfo info = util.getInfo();
- if (!info.getNetworkAddress().equals(info.getAddress())) {
+ // fix for Bug 2290 - need to wrap the existing test as
+ // IPv4 because SubnetUtils doesn't support IPv6
+ if (ipVersion == 4) {
+ try {
+ SubnetUtils util = new SubnetUtils(cidr);
+ SubnetInfo info = util.getInfo();
+ if (!info.getNetworkAddress().equals(info.getAddress())) {
+ return false;
+ }
+ } catch (Exception e) {
return false;
}
- } catch (Exception e) {
- return false;
+ return true;
}
- return true;
+ if (ipVersion == 6) {
+ // fix for Bug 2290 - this is custom code because no classes
+ // with ODL-friendly licenses have been found
+ // extract address (in front of /) and length (after /)
+ String[] parts = cidr.split("/");
+ if (parts.length != 2) {
+ return false;
+ }
+ try {
+ int length = Integer.parseInt(parts[1]);
+ //TODO: reject prefix lengths outside [0, 128]; out-of-range values currently skip the bit check and validate as true
+ // convert to byte array
+ byte[] addrBytes = ((Inet6Address) InetAddress.getByName(parts[0])).getAddress();
+ int i;
+ for (i=length; i<128; i++) { // verify every host bit beyond the prefix length is zero
+ if (((((int) addrBytes[i/8]) & 0x000000FF) & (1 << (7-(i%8)))) != 0) {
+ return(false);
+ }
+ }
+ return(true);
+ } catch (Exception e) {
+ return(false);
+ }
+ }
+ return false;
}
/* test to see if the gateway IP specified overlaps with specified