<feature>base-tomcat</feature>
<feature>base-netty</feature>
<feature>base-jersey</feature>
+ <feature>base-jackson</feature>
<feature>base-spring-security</feature>
</feature>
<feature name="base-dummy-console" description="Temporary Dummy Console" version="1.1.0-SNAPSHOT">
<bundle>mvn:com.sun.jersey/jersey-core/${jersey.version}</bundle>
<bundle>mvn:com.sun.jersey/jersey-client/${jersey.version}</bundle>
<bundle>mvn:com.sun.jersey/jersey-servlet/${jersey.version}</bundle>
- <bundle start="true" start-level="35">mvn:javax.ws.rs/javax.ws.rs-api/2.0</bundle>
</feature>
<feature name="base-jersey2-osgi" description="OSGi friendly Jersey" version="${jersey2.publisher.version}">
<feature>http</feature>
<bundle start="true" start-level="35">mvn:orbit/javax.servlet.jsp.jstl/1.2.0.v201105211821</bundle>
<bundle start="true" start-level="35">mvn:orbit/javax.servlet.jsp.jstl.impl/1.2.0.v201210211230</bundle>
</feature>
- <feature name="base-tomcat" description="OpenDaylight Tomcat" version="7.0.32">
+ <feature name="base-tomcat" description="OpenDaylight Tomcat" version="7.0.53">
<feature>base-gemini-web</feature>
<feature>base-eclipselink-persistence</feature>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina/7.0.32.v201211201336</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina/${commons.karaf.catalina}</bundle>
<bundle start="true" start-level="35">mvn:geminiweb/org.eclipse.gemini.web.tomcat/${geminiweb.version}</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina.ha/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina.tribes/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.coyote/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.el/7.0.32.v201211081135</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.jasper/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.juli.extras/7.0.32.v201211081135</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.api/7.0.32.v201211081135</bundle>
- <bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.util/7.0.32.v201211201952</bundle>
- <bundle start="true" start-level="35">wrap:mvn:virgomirror/org.eclipse.jdt.core.compiler.batch/3.8.0.I20120518-2145</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina.ha/${commons.karaf.catalina.ha}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.catalina.tribes/${commons.karaf.catalina.tribes}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.coyote/${commons.karaf.coyote}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.el/${commons.karaf.el}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.jasper/${commons.karaf.jasper}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.juli.extras/${commons.karaf.juli.version}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.api/${commons.karaf.tomcat.api}</bundle>
+ <bundle start="true" start-level="35">mvn:orbit/org.apache.tomcat.util/${commons.karaf.tomcat.util}</bundle>
+ <bundle start="true" >mvn:org.opendaylight.controller/karaf-tomcat-security/${karaf.security.version}</bundle>
+ <bundle start="true" start-level="35">wrap:mvn:virgomirror/org.eclipse.jdt.core.compiler.batch/${eclipse.jdt.core.compiler.batch.version}</bundle>
</feature>
<feature name="base-spring" description="Opendaylight Spring Support" version="${spring.version}">
<bundle>mvn:org.ow2.asm/asm-all/${asm.version}</bundle>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>netconf-config</artifactId>
+ <version>${config.version}</version>
+ <type>xml</type>
+ <classifier>config</classifier>
</dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<commons.catalina>7.0.32.v201211201336</commons.catalina>
<commons.catalina.ha>7.0.32.v201211201952</commons.catalina.ha>
<commons.catalina.tribes>7.0.32.v201211201952</commons.catalina.tribes>
- <commons.checkstyle.version>0.0.3-SNAPSHOT</commons.checkstyle.version>
<commons.coyote>7.0.32.v201211201952</commons.coyote>
<commons.el>7.0.32.v201211081135</commons.el>
+ <commons.jasper>7.0.32.v201211201952</commons.jasper>
+ <commons.juli.version>7.0.32.v201211081135</commons.juli.version>
+ <commons.tomcat.api>7.0.32.v201211081135</commons.tomcat.api>
+ <commons.tomcat.util>7.0.32.v201211201952</commons.tomcat.util>
+
+ <commons.karaf.catalina>7.0.53.v201406061610</commons.karaf.catalina>
+ <commons.karaf.catalina.ha>7.0.53.v201406070630</commons.karaf.catalina.ha>
+ <commons.karaf.catalina.tribes>7.0.53.v201406070630</commons.karaf.catalina.tribes>
+ <commons.karaf.coyote>7.0.53.v201406070630</commons.karaf.coyote>
+ <commons.karaf.el>7.0.53.v201406060720</commons.karaf.el>
+ <commons.karaf.jasper>7.0.53.v201406070630</commons.karaf.jasper>
+ <commons.karaf.juli.version>7.0.53.v201406060720</commons.karaf.juli.version>
+ <commons.karaf.tomcat.api>7.0.53.v201406060720</commons.karaf.tomcat.api>
+ <commons.karaf.tomcat.util>7.0.53.v201406070630</commons.karaf.tomcat.util>
+
+ <commons.checkstyle.version>0.0.3-SNAPSHOT</commons.checkstyle.version>
<commons.fileupload.version>1.2.2</commons.fileupload.version>
<commons.httpclient.version>0.1.2-SNAPSHOT</commons.httpclient.version>
<commons.io.version>2.4</commons.io.version>
- <commons.jasper>7.0.32.v201211201952</commons.jasper>
- <commons.juli.version>7.0.32.v201211081135</commons.juli.version>
<commons.lang3.version>3.1</commons.lang3.version>
<commons.logback_settings.version>0.0.2-SNAPSHOT</commons.logback_settings.version>
<commons.net.version>3.0.1</commons.net.version>
<commons.opendaylight.concepts.version>0.5.2-SNAPSHOT</commons.opendaylight.concepts.version>
<commons.opendaylight.version>1.4.2-SNAPSHOT</commons.opendaylight.version>
<commons.parent.version>1.0.2-SNAPSHOT</commons.parent.version>
- <commons.tomcat.api>7.0.32.v201211081135</commons.tomcat.api>
- <commons.tomcat.util>7.0.32.v201211201952</commons.tomcat.util>
<compiler.version>2.3.2</compiler.version>
<commons.httpclient.version>0.1.2-SNAPSHOT</commons.httpclient.version>
<concepts.version>0.5.2-SNAPSHOT</concepts.version>
<devices.web.version>0.4.2-SNAPSHOT</devices.web.version>
<dummy-console.version>1.1.0-SNAPSHOT</dummy-console.version>
<eclipse.persistence.version>2.5.0</eclipse.persistence.version>
+ <eclipse.jdt.core.compiler.batch.version>3.8.0.I20120518-2145</eclipse.jdt.core.compiler.batch.version>
<!-- enforcer version -->
<enforcer.version>1.3.1</enforcer.version>
<enunciate.version>1.28</enunciate.version>
<scala.version>2.10</scala.version>
<scala.micro.version>4</scala.micro.version>
<security.version>0.4.2-SNAPSHOT</security.version>
+ <karaf.security.version>0.4.2-SNAPSHOT</karaf.security.version>
<shapeless.version>1.2.4</shapeless.version>
<sitedeploy>dav:http://nexus.opendaylight.org/content/sites/site</sitedeploy>
<sonar.branch>${user.name}-private-view</sonar.branch>
*
*/
-public class ConfigurationService implements IConfigurationService, ICacheUpdateAware<ConfigurationEvent, String> {
+public class ConfigurationService implements IConfigurationService, ICacheUpdateAware<String, String> {
private static final Logger logger = LoggerFactory
.getLogger(ConfigurationService.class);
public static final String SAVE_EVENT_CACHE = "config.event.save";
private static final String ROOT = GlobalConstants.STARTUPHOME.toString();
private IClusterGlobalServices clusterServices;
- private ConcurrentMap <ConfigurationEvent, String> configEvent;
+ private ConcurrentMap<String, String> configEvent;
private Set<IConfigurationAware> configurationAwareList = Collections
.synchronizedSet(new HashSet<IConfigurationAware>());
private ObjectReader objReader;
@Override
public Status saveConfigurations() {
if (configEvent != null) {
- configEvent.put(ConfigurationEvent.SAVE, "");
+ configEvent.put(ConfigurationEvent.SAVE.toString(), "");
}
return saveConfigurationsInternal();
}
}
@Override
- public void entryCreated(ConfigurationEvent key, String cacheName,
+ public void entryCreated(String key, String cacheName,
boolean originLocal) {
if (originLocal) {
return;
}
@Override
- public void entryUpdated(ConfigurationEvent key, String new_value,
+ public void entryUpdated(String key, String new_value,
String cacheName, boolean originLocal) {
if (originLocal) {
return;
}
- if (key == ConfigurationEvent.SAVE) {
+ if (key.equals(ConfigurationEvent.SAVE.toString())) {
saveConfigurationsInternal();
}
}
@Override
- public void entryDeleted(ConfigurationEvent key, String cacheName,
+ public void entryDeleted(String key, String cacheName,
boolean originLocal) {
if (originLocal) {
return;
logger.error("uninitialized clusterServices, can't retrieve cache");
return;
}
- configEvent = (ConcurrentMap<ConfigurationEvent, String>) this.clusterServices.getCache(SAVE_EVENT_CACHE);
+ configEvent = (ConcurrentMap<String, String>) this.clusterServices.getCache(SAVE_EVENT_CACHE);
if (configEvent == null) {
logger.error("Failed to retrieve configuration Cache");
}
*/
public class ContainerConfigurationService implements IConfigurationContainerService,
- IConfigurationAware, ICacheUpdateAware<ConfigurationEvent, String> {
+ IConfigurationAware,
+ ICacheUpdateAware<String, String> {
public static final String CONTAINER_SAVE_EVENT_CACHE = "config.container.event.save";
private static final Logger logger = LoggerFactory.getLogger(ContainerConfigurationService.class);
private IClusterContainerServices clusterServices;
- private ConcurrentMap <ConfigurationEvent, String> containerConfigEvent;
+ private ConcurrentMap<String, String> containerConfigEvent;
// Directory which contains the startup files for this container
private String root;
private Set<IConfigurationContainerAware> configurationAwareList = Collections
@Override
public Status saveConfigurations() {
- containerConfigEvent.put(ConfigurationEvent.SAVE, "");
+ containerConfigEvent.put(ConfigurationEvent.SAVE.toString(), "");
return saveConfiguration();
}
@Override
- public void entryCreated(ConfigurationEvent key, String cacheName,
+ public void entryCreated(String key, String cacheName,
boolean originLocal) {
if (originLocal) {
return;
}
@Override
- public void entryUpdated(ConfigurationEvent key, String new_value,
+ public void entryUpdated(String key, String new_value,
String cacheName, boolean originLocal) {
if (originLocal) {
return;
}
logger.debug("Processing {} event", key);
- if (key == ConfigurationEvent.SAVE) {
+ if (key.equals(ConfigurationEvent.SAVE.toString())) {
saveConfiguration();
}
}
@Override
- public void entryDeleted(ConfigurationEvent key, String cacheName,
+ public void entryDeleted(String key, String cacheName,
boolean originLocal) {
if (originLocal) {
return;
logger.error("uninitialized clusterServices, can't retrieve cache");
return;
}
- containerConfigEvent = (ConcurrentMap<ConfigurationEvent, String>) this.clusterServices.getCache(CONTAINER_SAVE_EVENT_CACHE);
+ containerConfigEvent =
+ (ConcurrentMap<String, String>) this.clusterServices.getCache(CONTAINER_SAVE_EVENT_CACHE);
if (containerConfigEvent == null) {
logger.error("Failed to retrieve configuration Cache");
}
<Host name="localhost" appBase=""
unpackWARs="false" autoDeploy="false"
deployOnStartup="false" createDirs="false">
-<!-- <Realm className="org.opendaylight.controller.security.ControllerCustomRealm" />
+            <Realm className="org.opendaylight.controller.karafsecurity.ControllerCustomRealm" />
<Valve className="org.apache.catalina.authenticator.SingleSignOn" />
- -->
<Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
prefix="web_access_log_" suffix=".txt" resolveHosts="false"
rotatable="true" fileDateFormat="yyyy-MM"
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.opendaylight.controller</groupId>
+    <artifactId>commons.opendaylight</artifactId>
+    <version>1.4.2-SNAPSHOT</version>
+    <relativePath>../commons/opendaylight</relativePath>
+  </parent>
+
+  <!-- Builds the dummy Karaf security realm as an OSGi *fragment* attached to
+       the org.apache.catalina host bundle (see Fragment-Host below), so the
+       realm class is loadable by Tomcat's own classloader.
+       NOTE(review): the orbit/org.apache.catalina dependency carries no
+       <version>; presumably it is managed by the parent POM — verify. -->
+  <artifactId>karaf-tomcat-security</artifactId>
+  <version>0.4.2-SNAPSHOT</version>
+  <packaging>bundle</packaging>
+  <dependencies>
+    <dependency>
+      <groupId>orbit</groupId>
+      <artifactId>org.apache.catalina</artifactId>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <extensions>true</extensions>
+        <configuration>
+          <instructions>
+            <Fragment-Host>org.apache.catalina</Fragment-Host>
+            <Import-Package>org.slf4j,
+                            javax.servlet,
+                            org.apache.catalina,
+                            org.apache.catalina.connector,
+                            org.apache.catalina.valves,
+                            org.apache.catalina.realm
+            </Import-Package>
+            <Export-Package>org.opendaylight.controller.karafsecurity</Export-Package>
+          </instructions>
+          <manifestLocation>${project.basedir}/META-INF</manifestLocation>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <scm>
+    <connection>scm:git:ssh://git.opendaylight.org:29418/controller.git</connection>
+    <developerConnection>scm:git:ssh://git.opendaylight.org:29418/controller.git</developerConnection>
+    <tag>HEAD</tag>
+    <url>https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main</url>
+  </scm>
+</project>
--- /dev/null
+/*
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.karafsecurity;
+
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.catalina.realm.GenericPrincipal;
+import org.apache.catalina.realm.RealmBase;
+
+/**
+ * Placeholder Tomcat security realm for the Karaf distribution, referenced
+ * from the Karaf server.xml in this change.
+ *
+ * NOTE(review): this realm performs NO real authentication. authenticate()
+ * ignores the supplied credentials entirely, every user name is accepted,
+ * every principal is granted the "System-Admin" role, and getPassword()
+ * returns the fixed string "admin" for all users. Suitable only as a
+ * temporary stop-gap; must not be used where real access control is needed.
+ */
+public class ControllerCustomRealm extends RealmBase {
+
+    // Realm name reported to the Tomcat container via getName().
+    private static final String name = "ControllerCustomRealm";
+
+    @Override
+    protected String getName() {
+        return name;
+    }
+
+    // Hard-coded password: the same value is returned for every user name.
+    @Override
+    protected String getPassword(String username) {
+        return "admin";
+    }
+
+    // Builds a principal carrying the single "System-Admin" role, regardless
+    // of which user name was supplied. The empty string is the password slot
+    // of the GenericPrincipal.
+    @Override
+    protected Principal getPrincipal(String username) {
+        List<String> controllerRoles = new ArrayList<String>();
+        controllerRoles.add("System-Admin");
+        return new GenericPrincipal(username, "", controllerRoles);
+    }
+
+    // Accepts any username/credentials pair: credentials are never checked
+    // and a fully privileged principal is always returned.
+    @Override
+    public Principal authenticate(String username, String credentials) {
+        return this.getPrincipal(username);
+    }
+}
import com.google.common.base.Function;
import com.google.common.base.Optional;
-import java.util.AbstractMap.SimpleEntry;
+
import java.util.Iterator;
import java.util.Map.Entry;
+
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationException;
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizationOperation;
import org.opendaylight.controller.md.sal.common.impl.util.compat.DataNormalizer;
import org.opendaylight.yangtools.yang.data.impl.codec.DeserializationException;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaContextListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
public class BindingToNormalizedNodeCodec implements SchemaContextListener,AutoCloseable {
- private static final Logger LOG = LoggerFactory.getLogger(BindingToNormalizedNodeCodec.class);
-
private final BindingIndependentMappingService bindingToLegacy;
private final BindingNormalizedNodeCodecRegistry codecRegistry;
private DataNormalizer legacyToNormalized;
}
}
-
- private static final Entry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject> toBindingEntry(
- final org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject> key,
- final DataObject value) {
- return new SimpleEntry<org.opendaylight.yangtools.yang.binding.InstanceIdentifier<? extends DataObject>, DataObject>(
- key, value);
- }
-
public DataNormalizer getDataNormalizer() {
return legacyToNormalized;
}
package org.opendaylight.controller.md.sal.binding.impl.test;
import static org.junit.Assert.assertTrue;
-
import javassist.ClassPool;
+
import org.junit.Test;
import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
import org.opendaylight.controller.md.sal.binding.test.AbstractSchemaAwareTest;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.Top;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelList;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.test.list.rev140701.two.level.list.TopLevelListKey;
+import org.opendaylight.yangtools.binding.data.codec.gen.impl.DataObjectSerializerGenerator;
import org.opendaylight.yangtools.binding.data.codec.gen.impl.StreamWriterGenerator;
import org.opendaylight.yangtools.binding.data.codec.impl.BindingNormalizedNodeCodecRegistry;
import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
@Override
protected void setupWithSchema(final SchemaContext context) {
mappingService = new RuntimeGeneratedMappingServiceImpl(ClassPool.getDefault());
- StreamWriterGenerator streamWriter = new StreamWriterGenerator(JavassistUtils.forClassPool(ClassPool.getDefault()));
+ DataObjectSerializerGenerator streamWriter = StreamWriterGenerator.create(JavassistUtils.forClassPool(ClassPool.getDefault()));
BindingNormalizedNodeCodecRegistry registry = new BindingNormalizedNodeCodecRegistry(streamWriter);
codec = new BindingToNormalizedNodeCodec(GeneratedClassLoadingStrategy.getTCCLClassLoadingStrategy(), mappingService, registry);
mappingService.onGlobalContextUpdated(context);
import org.opendaylight.yangtools.sal.binding.generator.impl.GeneratedClassLoadingStrategy;
import org.opendaylight.yangtools.sal.binding.generator.impl.RuntimeGeneratedMappingServiceImpl;
import org.opendaylight.yangtools.sal.binding.generator.util.JavassistUtils;
-import org.opendaylight.yangtools.yang.data.impl.codec.BindingIndependentMappingService;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
public class DataBrokerTestCustomizer {
return schemaService;
}
- private BindingIndependentMappingService getMappingService() {
- return mappingService;
- }
-
private DOMDataBroker getDOMDataBroker() {
if(domDataBroker == null) {
domDataBroker = createDOMDataBroker();
return domDataBroker;
}
- private ImmutableMap<LogicalDatastoreType, DOMStore> getDatastores() {
- if(datastores == null) {
+ private synchronized ImmutableMap<LogicalDatastoreType, DOMStore> getDatastores() {
+ if (datastores == null) {
datastores = createDatastores();
}
return datastores;
}
cluster {
- seed-nodes = ["akka.tcp://opendaylight-cluster-data@<CHANGE_ME>:2550"]
+ seed-nodes = ["akka.tcp://opendaylight-cluster-data@<CHANGE_SEED_IP>:2550"]
auto-down-unreachable-after = 10s
}
}
cluster {
- seed-nodes = ["akka.tcp://opendaylight-cluster-rpc@<CHANGE_ME>:2551"]
+ seed-nodes = ["akka.tcp://opendaylight-cluster-rpc@<CHANGE_SEED_IP>:2551"]
auto-down-unreachable-after = 10s
}
@Override
public DOMStoreReadWriteTransaction newReadWriteTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
+ TransactionProxy.TransactionType.READ_WRITE, schemaContext);
}
@Override
public DOMStoreWriteTransaction newWriteOnlyTransaction() {
return new TransactionProxy(actorContext,
- TransactionProxy.TransactionType.READ_WRITE, schemaContext);
+ TransactionProxy.TransactionType.WRITE_ONLY, schemaContext);
}
@Override
@Override
public java.lang.AutoCloseable createInstance() {
return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(),
- InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
- getMaxShardDataChangeExecutorQueueSize(),
- getMaxShardDataChangeListenerQueueSize()));
+ InMemoryDOMDataStoreConfigProperties.create(getConfigMaxShardDataChangeExecutorPoolSize(),
+ getConfigMaxShardDataChangeExecutorQueueSize(),
+ getConfigMaxShardDataChangeListenerQueueSize()));
}
}
public java.lang.AutoCloseable createInstance() {
return DistributedDataStoreFactory.createInstance("operational",
getOperationalSchemaServiceDependency(),
- InMemoryDOMDataStoreConfigProperties.create(getMaxShardDataChangeExecutorPoolSize(),
- getMaxShardDataChangeExecutorQueueSize(),
- getMaxShardDataChangeListenerQueueSize()));
+ InMemoryDOMDataStoreConfigProperties.create(getOperationalMaxShardDataChangeExecutorPoolSize(),
+ getOperationalMaxShardDataChangeExecutorQueueSize(),
+ getOperationalMaxShardDataChangeListenerQueueSize()));
}
}
}
}
- leaf max-shard-data-change-executor-queue-size {
+ leaf config-max-shard-data-change-executor-queue-size {
default 1000;
type uint16;
description "The maximum queue size for each shard's data store data change notification executor.";
}
- leaf max-shard-data-change-executor-pool-size {
+ leaf config-max-shard-data-change-executor-pool-size {
default 20;
type uint16;
description "The maximum thread pool size for each shard's data store data change notification executor.";
}
- leaf max-shard-data-change-listener-queue-size {
+ leaf config-max-shard-data-change-listener-queue-size {
default 1000;
type uint16;
description "The maximum queue size for each shard's data store data change listeners.";
}
}
- leaf max-shard-data-change-executor-queue-size {
+ leaf operational-max-shard-data-change-executor-queue-size {
default 1000;
type uint16;
description "The maximum queue size for each shard's data store data change notification executor.";
}
- leaf max-shard-data-change-executor-pool-size {
+ leaf operational-max-shard-data-change-executor-pool-size {
default 20;
type uint16;
description "The maximum thread pool size for each shard's data store data change notification executor.";
}
- leaf max-shard-data-change-listener-queue-size {
+ leaf operational-max-shard-data-change-listener-queue-size {
default 1000;
type uint16;
description "The maximum queue size for each shard's data store data change listeners.";
--- /dev/null
+/*
+ *
+ * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ *
+ */
+
+package org.opendaylight.controller.cluster.datastore;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.opendaylight.controller.cluster.datastore.utils.ActorContext;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreReadWriteTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreTransaction;
+import org.opendaylight.controller.sal.core.spi.data.DOMStoreWriteTransaction;
+import org.opendaylight.yangtools.yang.model.api.SchemaContext;
+
+/**
+ * Unit tests for TransactionChainProxy: verifies each factory method returns
+ * a transaction implementing the expected DOMStore transaction interface,
+ * and that close() currently throws UnsupportedOperationException.
+ */
+public class TransactionChainProxyTest {
+    // Collaborators are mocked; only their identity is needed by the proxy.
+    ActorContext actorContext = Mockito.mock(ActorContext.class);
+    SchemaContext schemaContext = Mockito.mock(SchemaContext.class);
+    @Test
+    public void testNewReadOnlyTransaction() throws Exception {
+
+        DOMStoreTransaction dst = new TransactionChainProxy(actorContext, schemaContext).newReadOnlyTransaction();
+        Assert.assertTrue(dst instanceof DOMStoreReadTransaction);
+
+    }
+
+    @Test
+    public void testNewReadWriteTransaction() throws Exception {
+        DOMStoreTransaction dst = new TransactionChainProxy(actorContext, schemaContext).newReadWriteTransaction();
+        Assert.assertTrue(dst instanceof DOMStoreReadWriteTransaction);
+
+    }
+
+    @Test
+    public void testNewWriteOnlyTransaction() throws Exception {
+        DOMStoreTransaction dst = new TransactionChainProxy(actorContext, schemaContext).newWriteOnlyTransaction();
+        Assert.assertTrue(dst instanceof DOMStoreWriteTransaction);
+
+    }
+
+    // close() is expected to be unsupported on the chain proxy for now.
+    @Test(expected=UnsupportedOperationException.class)
+    public void testClose() throws Exception {
+        new TransactionChainProxy(actorContext, schemaContext).close();
+    }
+}
package org.opendaylight.controller.remote.rpc.registry.gossip;
import akka.actor.ActorRef;
+import akka.actor.ActorRefProvider;
import akka.actor.Address;
import akka.actor.Props;
import akka.actor.UntypedActor;
-import akka.cluster.Cluster;
+import akka.cluster.ClusterActorRefProvider;
import akka.event.Logging;
import akka.event.LoggingAdapter;
import org.opendaylight.controller.utils.ConditionalProbe;
/**
* Cluster address for this node
*/
- private final Address selfAddress = Cluster.get(getContext().system()).selfAddress();
-
- /**
- * Our private gossiper
- */
- private ActorRef gossiper;
+ private Address selfAddress;
private ConditionalProbe probe;
- public BucketStore(){
- gossiper = getContext().actorOf(Props.create(Gossiper.class), "gossiper");
- }
+ @Override
+ public void preStart(){
+ ActorRefProvider provider = getContext().provider();
+ selfAddress = provider.getDefaultAddress();
- /**
- * This constructor is useful for testing.
- * TODO: Pass Props instead of ActorRef
- *
- * @param gossiper
- */
- public BucketStore(ActorRef gossiper){
- this.gossiper = gossiper;
+ if ( provider instanceof ClusterActorRefProvider)
+ getContext().actorOf(Props.create(Gossiper.class), "gossiper");
}
@Override
public void onReceive(Object message) throws Exception {
- log.debug("Received message: node[{}], message[{}]", selfAddress,
- message);
+ log.debug("Received message: node[{}], message[{}]", selfAddress, message);
if (probe != null) {
-
probe.tell(message, getSelf());
}
if (remoteVersion == null) remoteVersion = -1L;
//update only if remote version is newer
- if ( remoteVersion > localVersion ) {
+ if ( remoteVersion.longValue() > localVersion.longValue() ) {
remoteBuckets.put(entry.getKey(), receivedBucket);
versions.put(entry.getKey(), remoteVersion);
}
Address getSelfAddress() {
return selfAddress;
}
-
}
package org.opendaylight.controller.remote.rpc.registry.gossip;
import akka.actor.ActorRef;
+import akka.actor.ActorRefProvider;
import akka.actor.ActorSelection;
import akka.actor.Address;
import akka.actor.Cancellable;
import akka.actor.UntypedActor;
import akka.cluster.Cluster;
+import akka.cluster.ClusterActorRefProvider;
import akka.cluster.ClusterEvent;
import akka.cluster.Member;
import akka.dispatch.Mapper;
final LoggingAdapter log = Logging.getLogger(getContext().system(), this);
- Cluster cluster = Cluster.get(getContext().system());
+ private Cluster cluster;
/**
* ActorSystem's address for the current cluster node.
*/
- private Address selfAddress = cluster.selfAddress();
+ private Address selfAddress;
/**
* All known cluster members
@Override
public void preStart(){
-
- cluster.subscribe(getSelf(),
- ClusterEvent.initialStateAsEvents(),
- ClusterEvent.MemberEvent.class,
- ClusterEvent.UnreachableMember.class);
+ ActorRefProvider provider = getContext().provider();
+ selfAddress = provider.getDefaultAddress();
+
+ if ( provider instanceof ClusterActorRefProvider ) {
+ cluster = Cluster.get(getContext().system());
+ cluster.subscribe(getSelf(),
+ ClusterEvent.initialStateAsEvents(),
+ ClusterEvent.MemberEvent.class,
+ ClusterEvent.UnreachableMember.class);
+ }
if (autoStartGossipTicks) {
gossipTask = getContext().system().scheduler().schedule(
*/
package org.opendaylight.controller.remote.rpc.registry.gossip;
-import akka.actor.ActorRef;
import akka.actor.ActorSystem;
+import akka.actor.Address;
import akka.actor.Props;
import akka.testkit.TestActorRef;
-import akka.testkit.TestProbe;
import com.typesafe.config.ConfigFactory;
-import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
-import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opendaylight.controller.remote.rpc.TerminationMonitor;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.spy;
+import java.util.HashMap;
+import java.util.Map;
public class BucketStoreTest {
private static ActorSystem system;
private static BucketStore store;
- private BucketStore mockStore;
-
@BeforeClass
public static void setup() {
- system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("odl-cluster"));
+ system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("unit-test"));
system.actorOf(Props.create(TerminationMonitor.class), "termination-monitor");
store = createStore();
system.shutdown();
}
- @Before
- public void createMocks(){
- mockStore = spy(store);
- }
-
- @After
- public void resetMocks(){
- reset(mockStore);
- }
-
+ /**
+ * Given a new local bucket
+ * Should replace
+ */
@Test
- public void testReceiveUpdateBucket_WhenInputBucketShouldUpdateVersion(){
+ public void testReceiveUpdateBucket(){
Bucket bucket = new BucketImpl();
Long expectedVersion = bucket.getVersion();
- mockStore.receiveUpdateBucket(bucket);
+ store.receiveUpdateBucket(bucket);
+
+ Assert.assertEquals(bucket, store.getLocalBucket());
+ Assert.assertEquals(expectedVersion, store.getLocalBucket().getVersion());
+ }
+
+ /**
+ * Given remote buckets
+ * Should merge with local copy of remote buckets
+ */
+ @Test
+ public void testReceiveUpdateRemoteBuckets(){
+
+ Address localAddress = system.provider().getDefaultAddress();
+ Bucket localBucket = new BucketImpl();
+
+ Address a1 = new Address("tcp", "system1");
+ Address a2 = new Address("tcp", "system2");
+ Address a3 = new Address("tcp", "system3");
+
+ Bucket b1 = new BucketImpl();
+ Bucket b2 = new BucketImpl();
+ Bucket b3 = new BucketImpl();
+
+ Map<Address, Bucket> remoteBuckets = new HashMap<>(3);
+ remoteBuckets.put(a1, b1);
+ remoteBuckets.put(a2, b2);
+ remoteBuckets.put(a3, b3);
+ remoteBuckets.put(localAddress, localBucket);
+
+ //Given remote buckets
+ store.receiveUpdateRemoteBuckets(remoteBuckets);
+
+ //Should NOT contain local bucket
+ //Should contain ONLY 3 entries i.e a1, a2, a3
+ Map<Address, Bucket> remoteBucketsInStore = store.getRemoteBuckets();
+ Assert.assertFalse("remote buckets contains local bucket", remoteBucketsInStore.containsKey(localAddress));
+ Assert.assertTrue(remoteBucketsInStore.size() == 3);
+
+ //Add a new remote bucket
+ Address a4 = new Address("tcp", "system4");
+ Bucket b4 = new BucketImpl();
+ remoteBuckets.clear();
+ remoteBuckets.put(a4, b4);
+ store.receiveUpdateRemoteBuckets(remoteBuckets);
+
+ //Should contain a4
+ //Should contain 4 entries now i.e a1, a2, a3, a4
+ remoteBucketsInStore = store.getRemoteBuckets();
+ Assert.assertTrue("Does not contain a4", remoteBucketsInStore.containsKey(a4));
+ Assert.assertTrue(remoteBucketsInStore.size() == 4);
+
+ //Update a bucket
+ Bucket b3_new = new BucketImpl();
+ remoteBuckets.clear();
+ remoteBuckets.put(a3, b3_new);
+ remoteBuckets.put(a1, null);
+ remoteBuckets.put(a2, null);
+ store.receiveUpdateRemoteBuckets(remoteBuckets);
+
+ //Should only update a3
+ remoteBucketsInStore = store.getRemoteBuckets();
+ Bucket b3_inStore = remoteBucketsInStore.get(a3);
+ Assert.assertEquals(b3_new.getVersion(), b3_inStore.getVersion());
+
+ //Should NOT update a1 and a2
+ Bucket b1_inStore = remoteBucketsInStore.get(a1);
+ Bucket b2_inStore = remoteBucketsInStore.get(a2);
+ Assert.assertEquals(b1.getVersion(), b1_inStore.getVersion());
+ Assert.assertEquals(b2.getVersion(), b2_inStore.getVersion());
+ Assert.assertTrue(remoteBucketsInStore.size() == 4);
+
+ //Should update versions map
+ //versions map contains versions for all remote buckets (4) + local bucket
+ //so it should have total 5.
+ Map<Address, Long> versionsInStore = store.getVersions();
+ Assert.assertTrue(String.format("Expected:%s, Actual:%s", 5, versionsInStore.size()),
+ versionsInStore.size() == 5);
+ Assert.assertEquals(b1.getVersion(), versionsInStore.get(a1));
+ Assert.assertEquals(b2.getVersion(), versionsInStore.get(a2));
+ Assert.assertEquals(b3_new.getVersion(), versionsInStore.get(a3));
+ Assert.assertEquals(b4.getVersion(), versionsInStore.get(a4));
+
+ //Send older version of bucket
+ remoteBuckets.clear();
+ remoteBuckets.put(a3, b3);
+ store.receiveUpdateRemoteBuckets(remoteBuckets);
+
+ //Should NOT update a3
+ remoteBucketsInStore = store.getRemoteBuckets();
+ b3_inStore = remoteBucketsInStore.get(a3);
+ Assert.assertTrue(b3_inStore.getVersion().longValue() == b3_new.getVersion().longValue());
- Assert.assertEquals(bucket, mockStore.getLocalBucket());
- Assert.assertEquals(expectedVersion, mockStore.getLocalBucket().getVersion());
}
/**
* @return instance of BucketStore class
*/
private static BucketStore createStore(){
- TestProbe mockActor = new TestProbe(system);
- ActorRef mockGossiper = mockActor.ref();
- final Props props = Props.create(BucketStore.class, mockGossiper);
+ final Props props = Props.create(BucketStore.class);
final TestActorRef<BucketStore> testRef = TestActorRef.create(system, props, "testStore");
-
return testRef.underlyingActor();
}
}
\ No newline at end of file
@BeforeClass
public static void setup() throws InterruptedException {
- system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("odl-cluster"));
+ system = ActorSystem.create("opendaylight-rpc", ConfigFactory.load().getConfig("unit-test"));
system.actorOf(Props.create(TerminationMonitor.class), "termination-monitor");
gossiper = createGossiper();
unit-test{
akka {
loglevel = "INFO"
- loggers = ["akka.event.slf4j.Slf4jLogger"]
- actor {
- provider = "akka.cluster.ClusterActorRefProvider"
- }
+ #loggers = ["akka.event.slf4j.Slf4jLogger"]
}
}
if (statsFlow == null || storedFlow == null) {
return false;
}
- if (statsFlow.getClass() != storedFlow.getClass()) {
- return false;
- }
if (statsFlow.getContainerName()== null) {
if (storedFlow.getContainerName()!= null) {
return false;
}
if (storedFlow == null && statsFlow != null) return false;
if (statsFlow == null && storedFlow != null) return false;
- if (storedFlow.getClass() != statsFlow.getClass()) {
- return false;
- }
if (storedFlow.getEthernetMatch() == null) {
if (statsFlow.getEthernetMatch() != null) {
return false;
<module>opendaylight/connectionmanager/api</module>
<module>opendaylight/connectionmanager/implementation</module>
<module>opendaylight/security</module>
+ <module>opendaylight/karaf-tomcat-security</module>
<!-- third-parties uncomment them if you need snapshot version of it -->
<!-- <module>third-party/openflowj</module> -->