<artifactId>logback-classic</artifactId>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcpkix-jdk15on</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-netty-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>netconf-auth</artifactId>
+ </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>commons.logback_settings</artifactId>
<groupId>xmlunit</groupId>
<artifactId>xmlunit</artifactId>
</dependency>
-
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>config-util</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.controller</groupId>
+ <artifactId>ietf-netconf-monitoring</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-client</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-yang-types-20130715</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.opendaylight.yangtools.model</groupId>
+ <artifactId>ietf-inet-types</artifactId>
+ </dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-impl</artifactId>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-monitoring</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netconf-ssh</artifactId>
</dependency>
-
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>netty-config-api</artifactId>
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import ch.qos.logback.classic.Level;
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.io.ByteStreams;
+import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import java.io.File;
+import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.util.Collections;
+import java.util.Comparator;
import java.util.List;
-
import java.util.concurrent.TimeUnit;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.annotation.Arg;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
-
+import org.opendaylight.controller.netconf.util.xml.XmlElement;
+import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Charsets;
-import com.google.common.io.CharStreams;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
public final class Main {
- // TODO add logback config
-
- // TODO make exi configurable
-
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
static class Params {
@Arg(dest = "generate-config-address")
public String generateConfigsAddress;
- @Arg(dest = "generate-configs-dir")
- public File generateConfigsDir;
+ @Arg(dest = "distro-folder")
+ public File distroFolder;
@Arg(dest = "generate-configs-batch-size")
public int generateConfigBatchSize;
@Arg(dest = "exi")
public boolean exi;
+ @Arg(dest = "debug")
+ public boolean debug;
+
static ArgumentParser getParser() {
final ArgumentParser parser = ArgumentParsers.newArgumentParser("netconf testool");
- parser.addArgument("--devices-count")
+
+ parser.description("Netconf device simulator. Detailed info can be found at https://wiki.opendaylight.org/view/OpenDaylight_Controller:Netconf:Testtool#Building_testtool");
+
+ parser.addArgument("--device-count")
.type(Integer.class)
.setDefault(1)
.type(Integer.class)
parser.addArgument("--schemas-dir")
.type(File.class)
- .required(true)
- .help("Directory containing yang schemas to describe simulated devices")
+ .help("Directory containing yang schemas to describe simulated devices. Some schemas e.g. netconf monitoring and inet types are included by default")
.dest("schemas-dir");
parser.addArgument("--starting-port")
parser.addArgument("--generate-config-connection-timeout")
.type(Integer.class)
- .setDefault((int)TimeUnit.MINUTES.toMillis(5))
+ .setDefault((int)TimeUnit.MINUTES.toMillis(30))
.help("Timeout to be generated in initial config files")
.dest("generate-config-connection-timeout");
parser.addArgument("--generate-configs-batch-size")
.type(Integer.class)
- .setDefault(100)
+ .setDefault(4000)
.help("Number of connector configs per generated file")
.dest("generate-configs-batch-size");
- parser.addArgument("--generate-configs-dir")
+ parser.addArgument("--distribution-folder")
.type(File.class)
- .help("Directory where initial config files for ODL distribution should be generated")
- .dest("generate-configs-dir");
+ .help("Directory where the karaf distribution for controller is located")
+ .dest("distro-folder");
parser.addArgument("--ssh")
.type(Boolean.class)
parser.addArgument("--exi")
.type(Boolean.class)
- .setDefault(false)
+ .setDefault(true)
.help("Whether to use exi to transport xml content")
.dest("exi");
+ parser.addArgument("--debug")
+ .type(Boolean.class)
+ .setDefault(false)
+ .help("Whether to use debug log level instead of INFO")
+ .dest("debug");
+
return parser;
}
checkArgument(deviceCount > 0, "Device count has to be > 0");
checkArgument(startingPort > 1024, "Starting port has to be > 1024");
- checkArgument(schemasDir.exists(), "Schemas dir has to exist");
- checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
- checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ if(schemasDir != null) {
+ checkArgument(schemasDir.exists(), "Schemas dir has to exist");
+ checkArgument(schemasDir.isDirectory(), "Schemas dir has to be a directory");
+ checkArgument(schemasDir.canRead(), "Schemas dir has to be readable");
+ }
}
}
public static void main(final String[] args) {
- ch.ethz.ssh2.log.Logger.enabled = true;
-
final Params params = parseArgs(args, Params.getParser());
params.validate();
+ final ch.qos.logback.classic.Logger root = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
+ root.setLevel(params.debug ? Level.DEBUG : Level.INFO);
+
final NetconfDeviceSimulator netconfDeviceSimulator = new NetconfDeviceSimulator();
try {
final List<Integer> openDevices = netconfDeviceSimulator.start(params);
- if(params.generateConfigsDir != null) {
- new ConfigGenerator(params.generateConfigsDir, openDevices).generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ if(params.distroFolder != null) {
+ final ConfigGenerator configGenerator = new ConfigGenerator(params.distroFolder, openDevices);
+ final List<File> generated = configGenerator.generate(params.ssh, params.generateConfigBatchSize, params.generateConfigsTimeout, params.generateConfigsAddress);
+ configGenerator.updateFeatureFile(generated);
+ configGenerator.changeLoadOrder();
}
} catch (final Exception e) {
LOG.error("Unhandled exception", e);
}
}
-
private static Params parseArgs(final String[] args, final ArgumentParser parser) {
final Params opt = new Params();
try {
public static final String NETCONF_USE_SSH = "false";
public static final String SIM_DEVICE_SUFFIX = "-sim-device";
- private final File directory;
+ private static final String SIM_DEVICE_CFG_PREFIX = "simulated-devices_";
+ private static final String ETC_KARAF_PATH = "etc/";
+ private static final String ETC_OPENDAYLIGHT_KARAF_PATH = ETC_KARAF_PATH + "opendaylight/karaf/";
+
+ public static final String NETCONF_CONNECTOR_ALL_FEATURE = "odl-netconf-connector-all";
+ private static final String ORG_OPS4J_PAX_URL_MVN_CFG = "org.ops4j.pax.url.mvn.cfg";
+
+ private final File configDir;
private final List<Integer> openDevices;
+ private final File ncFeatureFile;
+ private final File etcDir;
+ private final File loadOrderCfgFile;
public ConfigGenerator(final File directory, final List<Integer> openDevices) {
- this.directory = directory;
+ this.configDir = new File(directory, ETC_OPENDAYLIGHT_KARAF_PATH);
+ this.etcDir = new File(directory, ETC_KARAF_PATH);
+ this.loadOrderCfgFile = new File(etcDir, ORG_OPS4J_PAX_URL_MVN_CFG);
+ this.ncFeatureFile = getFeatureFile(directory, "features-netconf-connector");
this.openDevices = openDevices;
}
- public void generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
- if(directory.exists() == false) {
- checkState(directory.mkdirs(), "Unable to create folder %s" + directory);
+ public List<File> generate(final boolean useSsh, final int batchSize, final int generateConfigsTimeout, final String address) {
+ if(configDir.exists() == false) {
+ Preconditions.checkState(configDir.mkdirs(), "Unable to create directory " + configDir);
+ }
+
+ for (final File file : configDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return !pathname.isDirectory() && pathname.getName().startsWith(SIM_DEVICE_CFG_PREFIX);
+ }
+ })) {
+ Preconditions.checkState(file.delete(), "Unable to clean previous generated file %s", file);
}
try(InputStream stream = Main.class.getResourceAsStream(NETCONF_CONNECTOR_XML)) {
checkNotNull(stream, "Cannot load %s", NETCONF_CONNECTOR_XML);
String configBlueprint = CharStreams.toString(new InputStreamReader(stream, Charsets.UTF_8));
- // TODO make address configurable
checkState(configBlueprint.contains(NETCONF_CONNECTOR_NAME));
checkState(configBlueprint.contains(NETCONF_CONNECTOR_PORT));
checkState(configBlueprint.contains(NETCONF_USE_SSH));
StringBuilder b = new StringBuilder();
b.append(before);
+ final List<File> generatedConfigs = Lists.newArrayList();
+
for (final Integer openDevice : openDevices) {
if(batchStart == null) {
batchStart = openDevice;
connectorCount++;
if(connectorCount == batchSize) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevice)), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevice));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
connectorCount = 0;
b = new StringBuilder();
b.append(before);
// Write remaining
if(connectorCount != 0) {
b.append(after);
- Files.write(b.toString(), new File(directory, String.format("simulated-devices_%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1))), Charsets.UTF_8);
+ final File to = new File(configDir, String.format(SIM_DEVICE_CFG_PREFIX + "%d-%d.xml", batchStart, openDevices.get(openDevices.size() - 1)));
+ generatedConfigs.add(to);
+ Files.write(b.toString(), to, Charsets.UTF_8);
}
- LOG.info("Config files generated in {}", directory);
+ LOG.info("Config files generated in {}", configDir);
+ return generatedConfigs;
} catch (final IOException e) {
throw new RuntimeException("Unable to generate config files", e);
}
}
+
+
+    /**
+     * Rewrites the netconf-connector "all" feature definition in the distro's feature
+     * file so it references exactly the freshly generated simulated-device config files
+     * (previously generated entries are removed first).
+     *
+     * @param generated config files written by the generate step
+     * @throws RuntimeException if the feature file cannot be read, parsed or written
+     */
+    public void updateFeatureFile(final List<File> generated) {
+        // TODO karaf core contains jaxb for feature files, use that for modification
+        try {
+            final Document document = XmlUtil.readXmlToDocument(Files.toString(ncFeatureFile, Charsets.UTF_8));
+            final NodeList childNodes = document.getDocumentElement().getChildNodes();
+
+            for (int i = 0; i < childNodes.getLength(); i++) {
+                final Node item = childNodes.item(i);
+                // Skip text/comment nodes and any element that is not a <feature>
+                if (!(item instanceof Element)) {
+                    continue;
+                }
+                if (!"feature".equals(item.getLocalName())) {
+                    continue;
+                }
+
+                if (NETCONF_CONNECTOR_ALL_FEATURE.equals(((Element) item).getAttribute("name"))) {
+                    final Element ncAllFeatureDefinition = (Element) item;
+                    // Clean previous generated files
+                    for (final XmlElement configfile : XmlElement.fromDomElement(ncAllFeatureDefinition).getChildElements("configfile")) {
+                        ncAllFeatureDefinition.removeChild(configfile.getDomElement());
+                    }
+                    for (final File file : generated) {
+                        final Element configfile = document.createElement("configfile");
+                        configfile.setTextContent("file:" + ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+                        configfile.setAttribute("finalname", ETC_OPENDAYLIGHT_KARAF_PATH + file.getName());
+                        ncAllFeatureDefinition.appendChild(configfile);
+                    }
+                }
+            }
+
+            Files.write(XmlUtil.toString(document), ncFeatureFile, Charsets.UTF_8);
+            LOG.info("Feature file {} updated", ncFeatureFile);
+        } catch (final IOException e) {
+            // Preserve the cause — the original dropped it, hiding the real failure
+            throw new RuntimeException("Unable to load features file " + ncFeatureFile + " as a resource", e);
+        } catch (final SAXException e) {
+            throw new RuntimeException("Unable to parse features file " + ncFeatureFile, e);
+        }
+    }
+
+
+ private static File getFeatureFile(final File distroFolder, final String featureName) {
+ checkExistingDir(distroFolder, String.format("Folder %s does not exist", distroFolder));
+
+ final File systemDir = checkExistingDir(new File(distroFolder, "system"), String.format("Folder %s does not contain a karaf distro, folder system is missing", distroFolder));
+ final File netconfConnectorFeaturesParentDir = checkExistingDir(new File(systemDir, "org/opendaylight/controller/" + featureName), String.format("Karaf distro in %s does not contain netconf-connector features", distroFolder));
+
+ // Find newest version for features
+ final File newestVersionDir = Collections.max(
+ Lists.newArrayList(netconfConnectorFeaturesParentDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.isDirectory();
+ }
+ })), new Comparator<File>() {
+ @Override
+ public int compare(final File o1, final File o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+ });
+
+ return newestVersionDir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(final File pathname) {
+ return pathname.getName().contains(featureName);
+ }
+ })[0];
+ }
+
+    // Fails with msg unless folder is an existing directory; returns folder for chaining.
+    private static File checkExistingDir(final File folder, final String msg) {
+        Preconditions.checkArgument(folder.exists() && folder.isDirectory(), msg);
+        return folder;
+    }
+
+    /**
+     * Overwrites etc/org.ops4j.pax.url.mvn.cfg in the distro with the bundled version,
+     * which makes karaf prefer local bundles/features during resolution.
+     *
+     * @throws RuntimeException if the bundled cfg is missing or the file cannot be written
+     */
+    public void changeLoadOrder() {
+        // try-with-resources: the original leaked the classpath stream
+        try (InputStream cfg = getClass().getResourceAsStream("/" + ORG_OPS4J_PAX_URL_MVN_CFG)) {
+            // getResourceAsStream returns null when the resource is absent — fail with a clear message
+            Preconditions.checkNotNull(cfg, "Cannot find %s on classpath", ORG_OPS4J_PAX_URL_MVN_CFG);
+            Files.write(ByteStreams.toByteArray(cfg), loadOrderCfgFile);
+            LOG.info("Load order changed to prefer local bundles/features by rewriting file {}", loadOrderCfgFile);
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to rewrite features file " + loadOrderCfgFile, e);
+        }
+    }
}
}
import com.google.common.base.Charsets;
import com.google.common.base.Function;
+import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.CharStreams;
import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.Futures;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.local.LocalAddress;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.net.Inet4Address;
import org.opendaylight.yangtools.yang.model.repo.api.YangTextSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.PotentialSchemaSource;
import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceListener;
+import org.opendaylight.yangtools.yang.model.repo.spi.SchemaSourceProvider;
import org.opendaylight.yangtools.yang.model.repo.util.FilesystemSchemaSourceCache;
import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
private static final Logger LOG = LoggerFactory.getLogger(NetconfDeviceSimulator.class);
- public static final int CONNECTION_TIMEOUT_MILLIS = 20000;
-
private final NioEventLoopGroup nettyThreadgroup;
private final HashedWheelTimer hashedWheelTimer;
private final List<Channel> devicesChannels = Lists.newArrayList();
}
private Map<ModuleBuilder, String> toModuleBuilders(final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> sources) {
- final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
- @Override
- public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
- return input.getKey().getAST();
- }
- });
- final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
- asts.values(), Optional.<SchemaContext>absent());
+ final Map<SourceIdentifier, ParserRuleContext> asts = Maps.transformValues(sources, new Function<Map.Entry<ASTSchemaSource, YangTextSchemaSource>, ParserRuleContext>() {
+ @Override
+ public ParserRuleContext apply(final Map.Entry<ASTSchemaSource, YangTextSchemaSource> input) {
+ return input.getKey().getAST();
+ }
+ });
+ final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
+ asts.values(), Optional.<SchemaContext>absent());
- final ParseTreeWalker walker = new ParseTreeWalker();
- final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
+ final ParseTreeWalker walker = new ParseTreeWalker();
+ final Map<ModuleBuilder, String> sourceToBuilder = new HashMap<>();
- for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
- final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
- walker, entry.getValue()).getModuleBuilder();
+ for (final Map.Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
+ final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
+ walker, entry.getValue()).getModuleBuilder();
- try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
- sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
+ try(InputStreamReader stream = new InputStreamReader(sources.get(entry.getKey()).getValue().openStream(), Charsets.UTF_8)) {
+ sourceToBuilder.put(moduleBuilder, CharStreams.toString(stream));
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
}
-
- return sourceToBuilder;
}
+ return sourceToBuilder;
+ }
+
public List<Integer> start(final Main.Params params) {
+ LOG.info("Starting {}, {} simulated devices starting on port {}", params.deviceCount, params.ssh ? "SSH" : "TCP", params.startingPort);
+
final Map<ModuleBuilder, String> moduleBuilders = parseSchemasToModuleBuilders(params);
final NetconfServerDispatcher dispatcher = createDispatcher(moduleBuilders, params.exi, params.generateConfigsTimeout);
devicesChannels.add(server.channel());
openDevices.add(currentPort - 1);
-
}
if(openDevices.size() == params.deviceCount) {
- LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort);
+ LOG.info("All simulated devices started successfully from port {} to {}", params.startingPort, currentPort - 1);
} else {
LOG.warn("Not all simulated devices started successfully. Started devices ar on ports {}", openDevices);
}
public void schemaSourceUnregistered(final PotentialSchemaSource<?> potentialSchemaSource) {}
});
- final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
- consumer.registerSchemaSourceListener(cache);
+ if(params.schemasDir != null) {
+ final FilesystemSchemaSourceCache<YangTextSchemaSource> cache = new FilesystemSchemaSourceCache<>(consumer, YangTextSchemaSource.class, params.schemasDir);
+ consumer.registerSchemaSourceListener(cache);
+ }
+
+ addDefaultSchemas(consumer);
final Map<SourceIdentifier, Map.Entry<ASTSchemaSource, YangTextSchemaSource>> asts = Maps.newHashMap();
for (final SourceIdentifier loadedSource : loadedSources) {
return toModuleBuilders(asts);
}
+    // Registers the schemas every simulated device exposes regardless of --schemas-dir:
+    // netconf monitoring plus the yang/inet types it depends on.
+    private void addDefaultSchemas(final SharedSchemaRepository consumer) {
+        registerSource(consumer, "/META-INF/yang/ietf-netconf-monitoring.yang",
+                new SourceIdentifier("ietf-netconf-monitoring", "2010-10-04"));
+        registerSource(consumer, "/META-INF/yang/ietf-yang-types@2013-07-15.yang",
+                new SourceIdentifier("ietf-yang-types", "2013-07-15"));
+        registerSource(consumer, "/META-INF/yang/ietf-inet-types.yang",
+                new SourceIdentifier("ietf-inet-types", "2010-09-24"));
+    }
+
+ // Registers a classpath yang resource with the shared schema repository under sourceId.
+ // The provider ignores the requested sourceIdentifier and always serves the fixed resource;
+ // the IMMEDIATE cost marks it as cheap to obtain so the repo prefers it.
+ private void registerSource(final SharedSchemaRepository consumer, final String resource, final SourceIdentifier sourceId) {
+ consumer.registerSchemaSource(new SchemaSourceProvider<SchemaSourceRepresentation>() {
+ @Override
+ public CheckedFuture<? extends SchemaSourceRepresentation, SchemaSourceException> getSource(final SourceIdentifier sourceIdentifier) {
+ // Already-available source: wrap in an immediate future
+ return Futures.immediateCheckedFuture(new YangTextSchemaSource(sourceId) {
+ @Override
+ protected Objects.ToStringHelper addToStringAttributes(final Objects.ToStringHelper toStringHelper) {
+ return toStringHelper;
+ }
+
+ @Override
+ public InputStream openStream() throws IOException {
+ // NOTE(review): getResourceAsStream may return null if the resource is missing — TODO confirm packaging
+ return getClass().getResourceAsStream(resource);
+ }
+ });
+ }
+ }, PotentialSchemaSource.create(sourceId, YangTextSchemaSource.class, PotentialSchemaSource.Costs.IMMEDIATE.getValue()));
+ }
+
private static InetSocketAddress getAddress(final int port) {
try {
// TODO make address configurable
--- /dev/null
+################################################################################
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+################################################################################
+
+#
+# If set to true, the following property will not allow any certificate to be used
+# when accessing Maven repositories through SSL
+#
+#org.ops4j.pax.url.mvn.certificateCheck=
+
+#
+# Path to the local Maven settings file.
+# The repositories defined in this file will be automatically added to the list
+# of default repositories if the 'org.ops4j.pax.url.mvn.repositories' property
+# below is not set.
+# The following locations are checked for the existence of the settings.xml file
+# * 1. looks for the specified url
+# * 2. if not found looks for ${user.home}/.m2/settings.xml
+# * 3. if not found looks for ${maven.home}/conf/settings.xml
+# * 4. if not found looks for ${M2_HOME}/conf/settings.xml
+#
+#org.ops4j.pax.url.mvn.settings=
+
+#
+# Path to the local Maven repository which is used to avoid downloading
+# artifacts when they already exist locally.
+# The value of this property will be extracted from the settings.xml file
+# above, or defaulted to:
+# System.getProperty( "user.home" ) + "/.m2/repository"
+#
+org.ops4j.pax.url.mvn.localRepository=${karaf.home}/${karaf.default.repository}
+
+#
+# Default this to false. It's just weird to use undocumented repos
+#
+org.ops4j.pax.url.mvn.useFallbackRepositories=false
+
+#
+# Uncomment if you don't want to use the proxy settings
+# from the Maven conf/settings.xml file
+#
+# org.ops4j.pax.url.mvn.proxySupport=false
+
+#
+# Disable aether support by default. This ensure that the defaultRepositories
+# below will be used
+#
+#org.ops4j.pax.url.mvn.disableAether=true
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# Those repositories will be checked before iterating through the
+# below list of repositories and even before the local repository
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snapshots
+# @noreleases : the repository does not contain any released artifacts
+#
+# The following property value will add the system folder as a repo.
+#
+#org.ops4j.pax.url.mvn.defaultRepositories=
+
+# Use the default local repo (e.g.~/.m2/repository) as a "remote" repo
+org.ops4j.pax.url.mvn.defaultLocalRepoAsRemote=false
+
+#
+# Comma separated list of repositories scanned when resolving an artifact.
+# The default list includes the following repositories containing releases:
+# http://repo1.maven.org/maven2
+# http://repository.apache.org/content/groups/snapshots-group
+# http://svn.apache.org/repos/asf/servicemix/m2-repo
+# http://repository.springsource.com/maven/bundles/release
+# http://repository.springsource.com/maven/bundles/external
+# To add repositories to the default ones, prepend '+' to the list of repositories
+# to add.
+# A repository url can be appended with zero or more of the following flags:
+# @snapshots : the repository contains snapshots
+# @noreleases : the repository does not contain any released artifacts
+# @id=repoid : the id for the repository, just like in the settings.xml; this is optional but recommended
+#
+# The default list doesn't contain any repository containing snapshots as it can impact the artifacts resolution.
+# You may want to add the following repositories containing snapshots:
+# http://repository.apache.org/content/groups/snapshots-group@id=apache@snapshots@noreleases
+# http://oss.sonatype.org/content/repositories/snapshots@id=sonatype.snapshots.deploy@snapshots@noreleases
+# http://oss.sonatype.org/content/repositories/ops4j-snapshots@id=ops4j.sonatype.snapshots.deploy@snapshots@noreleases
+#
+org.ops4j.pax.url.mvn.repositories= \
+ file:${karaf.home}/${karaf.default.repository}@id=system.repository, \
+ file:${karaf.data}/kar@id=kar.repository@multi, \
+ http://repo1.maven.org/maven2@id=central, \
+ http://repository.springsource.com/maven/bundles/release@id=spring.ebr.release, \
+ http://repository.springsource.com/maven/bundles/external@id=spring.ebr.external
<module>netconf-connector-config</module>
<module>netconf-auth</module>
<module>netconf-usermanager</module>
+ <module>netconf-testtool</module>
</modules>
<dependencies>
<module>netconf-it</module>
</modules>
</profile>
-
- <profile>
- <id>testtool</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <modules>
- <module>netconf-testtool</module>
- </modules>
- </profile>
</profiles>
</project>