From: Tony Tkacik
Date: Thu, 20 Nov 2014 09:05:53 +0000 (+0000)
Subject: Merge changes Ic434bf4a,I05a3fb18,I47a3783d,I8234bbfd
X-Git-Tag: release/lithium~863
X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?p=controller.git;a=commitdiff_plain;h=992a433ff8fd0ce7335bf5ea9e59a75602a95a19;hp=33a40451c53d0cbf66407bd3e217baf904aa7088

Merge changes Ic434bf4a,I05a3fb18,I47a3783d,I8234bbfd

* changes:
  Fix checkstyle warnings in netconf-cli
  Fix checkstyle warnings ietf-netconf-monitoring
  Fix checkstyle warnings for netconf-testtool
  Fix checkstyle warnings for config-netconf-connector
---

diff --git a/opendaylight/adsal/switchmanager/integrationtest/src/test/java/org/opendaylight/controller/switchmanager/internal/SwitchManagerIT.java b/opendaylight/adsal/switchmanager/integrationtest/src/test/java/org/opendaylight/controller/switchmanager/internal/SwitchManagerIT.java
index 64098ec515..3963abf265 100644
--- a/opendaylight/adsal/switchmanager/integrationtest/src/test/java/org/opendaylight/controller/switchmanager/internal/SwitchManagerIT.java
+++ b/opendaylight/adsal/switchmanager/integrationtest/src/test/java/org/opendaylight/controller/switchmanager/internal/SwitchManagerIT.java
@@ -155,7 +155,7 @@ public class SwitchManagerIT {
         assertFalse(debugit);

         // Now lets create a hosttracker for testing purpose
-        ServiceReference s = bc.getServiceReference(ISwitchManager.class
+        ServiceReference s = bc.getServiceReference(ISwitchManager.class
                 .getName());
         if (s != null) {
             this.switchManager = (ISwitchManager) bc.getService(s);
diff --git a/opendaylight/adsal/topologymanager/integrationtest/src/test/java/org/opendaylight/controller/topologymanager/TopologyManagerIT.java b/opendaylight/adsal/topologymanager/integrationtest/src/test/java/org/opendaylight/controller/topologymanager/TopologyManagerIT.java
index 08934f32c7..c2fc8aaa8e 100644
--- a/opendaylight/adsal/topologymanager/integrationtest/src/test/java/org/opendaylight/controller/topologymanager/TopologyManagerIT.java
+++ b/opendaylight/adsal/topologymanager/integrationtest/src/test/java/org/opendaylight/controller/topologymanager/TopologyManagerIT.java
@@ -159,7 +159,7 @@ public class TopologyManagerIT {

         Set properties = new HashSet();

-        ServiceReference r = bc.getServiceReference(IPluginInTopologyService.class
+        ServiceReference r = bc.getServiceReference(IPluginInTopologyService.class
                 .getName());
         TopologyServices topologyServices = null;
         if (r != null) {
diff --git a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/ServiceReferenceRegistryImpl.java b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/ServiceReferenceRegistryImpl.java
index 0f881e95ad..0dff41402e 100644
--- a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/ServiceReferenceRegistryImpl.java
+++ b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/ServiceReferenceRegistryImpl.java
@@ -8,7 +8,8 @@ package org.opendaylight.controller.config.manager.impl;

 import static com.google.common.base.Preconditions.checkNotNull;
-
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -71,22 +72,22 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         }

         @Override
-        public Set lookupConfigBeans(String moduleName) {
+        public Set lookupConfigBeans(final String moduleName) {
             throw new UnsupportedOperationException();
         }

         @Override
-        public Set lookupConfigBeans(String moduleName, String instanceName) {
+        public Set lookupConfigBeans(final String moduleName, final String instanceName) {
             throw new UnsupportedOperationException();
         }

         @Override
-        public ObjectName lookupConfigBean(String moduleName, String instanceName) throws InstanceNotFoundException {
+        public ObjectName lookupConfigBean(final String moduleName, final String instanceName) throws InstanceNotFoundException {
             throw new UnsupportedOperationException();
         }

         @Override
-        public void checkConfigBeanExists(ObjectName objectName) throws InstanceNotFoundException {
+        public void checkConfigBeanExists(final ObjectName objectName) throws InstanceNotFoundException {
             throw new InstanceNotFoundException("Cannot find " + objectName + " - Tried to use mocking registry");
         }
@@ -110,7 +111,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         }

         @Override
-        public ServiceReferenceJMXRegistration registerMBean(ServiceReferenceMXBeanImpl object, ObjectName on) throws InstanceAlreadyExistsException {
+        public ServiceReferenceJMXRegistration registerMBean(final ServiceReferenceMXBeanImpl object, final ObjectName on) throws InstanceAlreadyExistsException {
             throw new UnsupportedOperationException();
         }
@@ -128,9 +129,9 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     /**
      * Static constructor for transaction controller. Take current state as seen by config registry, allow writing new data.
      */
-    public static SearchableServiceReferenceWritableRegistry createSRWritableRegistry(ServiceReferenceReadableRegistry oldReadableRegistry,
-            ConfigTransactionLookupRegistry txLookupRegistry,
-            Map> currentlyRegisteredFactories) {
+    public static SearchableServiceReferenceWritableRegistry createSRWritableRegistry(final ServiceReferenceReadableRegistry oldReadableRegistry,
+            final ConfigTransactionLookupRegistry txLookupRegistry,
+            final Map> currentlyRegisteredFactories) {

         if (txLookupRegistry == null) {
             throw new IllegalArgumentException("txLookupRegistry is null");
@@ -148,8 +149,8 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     /**
      * Copy back state to config registry after commit.
      */
-    public static CloseableServiceReferenceReadableRegistry createSRReadableRegistry(ServiceReferenceWritableRegistry oldWritableRegistry,
-            LookupRegistry lookupRegistry, BaseJMXRegistrator baseJMXRegistrator) {
+    public static CloseableServiceReferenceReadableRegistry createSRReadableRegistry(final ServiceReferenceWritableRegistry oldWritableRegistry,
+            final LookupRegistry lookupRegistry, final BaseJMXRegistrator baseJMXRegistrator) {
         ServiceReferenceRegistryImpl old = (ServiceReferenceRegistryImpl) oldWritableRegistry;

         // even if factories do change, nothing in the mapping can change between transactions
@@ -163,7 +164,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     /**
      * Fill refNames and mBeans maps from old instance
      */
-    private static void copy(ServiceReferenceRegistryImpl old, ServiceReferenceRegistryImpl newRegistry, String nullableDstTransactionName) {
+    private static void copy(final ServiceReferenceRegistryImpl old, final ServiceReferenceRegistryImpl newRegistry, final String nullableDstTransactionName) {
         for (Entry> refNameEntry : old.mBeans.entrySet()) {
             ObjectName currentImplementation;
             ObjectName currentImplementationSrc = refNameEntry.getValue().getKey().getCurrentImplementation();
@@ -182,7 +183,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         }
     }

-    private static Map extractFactoriesMap(Map> currentlyRegisteredFactories) {
+    private static Map extractFactoriesMap(final Map> currentlyRegisteredFactories) {
         Map result = new HashMap<>();
         for (Entry> entry : currentlyRegisteredFactories.entrySet()) {
             result.put(entry.getKey(), entry.getValue().getKey());
@@ -190,9 +191,9 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         return result;
     }

-    private ServiceReferenceRegistryImpl(Map factories, LookupRegistry lookupRegistry,
-            ServiceReferenceTransactionRegistratorFactory serviceReferenceRegistratorFactory,
-            boolean writable) {
+    private ServiceReferenceRegistryImpl(final Map factories, final LookupRegistry lookupRegistry,
+            final ServiceReferenceTransactionRegistratorFactory serviceReferenceRegistratorFactory,
+            final boolean writable) {
         this.factories = factories;
         this.writable = writable;
         this.lookupRegistry = lookupRegistry;
@@ -213,10 +214,10 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
             Set qNames = InterfacesHelper.getQNames(siAnnotations);
             allAnnotations.addAll(siAnnotations);
             allQNameSet.addAll(qNames);
-            modifiableFactoryNamesToQNames.put(entry.getKey(), Collections.unmodifiableSet(qNames));
+            modifiableFactoryNamesToQNames.put(entry.getKey(), qNames);
         }
-        this.factoryNamesToQNames = Collections.unmodifiableMap(modifiableFactoryNamesToQNames);
-        this.allQNames = Collections.unmodifiableSet(allQNameSet);
+        this.factoryNamesToQNames = ImmutableMap.copyOf(modifiableFactoryNamesToQNames);
+        this.allQNames = ImmutableSet.copyOf(allQNameSet);
         // fill namespacesToAnnotations
         Map> modifiableNamespacesToAnnotations = new HashMap<>();
@@ -235,13 +236,13 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
                 ofNamespace.put(sia.localName(), sia);
                 modifiableServiceQNamesToAnnotations.put(sia.value(), sia);
             }
-        this.namespacesToAnnotations = Collections.unmodifiableMap(modifiableNamespacesToAnnotations);
-        this.serviceQNamesToAnnotations = Collections.unmodifiableMap(modifiableServiceQNamesToAnnotations);
+        this.namespacesToAnnotations = ImmutableMap.copyOf(modifiableNamespacesToAnnotations);
+        this.serviceQNamesToAnnotations = ImmutableMap.copyOf(modifiableServiceQNamesToAnnotations);
         LOGGER.trace("factoryNamesToQNames:{}", this.factoryNamesToQNames);
     }

     @Override
-    public Map findServiceInterfaces(ModuleIdentifier moduleIdentifier) {
+    public Map findServiceInterfaces(final ModuleIdentifier moduleIdentifier) {
         Map result = modulesToServiceRef.get(moduleIdentifier);
         if (result == null) {
             return Collections.emptyMap();
@@ -250,7 +251,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized Set lookupServiceInterfaceNames(ObjectName objectName) throws InstanceNotFoundException {
+    public synchronized Set lookupServiceInterfaceNames(final ObjectName objectName) throws InstanceNotFoundException {
         lookupRegistry.checkConfigBeanExists(objectName);

         String factoryName = ObjectNameUtil.getFactoryName(objectName);
@@ -264,7 +265,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized String getServiceInterfaceName(String namespace, String localName) {
+    public synchronized String getServiceInterfaceName(final String namespace, final String localName) {
         Map ofNamespace = namespacesToAnnotations.get(namespace);
         if (ofNamespace == null) {
             LOGGER.error("Cannot find namespace {} in {}", namespace, namespacesToAnnotations);
@@ -295,7 +296,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         return result;
     }

-    private ObjectName getObjectName(ModuleIdentifier moduleIdentifier) {
+    private ObjectName getObjectName(final ModuleIdentifier moduleIdentifier) {
         ObjectName on;
         try {
             on = lookupRegistry.lookupConfigBean(moduleIdentifier.getFactoryName(), moduleIdentifier.getInstanceName());
@@ -307,7 +308,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized ObjectName lookupConfigBeanByServiceInterfaceName(String serviceInterfaceQName, String refName) {
+    public synchronized ObjectName lookupConfigBeanByServiceInterfaceName(final String serviceInterfaceQName, final String refName) {
         ServiceReference serviceReference = new ServiceReference(serviceInterfaceQName, refName);
         ModuleIdentifier moduleIdentifier = refNames.get(serviceReference);
         if (moduleIdentifier == null) {
@@ -318,7 +319,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized Map lookupServiceReferencesByServiceInterfaceName(String serviceInterfaceQName) {
+    public synchronized Map lookupServiceReferencesByServiceInterfaceName(final String serviceInterfaceQName) {
         Map> serviceMapping = getServiceMapping();
         Map innerMap = serviceMapping.get(serviceInterfaceQName);
         if (innerMap == null) {
@@ -329,7 +330,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized ObjectName getServiceReference(String serviceInterfaceQName, String refName) throws InstanceNotFoundException {
+    public synchronized ObjectName getServiceReference(final String serviceInterfaceQName, final String refName) throws InstanceNotFoundException {
         ServiceReference serviceReference = new ServiceReference(serviceInterfaceQName, refName);
         if (mBeans.containsKey(serviceReference) == false) {
             throw new InstanceNotFoundException("Cannot find " + serviceReference);
@@ -338,7 +339,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized void checkServiceReferenceExists(ObjectName objectName) throws InstanceNotFoundException {
+    public synchronized void checkServiceReferenceExists(final ObjectName objectName) throws InstanceNotFoundException {
         String actualTransactionName = ObjectNameUtil.getTransactionName(objectName);
         String expectedTransactionName = serviceReferenceRegistrator.getNullableTransactionName();
         if (writable & actualTransactionName == null || (writable && actualTransactionName.equals(expectedTransactionName) == false)) {
@@ -362,19 +363,19 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized ObjectName saveServiceReference(String serviceInterfaceName, String refName, ObjectName moduleON) throws InstanceNotFoundException {
+    public synchronized ObjectName saveServiceReference(final String serviceInterfaceName, final String refName, final ObjectName moduleON) throws InstanceNotFoundException {
         assertWritable();
         ServiceReference serviceReference = new ServiceReference(serviceInterfaceName, refName);
         return saveServiceReference(serviceReference, moduleON);
     }

-    private synchronized ObjectName saveServiceReference(ServiceReference serviceReference, ObjectName moduleON)
+    private synchronized ObjectName saveServiceReference(final ServiceReference serviceReference, final ObjectName moduleON)
             throws InstanceNotFoundException{
         return saveServiceReference(serviceReference, moduleON, false);
     }

-    private synchronized ObjectName saveServiceReference(ServiceReference serviceReference, ObjectName moduleON,
-            boolean skipChecks) throws InstanceNotFoundException {
+    private synchronized ObjectName saveServiceReference(final ServiceReference serviceReference, final ObjectName moduleON,
+            final boolean skipChecks) throws InstanceNotFoundException {

         // make sure it is found
         if (skipChecks == false) {
@@ -443,13 +444,13 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
             }

             @Override
-            public ServiceReferenceJMXRegistration setValue(ServiceReferenceJMXRegistration value) {
+            public ServiceReferenceJMXRegistration setValue(final ServiceReferenceJMXRegistration value) {
                 throw new UnsupportedOperationException();
             }
         };
     }

-    private ObjectName getServiceON(ServiceReference serviceReference) {
+    private ObjectName getServiceON(final ServiceReference serviceReference) {
         if (writable) {
             return ObjectNameUtil.createTransactionServiceON(serviceReferenceRegistrator.getNullableTransactionName(),
                     serviceReference.getServiceInterfaceQName(), serviceReference.getRefName());
@@ -459,12 +460,12 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized void removeServiceReference(String serviceInterfaceName, String refName) throws InstanceNotFoundException{
+    public synchronized void removeServiceReference(final String serviceInterfaceName, final String refName) throws InstanceNotFoundException{
         ServiceReference serviceReference = new ServiceReference(serviceInterfaceName, refName);
         removeServiceReference(serviceReference);
     }

-    private synchronized void removeServiceReference(ServiceReference serviceReference) throws InstanceNotFoundException {
+    private synchronized void removeServiceReference(final ServiceReference serviceReference) throws InstanceNotFoundException {
         LOGGER.debug("Removing service reference {} from {}", serviceReference, this);
         assertWritable();
         // is the qName known?
@@ -496,7 +497,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

     @Override
-    public synchronized boolean removeServiceReferences(ObjectName moduleObjectName) throws InstanceNotFoundException {
+    public synchronized boolean removeServiceReferences(final ObjectName moduleObjectName) throws InstanceNotFoundException {
         lookupRegistry.checkConfigBeanExists(moduleObjectName);
         String factoryName = ObjectNameUtil.getFactoryName(moduleObjectName);
         // check that service interface name exist
@@ -505,7 +506,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
     }

-    private boolean removeServiceReferences(ObjectName moduleObjectName, Set qNames) throws InstanceNotFoundException {
+    private boolean removeServiceReferences(final ObjectName moduleObjectName, final Set qNames) throws InstanceNotFoundException {
         ObjectNameUtil.checkType(moduleObjectName, ObjectNameUtil.TYPE_MODULE);
         assertWritable();
         Set serviceReferencesLinkingTo = findServiceReferencesLinkingTo(moduleObjectName, qNames);
@@ -515,7 +516,7 @@ public class ServiceReferenceRegistryImpl implements CloseableServiceReferenceRe
         return serviceReferencesLinkingTo.isEmpty() == false;
     }

-    private Set findServiceReferencesLinkingTo(ObjectName moduleObjectName, Set serviceInterfaceQNames) {
+    private Set findServiceReferencesLinkingTo(final ObjectName moduleObjectName, final Set serviceInterfaceQNames) {
         String factoryName = ObjectNameUtil.getFactoryName(moduleObjectName);
         if (serviceInterfaceQNames == null) {
             LOGGER.warn("Possible error in code: cannot find factoryName {} in {}, object name {}", factoryName, factoryNamesToQNames, moduleObjectName);
diff --git a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/util/InterfacesHelper.java b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/util/InterfacesHelper.java
index 5cb1513d9c..f4d732c65c 100644
--- a/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/util/InterfacesHelper.java
+++ b/opendaylight/config/config-manager/src/main/java/org/opendaylight/controller/config/manager/impl/util/InterfacesHelper.java
@@ -7,6 +7,7 @@
  */
 package org.opendaylight.controller.config.manager.impl.util;

+import com.google.common.collect.ImmutableSet;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
@@ -18,7 +19,7 @@ import org.opendaylight.controller.config.api.annotations.ServiceInterfaceAnnota
 import org.opendaylight.controller.config.spi.Module;
 import org.opendaylight.controller.config.spi.ModuleFactory;

-public class InterfacesHelper {
+public final class InterfacesHelper {

     private InterfacesHelper() {
     }
@@ -126,7 +127,7 @@ public class InterfacesHelper {
         for (ServiceInterfaceAnnotation sia: siAnnotations) {
             qNames.add(sia.value());
         }
-        return Collections.unmodifiableSet(qNames);
+        return ImmutableSet.copyOf(qNames);
     }

     public static Set getServiceInterfaceAnnotations(final ModuleFactory factory) {
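The config-manager hunks above apply the same checkstyle-driven cleanup as the four merged netconf changes: method and constructor parameters become final, and defensively copied collections move from Collections.unmodifiable* views to Guava ImmutableMap/ImmutableSet copies. A minimal, illustrative sketch of that pattern follows; the Registry class below is hypothetical, not part of the patch, and it assumes Guava is on the classpath.

    import com.google.common.collect.ImmutableMap;
    import java.util.HashMap;
    import java.util.Map;

    public final class Registry {
        private final Map<String, Integer> entries;

        // Before: this.entries = Collections.unmodifiableMap(modifiable);
        // that is only a read-only view, so later writes to the source map
        // remain visible through it. ImmutableMap.copyOf takes a snapshot.
        public Registry(final Map<String, Integer> modifiable) {
            this.entries = ImmutableMap.copyOf(modifiable);
        }

        public Integer lookup(final String key) {
            return entries.get(key);
        }

        public static void main(final String[] args) {
            final Map<String, Integer> source = new HashMap<>();
            source.put("a", 1);
            final Registry registry = new Registry(source);
            source.put("b", 2);
            // Prints "1 null": the snapshot does not see the later insertion.
            System.out.println(registry.lookup("a") + " " + registry.lookup("b"));
        }
    }
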
diff --git a/opendaylight/distribution/opendaylight/opendaylight-application.launch b/opendaylight/distribution/opendaylight/opendaylight-application.launch
deleted file mode 100644
index cd9fc94343..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-application.launch
+++ /dev/null
@@ -1,29 +0,0 @@
[29 deleted lines of Eclipse launch-configuration XML; markup not preserved in this plain-text export.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit-fast.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit-fast.launch
deleted file mode 100644
index bc5c53ca95..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit-fast.launch
+++ /dev/null
@@ -1,23 +0,0 @@
[23 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit-full.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit-full.launch
deleted file mode 100644
index 82b9fe22d0..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit-full.launch
+++ /dev/null
@@ -1,21 +0,0 @@
[21 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit-noclean.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit-noclean.launch
deleted file mode 100644
index f42f6577f0..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit-noclean.launch
+++ /dev/null
@@ -1,21 +0,0 @@
[21 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit-skiput.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit-skiput.launch
deleted file mode 100644
index 753839c0bb..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit-skiput.launch
+++ /dev/null
@@ -1,25 +0,0 @@
[25 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit-sonar.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit-sonar.launch
deleted file mode 100644
index b498f908f2..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit-sonar.launch
+++ /dev/null
@@ -1,23 +0,0 @@
[23 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-assembleit.launch b/opendaylight/distribution/opendaylight/opendaylight-assembleit.launch
deleted file mode 100644
index 0edd2a734a..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-assembleit.launch
+++ /dev/null
@@ -1,21 +0,0 @@
[21 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-local.target b/opendaylight/distribution/opendaylight/opendaylight-local.target
deleted file mode 100644
index a7780fe610..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-local.target
+++ /dev/null
@@ -1,6 +0,0 @@
[6 deleted lines of Eclipse target-platform XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher-local.launch b/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher-local.launch
deleted file mode 100644
index 98de065c5b..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher-local.launch
+++ /dev/null
@@ -1,31 +0,0 @@
[31 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher.launch b/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher.launch
deleted file mode 100644
index b14197f3d6..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-osgi-launcher.launch
+++ /dev/null
@@ -1,31 +0,0 @@
[31 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-sonar-fast.launch b/opendaylight/distribution/opendaylight/opendaylight-sonar-fast.launch
deleted file mode 100644
index 6d5a588441..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-sonar-fast.launch
+++ /dev/null
@@ -1,21 +0,0 @@
[21 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight-sonar.launch b/opendaylight/distribution/opendaylight/opendaylight-sonar.launch
deleted file mode 100644
index bb66bd8a92..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight-sonar.launch
+++ /dev/null
@@ -1,21 +0,0 @@
[21 deleted lines of Eclipse launch-configuration XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/opendaylight.target b/opendaylight/distribution/opendaylight/opendaylight.target
deleted file mode 100644
index 7ae309cce4..0000000000
--- a/opendaylight/distribution/opendaylight/opendaylight.target
+++ /dev/null
@@ -1,84 +0,0 @@
[84 deleted lines of Eclipse target-platform XML; markup not preserved.]
diff --git a/opendaylight/distribution/opendaylight/pom.xml b/opendaylight/distribution/opendaylight/pom.xml
deleted file mode 100644
index cd02759f5a..0000000000
--- a/opendaylight/distribution/opendaylight/pom.xml
+++ /dev/null
@@ -1,1440 +0,0 @@
[1440 deleted lines: the legacy OSGi distribution pom (parent commons.opendaylight 1.5.0-SNAPSHOT, artifact distribution.opendaylight 0.2.0-SNAPSHOT, packaging pom) with its full runtime dependency list (controller AD-SAL/MD-SAL, config/netconf, yangtools, Equinox/Gemini/Felix, Jetty, Spring, Jackson, Netty and related third-party bundles), the dependency-unpack, assembly (src/assemble/bin.xml), checkstyle and buildnumber build plugins, the scm section, and the notduringrelease, integrationtests (sanitytest + exec sanity run) and docs (swagger-ui) profiles; the XML markup was not preserved in this plain-text export.]
diff --git a/opendaylight/distribution/opendaylight/runsanity.bat b/opendaylight/distribution/opendaylight/runsanity.bat
deleted file mode 100644
index f219828bad..0000000000
--- a/opendaylight/distribution/opendaylight/runsanity.bat
+++ /dev/null
@@ -1,23 +0,0 @@
-rem Inject the sanitytest jar as a controller plugin
-copy .\target\dependency\sanitytest*.jar .\target\distribution.opendaylight-osgipackage\opendaylight\plugins
-
-rem Store the current working directory in a variable so that we can get back to it later
-set cwd=%cd%
-
-rem Switch to the distribution folder
-cd .\target\distribution.opendaylight-osgipackage\opendaylight
-
-rem Run the controller
-cmd.exe /c run.bat
-
-rem Store the exit value of the controller in a variable
-set success=%ERRORLEVEL%
-
-rem Switch back to the directory from which this script was invoked
-cd %cwd%
-
-rem Remove the sanitytest jar from the plugins directory
-del .\target\distribution.opendaylight-osgipackage\opendaylight\plugins\sanitytest*.jar
-
-rem Exit using the exit code that we had captured earlier after running the controller
-exit /b %SUCCESS%
\ No newline at end of file
diff --git a/opendaylight/distribution/opendaylight/runsanity.sh b/opendaylight/distribution/opendaylight/runsanity.sh
deleted file mode 100755
index 4ee9555b97..0000000000
--- a/opendaylight/distribution/opendaylight/runsanity.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-# Inject the sanitytest jar as a controller plugin
-cp ./target/dependency/sanitytest*.jar ./target/distribution.opendaylight-osgipackage/opendaylight/plugins
-
-# Store the current working directory in a variable so that we can get back to it later
-cwd=`pwd`
-
-# Switch to the distribution folder
-cd ./target/distribution.opendaylight-osgipackage/opendaylight/
-
-# Run the controller
-./run.sh
-
-# Store the exit value of the controller in a variable
-success=`echo $?`
-
-# Switch back to the directory from which this script was invoked
-cd $cwd
-
-# Remove the sanitytest jar from the plugins directory
-rm ./target/distribution.opendaylight-osgipackage/opendaylight/plugins/sanitytest*.jar
-
-# Exit using the exit code that we had captured earlier after running the controller
-exit $success
-
diff --git a/opendaylight/distribution/opendaylight/src/assemble/bin.xml b/opendaylight/distribution/opendaylight/src/assemble/bin.xml
deleted file mode 100644
index e5fc98a863..0000000000
--- a/opendaylight/distribution/opendaylight/src/assemble/bin.xml
+++ /dev/null
@@ -1,98 +0,0 @@
[98 deleted lines: the osgipackage assembly descriptor (formats dir and zip) mapping the Equinox, Virgo, Felix fileinstall, slf4j/logback, Jersey, logging.bridge and sanitytest artifacts into opendaylight/plugins and opendaylight/lib, and copying version.properties, configuration/config.ini and the generated initial configuration into the distribution; XML markup not preserved in this export.]
diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/config.ini b/opendaylight/distribution/opendaylight/src/main/resources/configuration/config.ini
deleted file mode 100644
index 691d83d45e..0000000000
--- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/config.ini
+++ /dev/null
@@ -1,163 +0,0 @@
-osgi.bundles=\
-    reference\:file\:../lib/org.apache.felix.fileinstall-3.1.6.jar@1:start,\
-    reference\:file\:../lib/org.eclipse.jdt.core.compiler.batch-3.8.0.I20120518-2145.jar@1:start,\
-    reference\:file\:../lib/org.eclipse.equinox.ds-1.4.0.v20120522-1841.jar@2:start,\
-    reference\:file\:../lib/org.eclipse.equinox.util-1.0.400.v20120522-2049.jar@2:start,\
-    reference\:file\:../lib/org.eclipse.osgi.services-3.3.100.v20120522-1822@2:start,\
-    reference\:file\:../lib/org.eclipse.equinox.console-1.0.0.v20120522-1841.jar@start,\
-    reference\:file\:../lib/slf4j-api-1.7.2.jar@1:start,\
-    reference\:file\:../lib/logback-classic-1.0.9.jar@1:start,\
-    reference\:file\:../lib/logback-core-1.0.9.jar@1:start,\
-    reference\:file\:../lib/logging.bridge-${logging.bridge.version}@1:start,\
-    reference\:file\:../lib/jersey-core-1.17.jar@2:start,\
-    reference\:file\:../lib/jersey-server-1.17.jar@2:start
-
-# Netconf startup configuration
-
-# Netconf tcp address:port is optional
-#netconf.tcp.address=127.0.0.1
-#netconf.tcp.port=8383
-
-# Netconf tcp address:port is optional
-netconf.ssh.address=0.0.0.0
-netconf.ssh.port=1830
-netconf.ssh.pk.path = ./configuration/RSA.pk
-netconf.ssh.default.user = netconf
-netconf.ssh.default.password = netconf
-
-
-netconf.config.persister.active=1,2
-# read startup configuration
-netconf.config.persister.1.storageAdapterClass=org.opendaylight.controller.config.persist.storage.directory.xml.XmlDirectoryStorageAdapter
-netconf.config.persister.1.properties.directoryStorage=configuration/initial/
-# include only xml files, files with other extensions will be skipped, multiple extensions are permitted e.g. netconf.config.persister.1.properties.includeExtensions=xml,cfg,config
-netconf.config.persister.1.properties.includeExtensions=xml
-netconf.config.persister.1.readonly=true
-
-netconf.config.persister.2.storageAdapterClass=org.opendaylight.controller.config.persist.storage.file.xml.XmlFileStorageAdapter
-netconf.config.persister.2.properties.fileStorage=configuration/current/controller.currentconfig.xml
-netconf.config.persister.2.properties.numberOfBackups=1
-
-# Set Default start level for framework
-osgi.bundles.defaultStartLevel=4
-# Extra packages to import from the boot class loader
-org.osgi.framework.system.packages.extra=sun.reflect,sun.reflect.misc,sun.misc,sun.nio.ch
-# This is not Eclipse App
-eclipse.ignoreApp=true
-# Don't shutdown equinox if the eclipse App has ended,
-# which is our case because we are not running any eclipse application
-osgi.noShutdown=true
-# Clean any cached data on restart of the framework
-osgi.clean=true
-
-# https://bugs.eclipse.org/bugs/show_bug.cgi?id=325578
-# Extend the framework to avoid the resources to be presented with
-# a URL of type bundleresource: but to be presented as file:
-osgi.hook.configurators.include=org.eclipse.virgo.kernel.equinox.extensions.hooks.ExtensionsHookConfigurator
-
-# Directory from where the fileinstall will monitor for new bundles
-felix.fileinstall.dir=./plugins
-# Immediately learn new bundles at startup
-felix.fileinstall.noInitialDelay=true
-# Auto start the bundles at level 4
-felix.fileinstall.start.level=4
-# Avoid to auto-install following bundles, that means those need
-# to be started manually or in other way like osgi.bundles
-felix.fileinstall.filter=^(?!org.apache.felix.fileinstall).*
-
-# logback configuration
-logback.configurationFile=configuration/logback.xml
-
-# Container configuration
-container.profile = Container
-
-# Connection manager configuration
-connection.scheme = ANY_CONTROLLER_ONE_MASTER
-
-# Embedded Tomcat configuration File
-org.eclipse.gemini.web.tomcat.config.path=configuration/tomcat-server.xml
-org.apache.tomcat.util.buf.UDecoder.ALLOW_ENCODED_SLASH=true
-
-# Open Flow related system parameters
-# TCP port on which the controller is listening (default 6633)
-# of.listenPort=6633
-# IP address of the controller (default: wild card)
-# of.address = 127.0.0.1
-# The time (in milliseconds) the controller will wait for a response after sending a Barrier Request or a Statistic Request message (default 2000 msec)
-# of.messageResponseTimer=2000
-# The switch liveness timeout value (default 60500 msec)
-# of.switchLivenessTimeout=60500
-# The size of the queue holding pending statistics requests (default 64). For large networks of n switches, it is recommended to set the queue size to n
-# of.statsQueueSize = 64
-# The flow statistics polling interval in second (default 10 sec)
-# of.flowStatsPollInterval=10
-# The port statistics polling interval in second (default 5 sec)
-# of.portStatsPollInterval=5
-# The description statistics polling interval in second (default 60 sec)
-# of.descStatsPollInterval=60
-# The table statistics polling interval in second (default 10 sec)
-# of.tableStatsPollInterval=10
-# The maximum number of asynchronous messages can be sent before sending a Barrier Request (default 100)
-# of.barrierMessagePriorCount=100
-# The interval which determines how often the discovery packets should be sent (default 300 sec)
-# of.discoveryInterval=300
-# The timeout multiple of discovery interval
-# of.discoveryTimeoutMultiple=2
-# For newly added ports, allow one more retry if the elapsed time exceeds this threshold (default 30 sec)
-# of.discoveryThreshold=30
-# The maximum number of ports handled in one discovery batch (default 512)
-# of.discoveryBatchMaxPorts=512
-
-# OVSDB configuration
-# ovsdb plugin supports both active and passive connections. It listens on port 6640 by default for Active connections.
-ovsdb.listenPort=6640
-
-# ovsdb creates Openflow nodes/bridges. This configuration configures the bridge's Openflow version.
-# default Openflow version = 1.3, we also support 1.0.
-ovsdb.of.version=1.3
-
-# ovsdb can be configured with ml2 to perform l3 forwarding. The config below enables that functionality, which is
-# disabled by default.
-# ovsdb.l3.fwd.enabled=yes
-
-# ovsdb can be configured with ml2 to perform l3 forwarding. When used in that scenario, the mac address of the default
-# gateway --on the external subnet-- is expected to be resolved from its inet address. The config below overrides that
-# specific arp/neighDiscovery lookup.
-# ovsdb.l3gateway.mac=00:00:5E:00:02:01
-
-# TLS configuration
-# To enable TLS, set secureChannelEnabled=true and specify the location of controller Java KeyStore and TrustStore files.
-# The Java KeyStore contains controller's private key and certificate. The Java TrustStore contains the trusted certificate
-# entries, including switches' Certification Authority (CA) certificates. For example,
-# secureChannelEnabled=true
-# controllerKeyStore=./configuration/ctlKeyStore
-# controllerKeyStorePassword=xxxxxxxx (this password should match the password used for KeyStore generation and at least 6 characters)
-# controllerTrustStore=./configuration/ctlTrustStore
-# controllerTrustStorePassword=xxxxxxxx (this password should match the password used for TrustStore generation and at least 6 characters)
-
-secureChannelEnabled=false
-controllerKeyStore=
-controllerKeyStorePassword=
-controllerTrustStore=
-controllerTrustStorePassword=
-
-# User Manager configurations
-enableStrongPasswordCheck = false
-
-#Jolokia configurations
-org.jolokia.listenForHttpService=false
-
-# Logging configuration for Tomcat-JUL logging
-java.util.logging.config.file=configuration/tomcat-logging.properties
-
-#Hosttracker hostsdb key scheme setting
-hosttracker.keyscheme=IP
-
-# LISP Flow Mapping configuration
-# Map-Register messages overwrite existing RLOC sets in EID-to-RLOC mappings
-lisp.mappingOverwrite = true
-# Enable the Solicit-Map-Request (SMR) mechanism
-lisp.smr = false
-
-#RESTConf websocket listen port (default is 8181)
-restconf.websocket.port=8181
diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/context.xml b/opendaylight/distribution/opendaylight/src/main/resources/configuration/context.xml
deleted file mode 100644
index 90b9ddf1c8..0000000000
--- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/context.xml
+++ /dev/null
@@ -1 +0,0 @@
[1 deleted line of XML; content not preserved in this export.]
diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/cors-config.xml b/opendaylight/distribution/opendaylight/src/main/resources/configuration/cors-config.xml
deleted file mode 100644
index c148b83a7a..0000000000
--- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/cors-config.xml
+++ /dev/null
@@ -1,67 +0,0 @@
[67 deleted lines: CorsFilter (org.apache.catalina.filters.CorsFilter) configuration with allowed origins *, methods GET,POST,HEAD,OPTIONS,PUT,DELETE, allowed and exposed headers, support for credentials, a preflight max-age of 10 and filter mappings on /*; XML markup not preserved in this export.]
diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/logback.xml b/opendaylight/distribution/opendaylight/src/main/resources/configuration/logback.xml
deleted file mode 100644
index b73244bc0a..0000000000
--- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/logback.xml
+++ /dev/null
@@ -1,105 +0,0 @@
[105 deleted lines: logback configuration with a console appender (pattern %date{"yyyy-MM-dd HH:mm:ss.SSS z"} [%thread] %-5level %logger{36} %X{akkaSource} - %msg%n), a rolling logs/opendaylight.log appender (zip backups, 10MB trigger), a logs/audit.log appender and per-logger level settings; XML markup not preserved in this export.]
2da70fff1e..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/startup/README +++ /dev/null @@ -1 +0,0 @@ -Directory where the opendaylight controller modules store their configuration files diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-logging.properties b/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-logging.properties deleted file mode 100644 index 3dbd9921b3..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-logging.properties +++ /dev/null @@ -1,42 +0,0 @@ -############################################################ -# Configuration file for tomcat logging -############################################################ -# Handlers: -# "handlers" specifies a comma separated list of log Handler -# classes. These handlers will be installed during VM startup. -# Note that these classes must be on the system classpath. -# Following line configures a ConsoleHandler and a FileHandler - -handlers= java.util.logging.FileHandler,java.util.logging.ConsoleHandler - -############################################################ -# Handler specific properties -# Describes specific configuration info for Handlers -# JUL does not support rolling file handler based on date -# For now we will keep count of files to 5 with rolling size of 10MB -############################################################ - -java.util.logging.FileHandler.pattern = logs/tomcat%g.log -java.util.logging.FileHandler.limit = 104857600 -java.util.logging.FileHandler.count = 5 -java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter -java.util.logging.FileHandler.append = true -java.util.logging.FileHandler.level = INFO - -# Limit the message that are printed on the console to SEVERE and above. -java.util.logging.ConsoleHandler.level = WARNING -java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter - -# SimpleFormatter output format to print one-line log message like this: -# --
:: [] > -# -java.util.logging.SimpleFormatter.format=%1$tF %1$tT %1$tZ [%3$s] %4$s %2$s %5$s%6$s%n - -############################################################ -# Facility specific properties. -# Provides extra control for each logger. -############################################################ - -# For example, set the com.xyz.foo logger to only log SEVERE -# messages -#org.apache.catalina = SEVERE diff --git a/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-server.xml b/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-server.xml deleted file mode 100644 index da2500be62..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/configuration/tomcat-server.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/opendaylight/distribution/opendaylight/src/main/resources/functions.sh b/opendaylight/distribution/opendaylight/src/main/resources/functions.sh deleted file mode 100644 index 21dd4c16ba..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/functions.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -# Function harvestHelp searches in run.sh part for line starting with "##". -# Next lines starting with "#" will be printed without first char # (=help content). -# Help content has to end with "##" on new line. -# Example: -##foo -# Foo is world wide used synnonym for bar. -## -function harvestHelp() { - key="$1" - if [ -z "${key}" ]; then - key='HELP' - fi - echo - sed -rn "/^##${key}$/,/^##/ p" $0 | sed -r '1 d; $ d; s/^#/ /' - grep "##${key}" $0 > /dev/null -} diff --git a/opendaylight/distribution/opendaylight/src/main/resources/run.bat b/opendaylight/distribution/opendaylight/src/main/resources/run.bat deleted file mode 100644 index ce13e33968..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/run.bat +++ /dev/null @@ -1,280 +0,0 @@ -@ECHO OFF -SETLOCAL ENABLEDELAYEDEXPANSION - -IF NOT EXIST "%JAVA_HOME%" ( - ECHO JAVA_HOME environment variable is not set - EXIT /B 2 -) - -SET basedir=%~dp0 -SET debugport=8000 -SET consoleport=2400 -SET jmxport=1088 -SET jvmMaxMemory= -SET extraJVMOpts= -SET consoleOpts=-console -consoleLog -SET PID= -SET JAVA_H=%JAVA_HOME%\bin\jps.exe - -:LOOP -IF "%~1" NEQ "" ( - SET CARG=%~1 - IF "!CARG!"=="-debug" ( - SET debugEnabled=true - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-debugsuspend" ( - SET debugEnabled=true - SET debugSuspended=true - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-debugport" ( - SET debugEnabled=true - SET debugport=%~2 - SHIFT & SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-jmx" ( - SET jmxEnabled=true - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-jmxport" ( - SET jmxEnabled=true - SET jmxport=%~2 - SHIFT & SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-start" ( - SET startEnabled=true - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-consoleport" ( - SET consoleport=%~2 - SHIFT & SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-console" ( - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-status" ( - for /F "TOKENS=1" %%G in ('""!JAVA_H!" -lvV ^| find /I "opendaylight""') do ( - set PID=%%G - ) - if "!PID!" NEQ "" ( - ECHO Controller is running with PID !PID! - ) else ( - ECHO Controller is not running. - ) - GOTO :EOF - ) - IF "!CARG!"=="-stop" ( - for /F "TOKENS=1" %%G in ('""!JAVA_H!" -lvV ^| find /I "opendaylight""') do ( - set PID=%%G - ) - if "!PID!" NEQ "" ( - ECHO Stopping controller PID !PID! - TASKKILL /F /PID !PID! - ) else ( - ECHO Controller is not running. 
- ) - GOTO :EOF - ) - IF "!CARG:~0,4!"=="-Xmx" ( - SET jvmMaxMemory=!CARG! - SHIFT - GOTO :LOOP - ) - IF "!CARG:~0,2!"=="-D" ( - SET extraJVMOpts=!extraJVMOpts! !CARG! - SHIFT - GOTO :LOOP - ) - IF "!CARG:~0,2!"=="-X" ( - SET extraJVMOpts=!extraJVMOpts! !CARG! - SHIFT - GOTO :LOOP - ) - IF "!CARG!"=="-help" ( - SHIFT - SET CARG=%2 - IF "!CARG!" NEQ "" ( - CALL:!CARG! - ) ELSE ( - CALL:helper - ) - GOTO :EOF - ) - - ECHO "Unknown option: !CARG!" - EXIT /B 1 -) - -IF "%debugEnabled%" NEQ "" ( - REM ECHO "DEBUG enabled" - SET extraJVMOpts=!extraJVMOpts! -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=%debugport% -) - -IF "%debugSuspended%" NEQ "" ( - REM ECHO "DEBUG enabled suspended" - SET extraJVMOpts=!extraJVMOpts! -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=%debugport% -) - -IF "%jvmMaxMemory%"=="" ( - SET jvmMaxMemory=-Xmx1G - ECHO Setting maximum memory to 1G. -) - -SET extraJVMOpts=!extraJVMOpts! %jvmMaxMemory% - -IF "%jmxEnabled%" NEQ "" ( - REM ECHO "JMX enabled " - SET extraJVMOpts=!extraJVMOpts! -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=%jmxport% -Dcom.sun.management.jmxremote -) -IF "%startEnabled%" NEQ "" ( - REM ECHO "START enabled " - SET consoleOpts=-console %consoleport% -consoleLog -) - -REM Check if controller is already running -for /F "TOKENS=1" %%G in ('""!JAVA_H!" -lvV ^| find /I "opendaylight""') do ( - SET PID=%%G -) -if "!PID!" NEQ "" ( - ECHO Controller is already running with PID !PID! - EXIT /B 1 -) - - -REM Now set the classpath: -SET cp="%basedir%lib\org.eclipse.osgi-3.8.1.v20120830-144521.jar;%basedir%lib\org.eclipse.virgo.kernel.equinox.extensions-3.6.0.RELEASE.jar;%basedir%lib\org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar" - -REM Now set framework classpath -SET fwcp="file:\%basedir%lib\org.eclipse.osgi-3.8.1.v20120830-144521.jar,file:\%basedir%lib\org.eclipse.virgo.kernel.equinox.extensions-3.6.0.RELEASE.jar,file:\%basedir%lib\org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar" - -SET RUN_CMD="%JAVA_HOME%\bin\java.exe" -Dopendaylight.controller !extraJVMOpts! -Djava.io.tmpdir="%basedir%work\tmp" -Djava.awt.headless=true -Dosgi.install.area=%basedir% -Dosgi.configuration.area="%basedir%configuration" -Dosgi.frameworkClassPath=%fwcp% -Dosgi.framework="file:\%basedir%lib\org.eclipse.osgi-3.8.1.v20120830-144521.jar" -classpath %cp% org.eclipse.equinox.launcher.Main %consoleOpts% - -ECHO !RUN_CMD! - -if "%startEnabled%" NEQ "" ( - START /B cmd /C CALL !RUN_CMD! > %basedir%\logs\controller.out 2>&1 - ECHO Running controller in the background. - EXIT /B 1 -) else ( - !RUN_CMD! - EXIT /B %ERRORLEVEL% -) - -:helper -echo. For more information on a specific command, type -help command-name. -echo. -echo jmx ^[-jmx^] -echo jmxport ^[-jmxport ^^] - DEFAULT is 1088 -echo debug ^[-debug^] -echo debugsuspend ^[-debugsuspend^] -echo debugport ^[-debugport ^^] - DEFAULT is 8000 -echo start ^[-start ^[^^]^] - DEFAULT port is 2400 -echo stop ^[-stop^] -echo status ^[-status^] -echo console ^[-console^] -echo agentpath ^[-agentpath:^^] -exit/B 1 - -:debugsuspend -ECHO. -ECHO. debugsuspend ^[-debugsuspend^] -ECHO. -ECHO. This command sets suspend on true in runjdwp in extra JVM options. If its true, VMStartEvent has a suspendPolicy of SUSPEND_ALL. If its false, VMStartEvent has a suspendPolicy of SUSPEND_NONE. -ECHO. -EXIT /B 1 - -:debugport -ECHO. -ECHO. debugport ^[-debugport ^^] - DEFAULT is 8000 -ECHO. -ECHO. 
Set address for settings in runjdwp in extra JVM options. -ECHO. The address is transport address for the connection. -ECHO. The address has to be in the range ^[1024,65535^]. If the option was not call, port will be set to default value. -ECHO. -EXIT /B 1 - -:jmxport -ECHO. -ECHO. jmxport ^[-jmxport ^^] - DEFAULT is 1088 -ECHO. -ECHO. Set jmx port for com.sun.management.jmxremote.port in JMX support. Port has to be in the range ^[1024,65535^]. If this option was not call, port will be set to default value. -ECHO. -EXIT /B 1 - -:debug -ECHO. -ECHO. debug [-debug] -ECHO. -ECHO. Run ODL controller with -Xdebug and -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=^$^{debugport^} -ECHO. -ECHO. -Xdebug enables debugging capabilities in the JVM which are used by the Java Virtual Machine Tools Interface (JVMTI). JVMTI is a low-level debugging interface used by debuggers and profiling tools. -ECHO. -ECHO. -Xrunjdwp option loads the JPDA reference implementation of JDWP. This library resides in the target VM and uses JVMDI and JNI to interact with it. It uses a transport and the JDWP protocol to communicate with a separate debugger application. -ECHO. -ECHO. settings for -Xrunjdwp: -ECHO. transport - name of the transport to use in connecting to debugger application -ECHO. server - if 'y', listen for a debugger application to attach; otherwise, attach to the debugger application at the specified address -ECHO. - if 'y' and no address is specified, choose a transport address at which to listen for a debugger application, and print the address to the standard output stream -ECHO. suspend - if 'y', VMStartEvent has a suspend Policy of SUSPEND_ALL -ECHO. - if 'n', VMStartEvent has a suspend policy of SUSPEND_NONE -ECHO. address - transport address for the connection -ECHO. - if server=n, attempt to attach to debugger application at this address -ECHO. - if server=y, listen for a connection at this address -ECHO. -EXIT /B 1 - -:jmx -ECHO. -ECHO. jmx [-jmx] -ECHO. -ECHO. Add JMX support. With settings for extra JVM options: -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=^$^{jmxport^} -Dcom.sun.management.jmxremote -ECHO. jmxport can by set with option -jmxport ^. Default num for the option is 1088. -ECHO. -EXIT /B 1 - -:stop -ECHO. -ECHO. stop ^[-stop^] -ECHO. -ECHO. If a controller is running, the command stop controller. Pid will be clean. -ECHO. -EXIT /B 1 - -:status -ECHO. -ECHO. status ^[-status^] -ECHO. -ECHO. Find out whether a controller is running and print it. -ECHO. -EXIT /B 1 - -:start -ECHO. -ECHO. start ^[-start ^[^^]^] -ECHO. -ECHO. If controller is not running, the command with argument^(for set port, where controller has start^) will start new controller on a port. The port has to be in the range ^[1024,65535^]. If this option was not call, port will be set to default value. Pid will be create. -EXIT /B 1 - -:console -ECHO. -ECHO. console [-console] -ECHO. Default option. -EXIT /B 1 - -:agentpath -ECHO. -ECHO. agentpath ^[-agentpath:^^] -ECHO. -ECHO. Agentpath option passes path to agent to jvm in order to load native agent library, e.g. yourkit profiler agent. 
-EXIT /B 1 - - diff --git a/opendaylight/distribution/opendaylight/src/main/resources/run.sh b/opendaylight/distribution/opendaylight/src/main/resources/run.sh deleted file mode 100755 index b032469b7f..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/run.sh +++ /dev/null @@ -1,331 +0,0 @@ -#!/bin/bash - -##HELP -# For more information on a specific command, type -help command-name. -# -# jmx [-jmx] -# jmxport [-jmxport ] - DEFAULT is 1088 -# debug [-debug] -# debugsuspend [-debugsuspend] -# debugport [-debugport ] - DEFAULT is 8000 -# start [-start []] - DEFAULT port is 2400 -# stop [-stop] -# status [-status] -# console [-console] -# agentpath [-agentpath:] -## - -platform='unknown' -unamestr=`uname` -if [[ "$unamestr" == 'Linux' ]]; then - platform='linux' -elif [[ "$unamestr" == 'Darwin' ]]; then - platform='osx' -fi - -if [[ $platform == 'linux' ]]; then - fullpath=`readlink -f $0` - - if [[ -z ${JAVA_HOME} ]]; then - # Find the actual location of the Java launcher: - java_launcher=`command -v java` - java_launcher=`readlink -f "${java_launcher}"` - - # Compute the Java home from the location of the Java launcher: - export JAVA_HOME="${java_launcher%/bin/java}" - fi -elif [[ $platform == 'osx' ]]; then - TARGET_FILE=$0 - cd `dirname "$TARGET_FILE"` - TARGET_FILE=`basename $TARGET_FILE` - - # Iterate down a (possible) chain of symlinks - while [ -L "$TARGET_FILE" ] - do - TARGET_FILE=`readlink "$TARGET_FILE"` - cd `dirname "$TARGET_FILE"` - TARGET_FILE=`basename "$TARGET_FILE"` - done - - # Compute the canonicalized name by finding the physical path - # for the directory we're in and appending the target file. - PHYS_DIR=`pwd -P` - RESULT=$PHYS_DIR/$TARGET_FILE - fullpath=$RESULT - - [[ -z ${JAVA_HOME} ]] && [[ -x "/usr/libexec/java_home" ]] && export JAVA_HOME=`/usr/libexec/java_home -v 1.7`; - -fi - -[[ -z ${JAVA_HOME} ]] && echo "Need to set JAVA_HOME environment variable" && exit -1; -[[ ! 
-x ${JAVA_HOME}/bin/java ]] && echo "Cannot find an executable \ -JVM at path ${JAVA_HOME}/bin/java check your JAVA_HOME" && exit -1; - -if [ -z ${ODL_BASEDIR} ]; then - basedir=`dirname "${fullpath}"` -else - basedir=${ODL_BASEDIR} -fi - -if [ -z ${ODL_DATADIR} ]; then - datadir=`dirname "${fullpath}"` -else - datadir=${ODL_DATADIR} -fi - -if [ -z ${TMP} ]; then - pidfile="/tmp/opendaylight.PID" -else - pidfile="${TMP}/opendaylight.PID" -fi -debug=0 -debugsuspend=0 -debugport=8000 -debugportread="" -startdaemon=0 -daemonport=2400 -daemonportread="" -jmxport=1088 -jmxportread="" -startjmx=0 -stopdaemon=0 -statusdaemon=0 -consolestart=1 -dohelp=0 -jvmMaxMemory="-Xmx1G" -extraJVMOpts="" -agentPath="" -unknown_option=0 -helper="" -while true ; do - case "$1" in - -debug) debug=1; shift ;; - -help) dohelp=1; shift; helper=$1; break ;; - -jmx) startjmx=1; shift ;; - -debugsuspend) debugsuspend=1; shift ;; - -debugport) shift; debugportread="$1"; if [[ "${debugportread}" =~ ^[0-9]+$ ]] ; then debugport=${debugportread}; shift; else echo "-debugport expects a number but was not found"; exit -1; fi;; - -jmxport) shift; jmxportread="$1"; if [[ "${jmxportread}" =~ ^[0-9]+$ ]] ; then jmxport=${jmxportread}; shift; else echo "-jmxport expects a number but was not found"; exit -1; fi;; - -start) startdaemon=1; shift; daemonportread="$1"; if [[ "${daemonportread}" =~ ^[0-9]+$ ]] ; then daemonport=${daemonportread}; shift; fi;; - -stop) stopdaemon=1; shift ;; - -status) statusdaemon=1; shift ;; - -console) shift ;; - -Xmx*) jvmMaxMemory="$1"; shift;; - -D*) extraJVMOpts="${extraJVMOpts} $1"; shift;; - -X*) extraJVMOpts="${extraJVMOpts} $1"; shift;; - -J*) extraJVMOpts="${extraJVMOpts} -$(echo "$1" | cut -d'J' -f2)"; shift;; - -agentpath:*) agentPath="$1"; shift;; - "") break ;; - *) echo "Unknown option $1"; unknown_option=1; break ;; - esac -done - - - -if [ "${unknown_option}" -eq 1 ]; then - echo "Use -help for more information." - exit 1 -fi - - -if [ "${dohelp}" -eq 1 ]; then - . ${basedir}/functions.sh - harvestHelp ${helper} - echo -e '\nFor other information type -help.\n' - exit 1 -fi - -extraJVMOpts="${extraJVMOpts} ${jvmMaxMemory}" - -##debugport -#debugport [-debugport ] - DEFAULT is 8000 -# -# Set address for settings in runjdwp in extra JVM options. -# The address is transport address for the connection. -# The address has to be in the range [1024,65535]. If this option was not call, port will be set to default value. -## -# Validate debug port -if [[ "${debugport}" -lt 1024 ]] || [[ "${debugport}" -gt 65535 ]]; then - echo "Debug Port not in the range [1024,65535] ${debugport}" - exit -1 -fi - -# Validate daemon console port -if [[ "${daemonport}" -lt 1024 ]] || [[ "${daemonport}" -gt 65535 ]]; then - echo "Daemon console Port not in the range [1024,65535] value is ${daemonport}" - exit -1 -fi - -##jmxport -#jmxport [-jmxport ] - DEFAULT is 1088 -# -# Set jmx port for com.sun.management.jmxremote.port in JMX support. Port has to be in the range [1024,65535]. If this option was not call, port will be set to default value. -## -# Validate jmx port -if [[ "${jmxport}" -lt 1024 ]] || [[ "${jmxport}" -gt 65535 ]]; then - echo "JMX Port not in the range [1024,65535] value is ${jmxport}" - exit -1 -fi -##debug -#debug [-debug] -# -#Run ODL controller with -Xdebug and -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=\${debugport} -#-Xdebug enables debugging capabilities in the JVM which are used by the Java Virtual Machine Tools Interface (JVMTI). 
JVMTI is a low-level debugging interface used by debuggers and profiling tools. -#-Xrunjdwp option loads the JPDA reference implementation of JDWP. This library resides in the target VM and uses JVMDI and JNI to interact with it. It uses a transport and the JDWP protocol to -#communicate with a separate debugger application. -#settings for -Xrunjdwp: -# transport - name of the transport to use in connecting to debugger application -# server - if “y”, listen for a debugger application to attach; otherwise, attach to the debugger application at the specified address -# - if “y” and no address is specified, choose a transport address at which to listen for a debugger application, and print the address to the standard output stream -# suspend - if “y”, VMStartEvent has a suspend Policy of SUSPEND_ALL -# - if “n”, VMStartEvent has a suspend policy of SUSPEND_NONE -# address - transport address for the connection -# - if server=n, attempt to attach to debugger application at this address -# - if server=y, listen for a connection at this address -## - -##debugsuspend -#debugsuspend [-debugsuspend] -# -#This command sets suspend on true in runjdwp in extra JVM options. If its true, VMStartEvent has a suspendPolicy of SUSPEND_ALL. If its false, VMStartEvent has a suspendPolicy of SUSPEND_NONE. -## -# Debug options -if [ "${debugsuspend}" -eq 1 ]; then - extraJVMOpts="${extraJVMOpts} -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=${debugport}" -elif [ "${debug}" -eq 1 ]; then - extraJVMOpts="${extraJVMOpts} -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${debugport}" -fi -##jmx -#jmx [-jmx] -# -#Add JMX support. With settings for extra JVM options: -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=\${jmxport} -Dcom.sun.management.jmxremote -#jmxport can by set with command -jmxport . Default num for the option is 1088. -## -# Add JMX support -if [ "${startjmx}" -eq 1 ]; then - extraJVMOpts="${extraJVMOpts} -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=${jmxport} -Dcom.sun.management.jmxremote" -fi - -######################################## -# Now add to classpath the OSGi JAR -######################################## -CLASSPATH=${CLASSPATH}:${basedir}/lib/org.eclipse.osgi-3.8.1.v20120830-144521.jar -FWCLASSPATH=file:"${basedir}"/lib/org.eclipse.osgi-3.8.1.v20120830-144521.jar - -######################################## -# Now add the extensions -######################################## - -# Extension 1: this is used to be able to convert all the -# bundleresouce: URL in file: so packages that are not OSGi ready can -# still work. Notably this is the case for spring classes -CLASSPATH=${CLASSPATH}:${basedir}/lib/org.eclipse.virgo.kernel.equinox.extensions-3.6.0.RELEASE.jar -FWCLASSPATH=${FWCLASSPATH},file:${basedir}/lib/org.eclipse.virgo.kernel.equinox.extensions-3.6.0.RELEASE.jar - -######################################## -# Now add the launcher -######################################## -CLASSPATH=${CLASSPATH}:${basedir}/lib/org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar -FWCLASSPATH=${FWCLASSPATH},file:${basedir}/lib/org.eclipse.equinox.launcher-1.3.0.v20120522-1813.jar - -cd $basedir - -##stop -#stop [-stop] -# -#If a controller is running, the command stop controller. Pid will be clean. 
-## -if [ "${stopdaemon}" -eq 1 ]; then - if [ -e "${pidfile}" ]; then - daemonpid=`cat "${pidfile}"` - kill "${daemonpid}" - rm -f "${pidfile}" - echo "Controller with PID: ${daemonpid} -- Stopped!" - exit 0 - else - echo "Doesn't seem any Controller daemon is currently running" - exit -1 - fi -fi - -##status -#status [-status] -# -#Find out whether a controller is running and print it. -## -if [ "${statusdaemon}" -eq 1 ]; then - if [ -e "${pidfile}" ]; then - daemonpid=`cat "${pidfile}"` - ps -p ${daemonpid} > /dev/null - daemonexists=$? - if [ "${daemonexists}" -eq 0 ]; then - echo "Controller with PID: ${daemonpid} -- Running!" - exit 0 - else - echo "Controller with PID: ${daemonpid} -- Doesn't seem to exist" - rm -f "${pidfile}" - exit 1 - fi - else - echo "Doesn't seem any Controller daemon is currently running, at least no PID file has been found" - exit -1 - fi -fi - -iotmpdir=`echo "${datadir}" | sed 's/ /\\ /g'` -bdir=`echo "${basedir}" | sed 's/ /\\ /g'` -confarea=`echo "${datadir}" | sed 's/ /\\ /g'` -fwclasspath=`echo "${FWCLASSPATH}" | sed 's/ /\\ /g'` - -##start -#start [-start []] -# -# If controller is not running, the command with argument(for set port, where controller has start) will start new controller on a port. The port has to be in the range [1024,65535]. If this option was not call, port will be set to default value. Pid will be create. -## -##console -#console [-console] -# -# Default option. -## -##agentpath -#agentpath [-agentpath:] -# -# Agentpath option passes path to agent to jvm in order to load native agent library, e.g. yourkit profiler agent. -## -echo "JVM maximum memory was set to ${jvmMaxMemory}." -if [ "${startdaemon}" -eq 1 ]; then - if [ -e "${pidfile}" ]; then - echo "Another instance of controller running, check with $0 -status" - exit -1 - fi - $JAVA_HOME/bin/java ${extraJVMOpts} \ - ${agentPath} \ - -Djava.io.tmpdir="${iotmpdir}/work/tmp" \ - -Dosgi.install.area="${bdir}" \ - -Dosgi.configuration.area="${confarea}/configuration" \ - -Dosgi.frameworkClassPath="${fwclasspath}" \ - -Dosgi.framework=file:"${bdir}/lib/org.eclipse.osgi-3.8.1.v20120830-144521.jar" \ - -Djava.awt.headless=true \ - -classpath "${CLASSPATH}" \ - org.eclipse.equinox.launcher.Main \ - -console ${daemonport} \ - -consoleLog & - daemonpid=$! 
- echo ${daemonpid} > ${pidfile} -elif [ "${consolestart}" -eq 1 ]; then - if [ -e "${pidfile}" ]; then - echo "Another instance of controller running, check with $0 -status" - exit -1 - fi - $JAVA_HOME/bin/java ${extraJVMOpts} \ - ${agentPath} \ - -Djava.io.tmpdir="${iotmpdir}/work/tmp" \ - -Dosgi.install.area="${bdir}" \ - -Dosgi.configuration.area="${confarea}/configuration" \ - -Dosgi.frameworkClassPath="${fwclasspath}" \ - -Dosgi.framework=file:"${bdir}/lib/org.eclipse.osgi-3.8.1.v20120830-144521.jar" \ - -Djava.awt.headless=true \ - -classpath "${CLASSPATH}" \ - org.eclipse.equinox.launcher.Main \ - -console \ - -consoleLog -fi diff --git a/opendaylight/distribution/opendaylight/src/main/resources/version.properties b/opendaylight/distribution/opendaylight/src/main/resources/version.properties deleted file mode 100644 index 90af49dfc2..0000000000 --- a/opendaylight/distribution/opendaylight/src/main/resources/version.properties +++ /dev/null @@ -1,6 +0,0 @@ -org.opendaylight.controller.version = 0.1 -org.opendaylight.controller.build.scm.version = ${buildNumber} -org.opendaylight.controller.build.user = ${env.USER} -org.opendaylight.controller.build.workspace = ********** -org.opendaylight.controller.build.timestamp = ${timestamp} -org.opendaylight.controller.build.machine = ********** diff --git a/opendaylight/distribution/sanitytest/pom.xml b/opendaylight/distribution/sanitytest/pom.xml deleted file mode 100644 index 9955afa274..0000000000 --- a/opendaylight/distribution/sanitytest/pom.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - 4.0.0 - - org.opendaylight.controller - commons.opendaylight - 1.5.0-SNAPSHOT - ../../commons/opendaylight - - - sanitytest - 0.5.0-SNAPSHOT - bundle - - - org.osgi - org.osgi.core - provided - - - - - - org.apache.felix - maven-bundle-plugin - true - - - org.opendaylight.controller.sanitytest - javax.xml.bind.annotation, - org.osgi.service.component, - org.slf4j, - org.eclipse.osgi.framework.console, - org.osgi.framework, - org.eclipse.osgi.baseadaptor, - org.eclipse.osgi.framework.adaptor, - org.osgi.framework.wiring - org.opendaylight.controller.sanitytest.internal.Activator - - ${project.basedir}/META-INF - - - - - - scm:git:ssh://git.opendaylight.org:29418/controller.git - scm:git:ssh://git.opendaylight.org:29418/controller.git - HEAD - https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - - diff --git a/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/distribution/Sanity.java b/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/distribution/Sanity.java deleted file mode 100644 index 9e6d43a06b..0000000000 --- a/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/distribution/Sanity.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
- * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Public License v1.0 which accompanies this distribution, - * and is available at http://www.eclipse.org/legal/epl-v10.html - */ -package org.opendaylight.controller.distribution; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -import java.util.ArrayList; -import java.util.List; - -public class Sanity { - - static void copy(InputStream in, OutputStream out) throws IOException { - while (true) { - int c = in.read(); - if (c == -1) break; - out.write((char)c); - } - } - - public static void main(String[] args) throws IOException, InterruptedException { - String cwd = System.getProperty("user.dir"); - - System.out.println("Current working directory = " + cwd); - - String os = System.getProperty("os.name").toLowerCase(); - List script = new ArrayList(); - - if(os.contains("windows")){ - script.add("cmd.exe"); - script.add("/c"); - script.add("runsanity.bat"); - } else { - script.add("./runsanity.sh"); - } - - ProcessBuilder processBuilder = new ProcessBuilder(); - processBuilder.inheritIO().command(script); - Process p = processBuilder.start(); - - copy(p.getInputStream(), System.out); - - p.waitFor(); - - System.out.println("Test exited with exitValue = " + p.exitValue()); - - System.exit(p.exitValue()); - } -} diff --git a/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/sanitytest/internal/Activator.java b/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/sanitytest/internal/Activator.java deleted file mode 100644 index 60db1b31e6..0000000000 --- a/opendaylight/distribution/sanitytest/src/main/java/org/opendaylight/controller/sanitytest/internal/Activator.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
- * - * This program and the accompanying materials are made available under the - * terms of the Eclipse Public License v1.0 which accompanies this distribution, - * and is available at http://www.eclipse.org/legal/epl-v10.html - */ -package org.opendaylight.controller.sanitytest.internal; - -import java.util.Timer; -import java.util.TimerTask; - -import org.osgi.framework.Bundle; -import org.osgi.framework.BundleActivator; -import org.osgi.framework.BundleContext; -import org.osgi.framework.wiring.BundleRevision; - -public class Activator implements BundleActivator { - //10 Second initial, 1 second subsequent - private static final int INITIAL_DELAY = 10000; - private static final int SUBSEQUENT_DELAY = 1000; - private static final int MAX_ATTEMPTS = 120; - - - private String stateToString(int state) { - switch (state) { - case Bundle.ACTIVE: - return "ACTIVE"; - case Bundle.INSTALLED: - return "INSTALLED"; - case Bundle.RESOLVED: - return "RESOLVED"; - case Bundle.UNINSTALLED: - return "UNINSTALLED"; - case Bundle.STARTING: - return "STARTING"; - default: - return "Not CONVERTED: state value is " + state; - } - } - - public void start(final BundleContext bundleContext) throws Exception { - Timer monitorTimer = new Timer("monitor timer", true); - monitorTimer.schedule(new TimerTask() { - @Override - public void run() { - int countup = 0; - boolean failed = false; - boolean resolved = false; - while (!resolved) { - resolved = true; - failed = false; - for(Bundle bundle : bundleContext.getBundles()){ - /* - * A bundle should be ACTIVE, unless it a fragment, in which case it should be RESOLVED - */ - int state = bundle.getState(); - if ((bundle.adapt(BundleRevision.class).getTypes() & BundleRevision.TYPE_FRAGMENT) != 0) { - //fragment - if (state != Bundle.RESOLVED) { - System.out.println("------ Failed to activate/resolve fragment = " + bundle.getSymbolicName() + " state = " + stateToString(bundle.getState())); - failed = true; - if (state == Bundle.STARTING) - resolved = false; - } - } else { - if(state != Bundle.ACTIVE) { - System.out.println("------ Failed to activate/resolve bundle = " + bundle.getSymbolicName() + " state = " + stateToString(bundle.getState())); - failed = true; - if (state == Bundle.STARTING) - resolved = false; - } - } - } - if (!resolved) { - countup++; - if (countup < MAX_ATTEMPTS) { - System.out.println("all bundles haven't finished starting, will repeat"); - try { - Thread.sleep(SUBSEQUENT_DELAY); - } catch (Exception e) { - System.out.println("Thread.sleep interuptted."); - break; - } - } else - resolved = true; - } - } - - if(failed){ - System.out.flush(); - System.out.println("exiting with 1 as failed"); - System.out.close(); - Runtime.getRuntime().exit(1); - } else { - System.out.flush(); - System.out.println("exiting with 0 as succeeded"); - System.out.close(); - Runtime.getRuntime().exit(0); - } - } - }, INITIAL_DELAY); - } - - public void stop(BundleContext bundleContext) throws Exception { - - } -} diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/ExampleActor.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/ExampleActor.java index 8e4a44cf20..6dfa4afd6b 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/ExampleActor.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/ExampleActor.java @@ -80,7 +80,7 @@ public class ExampleActor extends RaftActor { 
} else if (message instanceof PrintRole) { if(LOG.isDebugEnabled()) { String followers = ""; - if (getRaftState() == RaftState.Leader) { + if (getRaftState() == RaftState.Leader || getRaftState() == RaftState.IsolatedLeader) { followers = ((Leader)this.getCurrentBehavior()).printFollowerStates(); LOG.debug("{} = {}, Peers={}, followers={}", getId(), getRaftState(), getPeers(), followers); } else { diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/TestDriver.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/TestDriver.java index f202a8bb1d..de6169791e 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/TestDriver.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/example/TestDriver.java @@ -44,6 +44,11 @@ public class TestDriver { * stopLoggingForClient:{nodeName} * printNodes * printState + * + * Note: when run on IDE and on debug log level, the debug logs in + * AbstractUptypedActor and AbstractUptypedPersistentActor would need to be commented out. + * Also RaftActor handleCommand(), debug log which prints for every command other than AE/AER + * * @param args * @throws Exception */ diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/ConfigParams.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/ConfigParams.java index bff2a27797..433c3f7e4b 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/ConfigParams.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/ConfigParams.java @@ -62,4 +62,10 @@ public interface ConfigParams { * The number of journal log entries to batch on recovery before applying. 
*/ int getJournalRecoveryLogBatchSize(); + + /** + * The interval in which the leader needs to check itself if its isolated + * @return FiniteDuration + */ + FiniteDuration getIsolatedCheckInterval(); } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/DefaultConfigParamsImpl.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/DefaultConfigParamsImpl.java index dc4145358a..a2092234d5 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/DefaultConfigParamsImpl.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/DefaultConfigParamsImpl.java @@ -44,6 +44,8 @@ public class DefaultConfigParamsImpl implements ConfigParams { private FiniteDuration heartBeatInterval = HEART_BEAT_INTERVAL; private long snapshotBatchCount = SNAPSHOT_BATCH_COUNT; private int journalRecoveryLogBatchSize = JOURNAL_RECOVERY_LOG_BATCH_SIZE; + private FiniteDuration isolatedLeaderCheckInterval = + new FiniteDuration(HEART_BEAT_INTERVAL.length() * 1000, HEART_BEAT_INTERVAL.unit()); public void setHeartBeatInterval(FiniteDuration heartBeatInterval) { this.heartBeatInterval = heartBeatInterval; @@ -57,6 +59,10 @@ public class DefaultConfigParamsImpl implements ConfigParams { this.journalRecoveryLogBatchSize = journalRecoveryLogBatchSize; } + public void setIsolatedLeaderCheckInterval(FiniteDuration isolatedLeaderCheckInterval) { + this.isolatedLeaderCheckInterval = isolatedLeaderCheckInterval; + } + @Override public long getSnapshotBatchCount() { return snapshotBatchCount; @@ -87,4 +93,9 @@ public class DefaultConfigParamsImpl implements ConfigParams { public int getJournalRecoveryLogBatchSize() { return journalRecoveryLogBatchSize; } + + @Override + public FiniteDuration getIsolatedCheckInterval() { + return isolatedLeaderCheckInterval; + } } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftState.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftState.java index 65114eb659..216ad4103d 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftState.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/RaftState.java @@ -3,5 +3,6 @@ package org.opendaylight.controller.cluster.raft; public enum RaftState { Candidate, Follower, - Leader + Leader, + IsolatedLeader; } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/base/messages/IsolatedLeaderCheck.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/base/messages/IsolatedLeaderCheck.java new file mode 100644 index 0000000000..36fd813664 --- /dev/null +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/base/messages/IsolatedLeaderCheck.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.cluster.raft.base.messages; + +/** + * Message sent by the IsolatedLeaderCheck scheduler in the Leader to itself + * in order to check if its isolated. 
+ */ +public class IsolatedLeaderCheck { +} diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractLeader.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractLeader.java new file mode 100644 index 0000000000..d85ac8ef67 --- /dev/null +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractLeader.java @@ -0,0 +1,738 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ + +package org.opendaylight.controller.cluster.raft.behaviors; + +import akka.actor.ActorRef; +import akka.actor.ActorSelection; +import akka.actor.Cancellable; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Optional; +import com.google.common.base.Preconditions; +import com.google.protobuf.ByteString; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import org.opendaylight.controller.cluster.raft.ClientRequestTracker; +import org.opendaylight.controller.cluster.raft.ClientRequestTrackerImpl; +import org.opendaylight.controller.cluster.raft.FollowerLogInformation; +import org.opendaylight.controller.cluster.raft.FollowerLogInformationImpl; +import org.opendaylight.controller.cluster.raft.RaftActorContext; +import org.opendaylight.controller.cluster.raft.RaftState; +import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry; +import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot; +import org.opendaylight.controller.cluster.raft.base.messages.InitiateInstallSnapshot; +import org.opendaylight.controller.cluster.raft.base.messages.Replicate; +import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat; +import org.opendaylight.controller.cluster.raft.base.messages.SendInstallSnapshot; +import org.opendaylight.controller.cluster.raft.messages.AppendEntries; +import org.opendaylight.controller.cluster.raft.messages.AppendEntriesReply; +import org.opendaylight.controller.cluster.raft.messages.InstallSnapshot; +import org.opendaylight.controller.cluster.raft.messages.InstallSnapshotReply; +import org.opendaylight.controller.cluster.raft.messages.RaftRPC; +import org.opendaylight.controller.cluster.raft.messages.RequestVoteReply; +import scala.concurrent.duration.FiniteDuration; + +/** + * The behavior of a RaftActor when it is in the Leader state + *

+ * Leaders:
+ *   • Upon election: send initial empty AppendEntries RPCs (heartbeat) to each server; repeat during idle periods to prevent election timeouts (§5.2)
+ *   • If command received from client: append entry to local log, respond after entry applied to state machine (§5.3)
+ *   • If last log index ≥ nextIndex for a follower: send AppendEntries RPC with log entries starting at nextIndex
+ *       • If successful: update nextIndex and matchIndex for follower (§5.3)
+ *       • If AppendEntries fails because of log inconsistency: decrement nextIndex and retry (§5.3)
  • If there exists an N such that N > commitIndex, a majority + * of matchIndex[i] ≥ N, and log[N].term == currentTerm: + * set commitIndex = N (§5.3, §5.4). + */ +public abstract class AbstractLeader extends AbstractRaftActorBehavior { + protected final Map followerToLog = new HashMap<>(); + protected final Map mapFollowerToSnapshot = new HashMap<>(); + + protected final Set followers; + + private Cancellable heartbeatSchedule = null; + + private List trackerList = new ArrayList<>(); + + protected final int minReplicationCount; + + protected final int minIsolatedLeaderPeerCount; + + private Optional snapshot; + + public AbstractLeader(RaftActorContext context) { + super(context); + + followers = context.getPeerAddresses().keySet(); + + for (String followerId : followers) { + FollowerLogInformation followerLogInformation = + new FollowerLogInformationImpl(followerId, + new AtomicLong(context.getCommitIndex()), + new AtomicLong(-1), + context.getConfigParams().getElectionTimeOutInterval()); + + followerToLog.put(followerId, followerLogInformation); + } + + leaderId = context.getId(); + + if(LOG.isDebugEnabled()) { + LOG.debug("Election:Leader has following peers: {}", followers); + } + + minReplicationCount = getMajorityVoteCount(followers.size()); + + // the isolated Leader peer count will be 1 less than the majority vote count. + // this is because the vote count has the self vote counted in it + // for e.g + // 0 peers = 1 votesRequired , minIsolatedLeaderPeerCount = 0 + // 2 peers = 2 votesRequired , minIsolatedLeaderPeerCount = 1 + // 4 peers = 3 votesRequired, minIsolatedLeaderPeerCount = 2 + minIsolatedLeaderPeerCount = minReplicationCount > 0 ? (minReplicationCount - 1) : 0; + + snapshot = Optional.absent(); + + // Immediately schedule a heartbeat + // Upon election: send initial empty AppendEntries RPCs + // (heartbeat) to each server; repeat during idle periods to + // prevent election timeouts (§5.2) + scheduleHeartBeat(new FiniteDuration(0, TimeUnit.SECONDS)); + } + + private Optional getSnapshot() { + return snapshot; + } + + @VisibleForTesting + void setSnapshot(Optional snapshot) { + this.snapshot = snapshot; + } + + @Override + protected RaftActorBehavior handleAppendEntries(ActorRef sender, + AppendEntries appendEntries) { + + if(LOG.isDebugEnabled()) { + LOG.debug(appendEntries.toString()); + } + + return this; + } + + @Override + protected RaftActorBehavior handleAppendEntriesReply(ActorRef sender, + AppendEntriesReply appendEntriesReply) { + + if(! appendEntriesReply.isSuccess()) { + if(LOG.isDebugEnabled()) { + LOG.debug(appendEntriesReply.toString()); + } + } + + // Update the FollowerLogInformation + String followerId = appendEntriesReply.getFollowerId(); + FollowerLogInformation followerLogInformation = + followerToLog.get(followerId); + + if(followerLogInformation == null){ + LOG.error("Unknown follower {}", followerId); + return this; + } + + followerLogInformation.markFollowerActive(); + + if (appendEntriesReply.isSuccess()) { + followerLogInformation + .setMatchIndex(appendEntriesReply.getLogLastIndex()); + followerLogInformation + .setNextIndex(appendEntriesReply.getLogLastIndex() + 1); + } else { + + // TODO: When we find that the follower is out of sync with the + // Leader we simply decrement that followers next index by 1. + // Would it be possible to do better than this? 
The RAFT spec + // does not explicitly deal with it but may be something for us to + // think about + + followerLogInformation.decrNextIndex(); + } + + // Now figure out if this reply warrants a change in the commitIndex + // If there exists an N such that N > commitIndex, a majority + // of matchIndex[i] ≥ N, and log[N].term == currentTerm: + // set commitIndex = N (§5.3, §5.4). + for (long N = context.getCommitIndex() + 1; ; N++) { + int replicatedCount = 1; + + for (FollowerLogInformation info : followerToLog.values()) { + if (info.getMatchIndex().get() >= N) { + replicatedCount++; + } + } + + if (replicatedCount >= minReplicationCount) { + ReplicatedLogEntry replicatedLogEntry = context.getReplicatedLog().get(N); + if (replicatedLogEntry != null && + replicatedLogEntry.getTerm() == currentTerm()) { + context.setCommitIndex(N); + } + } else { + break; + } + } + + // Apply the change to the state machine + if (context.getCommitIndex() > context.getLastApplied()) { + applyLogToStateMachine(context.getCommitIndex()); + } + + return this; + } + + protected ClientRequestTracker removeClientRequestTracker(long logIndex) { + + ClientRequestTracker toRemove = findClientRequestTracker(logIndex); + if(toRemove != null) { + trackerList.remove(toRemove); + } + + return toRemove; + } + + protected ClientRequestTracker findClientRequestTracker(long logIndex) { + for (ClientRequestTracker tracker : trackerList) { + if (tracker.getIndex() == logIndex) { + return tracker; + } + } + return null; + } + + @Override + protected RaftActorBehavior handleRequestVoteReply(ActorRef sender, + RequestVoteReply requestVoteReply) { + return this; + } + + @Override + public RaftState state() { + return RaftState.Leader; + } + + @Override + public RaftActorBehavior handleMessage(ActorRef sender, Object originalMessage) { + Preconditions.checkNotNull(sender, "sender should not be null"); + + Object message = fromSerializableMessage(originalMessage); + + if (message instanceof RaftRPC) { + RaftRPC rpc = (RaftRPC) message; + // If RPC request or response contains term T > currentTerm: + // set currentTerm = T, convert to follower (§5.1) + // This applies to all RPC messages and responses + if (rpc.getTerm() > context.getTermInformation().getCurrentTerm()) { + context.getTermInformation().updateAndPersist(rpc.getTerm(), null); + + return switchBehavior(new Follower(context)); + } + } + + try { + if (message instanceof SendHeartBeat) { + sendHeartBeat(); + return this; + + } else if(message instanceof InitiateInstallSnapshot) { + installSnapshotIfNeeded(); + + } else if(message instanceof SendInstallSnapshot) { + // received from RaftActor + setSnapshot(Optional.of(((SendInstallSnapshot) message).getSnapshot())); + sendInstallSnapshot(); + + } else if (message instanceof Replicate) { + replicate((Replicate) message); + + } else if (message instanceof InstallSnapshotReply){ + handleInstallSnapshotReply((InstallSnapshotReply) message); + + } + } finally { + scheduleHeartBeat(context.getConfigParams().getHeartBeatInterval()); + } + + return super.handleMessage(sender, message); + } + + private void handleInstallSnapshotReply(InstallSnapshotReply reply) { + String followerId = reply.getFollowerId(); + FollowerToSnapshot followerToSnapshot = mapFollowerToSnapshot.get(followerId); + FollowerLogInformation followerLogInformation = followerToLog.get(followerId); + followerLogInformation.markFollowerActive(); + + if (followerToSnapshot != null && + followerToSnapshot.getChunkIndex() == reply.getChunkIndex()) { + + if 
(reply.isSuccess()) { + if(followerToSnapshot.isLastChunk(reply.getChunkIndex())) { + //this was the last chunk reply + if(LOG.isDebugEnabled()) { + LOG.debug("InstallSnapshotReply received, " + + "last chunk received, Chunk:{}. Follower:{} Setting nextIndex:{}", + reply.getChunkIndex(), followerId, + context.getReplicatedLog().getSnapshotIndex() + 1 + ); + } + + followerLogInformation.setMatchIndex( + context.getReplicatedLog().getSnapshotIndex()); + followerLogInformation.setNextIndex( + context.getReplicatedLog().getSnapshotIndex() + 1); + mapFollowerToSnapshot.remove(followerId); + + if(LOG.isDebugEnabled()) { + LOG.debug("followerToLog.get(followerId).getNextIndex().get()=" + + followerToLog.get(followerId).getNextIndex().get()); + } + + if (mapFollowerToSnapshot.isEmpty()) { + // once there are no pending followers receiving snapshots + // we can remove snapshot from the memory + setSnapshot(Optional.absent()); + } + + } else { + followerToSnapshot.markSendStatus(true); + } + } else { + LOG.info("InstallSnapshotReply received, " + + "sending snapshot chunk failed, Will retry, Chunk:{}", + reply.getChunkIndex() + ); + followerToSnapshot.markSendStatus(false); + } + + } else { + LOG.error("ERROR!!" + + "FollowerId in InstallSnapshotReply not known to Leader" + + " or Chunk Index in InstallSnapshotReply not matching {} != {}", + followerToSnapshot.getChunkIndex(), reply.getChunkIndex() + ); + } + } + + private void replicate(Replicate replicate) { + long logIndex = replicate.getReplicatedLogEntry().getIndex(); + + if(LOG.isDebugEnabled()) { + LOG.debug("Replicate message {}", logIndex); + } + + // Create a tracker entry we will use this later to notify the + // client actor + trackerList.add( + new ClientRequestTrackerImpl(replicate.getClientActor(), + replicate.getIdentifier(), + logIndex) + ); + + if (followers.size() == 0) { + context.setCommitIndex(logIndex); + applyLogToStateMachine(logIndex); + } else { + sendAppendEntries(); + } + } + + private void sendAppendEntries() { + // Send an AppendEntries to all followers + for (String followerId : followers) { + ActorSelection followerActor = context.getPeerActorSelection(followerId); + + if (followerActor != null) { + FollowerLogInformation followerLogInformation = followerToLog.get(followerId); + long followerNextIndex = followerLogInformation.getNextIndex().get(); + boolean isFollowerActive = followerLogInformation.isFollowerActive(); + List entries = null; + + if (mapFollowerToSnapshot.get(followerId) != null) { + // if install snapshot is in process , then sent next chunk if possible + if (isFollowerActive && mapFollowerToSnapshot.get(followerId).canSendNextChunk()) { + sendSnapshotChunk(followerActor, followerId); + } else { + // we send a heartbeat even if we have not received a reply for the last chunk + sendAppendEntriesToFollower(followerActor, followerNextIndex, + Collections.emptyList()); + } + + } else { + long leaderLastIndex = context.getReplicatedLog().lastIndex(); + long leaderSnapShotIndex = context.getReplicatedLog().getSnapshotIndex(); + + if (isFollowerActive && + context.getReplicatedLog().isPresent(followerNextIndex)) { + // FIXME : Sending one entry at a time + entries = context.getReplicatedLog().getFrom(followerNextIndex, 1); + + } else if (isFollowerActive && followerNextIndex >= 0 && + leaderLastIndex >= followerNextIndex ) { + // if the followers next index is not present in the leaders log, and + // if the follower is just not starting and if leader's index is more than followers index + // then snapshot 
should be sent + + if(LOG.isDebugEnabled()) { + LOG.debug("InitiateInstallSnapshot to follower:{}," + + "follower-nextIndex:{}, leader-snapshot-index:{}, " + + "leader-last-index:{}", followerId, + followerNextIndex, leaderSnapShotIndex, leaderLastIndex + ); + } + actor().tell(new InitiateInstallSnapshot(), actor()); + + // we would want to sent AE as the capture snapshot might take time + entries = Collections.emptyList(); + + } else { + //we send an AppendEntries, even if the follower is inactive + // in-order to update the followers timestamp, in case it becomes active again + entries = Collections.emptyList(); + } + + sendAppendEntriesToFollower(followerActor, followerNextIndex, entries); + + } + } + } + } + + private void sendAppendEntriesToFollower(ActorSelection followerActor, long followerNextIndex, + List entries) { + followerActor.tell( + new AppendEntries(currentTerm(), context.getId(), + prevLogIndex(followerNextIndex), + prevLogTerm(followerNextIndex), entries, + context.getCommitIndex()).toSerializable(), + actor() + ); + } + + /** + * An installSnapshot is scheduled at a interval that is a multiple of + * a HEARTBEAT_INTERVAL. This is to avoid the need to check for installing + * snapshots at every heartbeat. + * + * Install Snapshot works as follows + * 1. Leader sends a InitiateInstallSnapshot message to self + * 2. Leader then initiates the capture snapshot by sending a CaptureSnapshot message to actor + * 3. RaftActor on receipt of the CaptureSnapshotReply (from Shard), stores the received snapshot in the replicated log + * and makes a call to Leader's handleMessage , with SendInstallSnapshot message. + * 4. Leader , picks the snapshot from im-mem ReplicatedLog and sends it in chunks to the Follower + * 5. On complete, Follower sends back a InstallSnapshotReply. + * 6. On receipt of the InstallSnapshotReply for the last chunk, Leader marks the install complete for that follower + * and replenishes the memory by deleting the snapshot in Replicated log. + * + */ + private void installSnapshotIfNeeded() { + for (String followerId : followers) { + ActorSelection followerActor = + context.getPeerActorSelection(followerId); + + if(followerActor != null) { + FollowerLogInformation followerLogInformation = + followerToLog.get(followerId); + + long nextIndex = followerLogInformation.getNextIndex().get(); + + if (!context.getReplicatedLog().isPresent(nextIndex) && + context.getReplicatedLog().isInSnapshot(nextIndex)) { + LOG.info("{} follower needs a snapshot install", followerId); + if (snapshot.isPresent()) { + // if a snapshot is present in the memory, most likely another install is in progress + // no need to capture snapshot + sendSnapshotChunk(followerActor, followerId); + + } else { + initiateCaptureSnapshot(); + //we just need 1 follower who would need snapshot to be installed. + // when we have the snapshot captured, we would again check (in SendInstallSnapshot) + // who needs an install and send to all who need + break; + } + + } + } + } + } + + // on every install snapshot, we try to capture the snapshot. + // Once a capture is going on, another one issued will get ignored by RaftActor. 
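+ // The method below resolves the last-applied index and term (falling back to the
+ // snapshot index/term when that entry has already been trimmed from the log) and
+ // sends the RaftActor a CaptureSnapshot message flagged as install-snapshot initiated.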
+ private void initiateCaptureSnapshot() { + LOG.info("Initiating Snapshot Capture to Install Snapshot, Leader:{}", getLeaderId()); + ReplicatedLogEntry lastAppliedEntry = context.getReplicatedLog().get(context.getLastApplied()); + long lastAppliedIndex = -1; + long lastAppliedTerm = -1; + + if (lastAppliedEntry != null) { + lastAppliedIndex = lastAppliedEntry.getIndex(); + lastAppliedTerm = lastAppliedEntry.getTerm(); + } else if (context.getReplicatedLog().getSnapshotIndex() > -1) { + lastAppliedIndex = context.getReplicatedLog().getSnapshotIndex(); + lastAppliedTerm = context.getReplicatedLog().getSnapshotTerm(); + } + + boolean isInstallSnapshotInitiated = true; + actor().tell(new CaptureSnapshot(lastIndex(), lastTerm(), + lastAppliedIndex, lastAppliedTerm, isInstallSnapshotInitiated), + actor()); + } + + + private void sendInstallSnapshot() { + for (String followerId : followers) { + ActorSelection followerActor = context.getPeerActorSelection(followerId); + + if(followerActor != null) { + FollowerLogInformation followerLogInformation = followerToLog.get(followerId); + long nextIndex = followerLogInformation.getNextIndex().get(); + + if (!context.getReplicatedLog().isPresent(nextIndex) && + context.getReplicatedLog().isInSnapshot(nextIndex)) { + sendSnapshotChunk(followerActor, followerId); + } + } + } + } + + /** + * Sends a snapshot chunk to a given follower + * InstallSnapshot should qualify as a heartbeat too. + */ + private void sendSnapshotChunk(ActorSelection followerActor, String followerId) { + try { + if (snapshot.isPresent()) { + followerActor.tell( + new InstallSnapshot(currentTerm(), context.getId(), + context.getReplicatedLog().getSnapshotIndex(), + context.getReplicatedLog().getSnapshotTerm(), + getNextSnapshotChunk(followerId,snapshot.get()), + mapFollowerToSnapshot.get(followerId).incrementChunkIndex(), + mapFollowerToSnapshot.get(followerId).getTotalChunks() + ).toSerializable(), + actor() + ); + LOG.info("InstallSnapshot sent to follower {}, Chunk: {}/{}", + followerActor.path(), mapFollowerToSnapshot.get(followerId).getChunkIndex(), + mapFollowerToSnapshot.get(followerId).getTotalChunks()); + } + } catch (IOException e) { + LOG.error(e, "InstallSnapshot failed for Leader."); + } + } + + /** + * Acccepts snaphot as ByteString, enters into map for future chunks + * creates and return a ByteString chunk + */ + private ByteString getNextSnapshotChunk(String followerId, ByteString snapshotBytes) throws IOException { + FollowerToSnapshot followerToSnapshot = mapFollowerToSnapshot.get(followerId); + if (followerToSnapshot == null) { + followerToSnapshot = new FollowerToSnapshot(snapshotBytes); + mapFollowerToSnapshot.put(followerId, followerToSnapshot); + } + ByteString nextChunk = followerToSnapshot.getNextChunk(); + if (LOG.isDebugEnabled()) { + LOG.debug("Leader's snapshot nextChunk size:{}", nextChunk.size()); + } + return nextChunk; + } + + private void sendHeartBeat() { + if (followers.size() > 0) { + sendAppendEntries(); + } + } + + private void stopHeartBeat() { + if (heartbeatSchedule != null && !heartbeatSchedule.isCancelled()) { + heartbeatSchedule.cancel(); + } + } + + private void scheduleHeartBeat(FiniteDuration interval) { + if(followers.size() == 0){ + // Optimization - do not bother scheduling a heartbeat as there are + // no followers + return; + } + + stopHeartBeat(); + + // Schedule a heartbeat. When the scheduler triggers a SendHeartbeat + // message is sent to itself. 
+ // Scheduling the heartbeat only once here because heartbeats do not + // need to be sent if there are other messages being sent to the remote + // actor. + heartbeatSchedule = context.getActorSystem().scheduler().scheduleOnce( + interval, context.getActor(), new SendHeartBeat(), + context.getActorSystem().dispatcher(), context.getActor()); + } + + @Override + public void close() throws Exception { + stopHeartBeat(); + } + + @Override + public String getLeaderId() { + return context.getId(); + } + + protected boolean isLeaderIsolated() { + int minPresent = minIsolatedLeaderPeerCount; + for (FollowerLogInformation followerLogInformation : followerToLog.values()) { + if (followerLogInformation.isFollowerActive()) { + --minPresent; + if (minPresent == 0) { + break; + } + } + } + return (minPresent != 0); + } + + /** + * Encapsulates the snapshot bytestring and handles the logic of sending + * snapshot chunks + */ + protected class FollowerToSnapshot { + private ByteString snapshotBytes; + private int offset = 0; + // the next snapshot chunk is sent only if the replyReceivedForOffset matches offset + private int replyReceivedForOffset; + // if replyStatus is false, the previous chunk is attempted + private boolean replyStatus = false; + private int chunkIndex; + private int totalChunks; + + public FollowerToSnapshot(ByteString snapshotBytes) { + this.snapshotBytes = snapshotBytes; + replyReceivedForOffset = -1; + chunkIndex = 1; + int size = snapshotBytes.size(); + totalChunks = ( size / context.getConfigParams().getSnapshotChunkSize()) + + ((size % context.getConfigParams().getSnapshotChunkSize()) > 0 ? 1 : 0); + if(LOG.isDebugEnabled()) { + LOG.debug("Snapshot {} bytes, total chunks to send:{}", + size, totalChunks); + } + } + + public ByteString getSnapshotBytes() { + return snapshotBytes; + } + + public int incrementOffset() { + if(replyStatus) { + // if prev chunk failed, we would want to sent the same chunk again + offset = offset + context.getConfigParams().getSnapshotChunkSize(); + } + return offset; + } + + public int incrementChunkIndex() { + if (replyStatus) { + // if prev chunk failed, we would want to sent the same chunk again + chunkIndex = chunkIndex + 1; + } + return chunkIndex; + } + + public int getChunkIndex() { + return chunkIndex; + } + + public int getTotalChunks() { + return totalChunks; + } + + public boolean canSendNextChunk() { + // we only send a false if a chunk is sent but we have not received a reply yet + return replyReceivedForOffset == offset; + } + + public boolean isLastChunk(int chunkIndex) { + return totalChunks == chunkIndex; + } + + public void markSendStatus(boolean success) { + if (success) { + // if the chunk sent was successful + replyReceivedForOffset = offset; + replyStatus = true; + } else { + // if the chunk sent was failure + replyReceivedForOffset = offset; + replyStatus = false; + } + } + + public ByteString getNextChunk() { + int snapshotLength = getSnapshotBytes().size(); + int start = incrementOffset(); + int size = context.getConfigParams().getSnapshotChunkSize(); + if (context.getConfigParams().getSnapshotChunkSize() > snapshotLength) { + size = snapshotLength; + } else { + if ((start + context.getConfigParams().getSnapshotChunkSize()) > snapshotLength) { + size = snapshotLength - start; + } + } + + if(LOG.isDebugEnabled()) { + LOG.debug("length={}, offset={},size={}", + snapshotLength, start, size); + } + return getSnapshotBytes().substring(start, start + size); + + } + } + + // called from example-actor for printing the follower-states 
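
FollowerToSnapshot above derives the total chunk count from the snapshot size and the configured chunk size, and getNextChunk() clamps the final chunk to whatever bytes remain. A standalone check of that arithmetic, using a hypothetical 50-byte chunk size in place of ConfigParams.getSnapshotChunkSize():

    // Standalone check of the chunking arithmetic used by FollowerToSnapshot.
    public class SnapshotChunkMath {

        static int totalChunks(int snapshotSize, int chunkSize) {
            return (snapshotSize / chunkSize) + ((snapshotSize % chunkSize) > 0 ? 1 : 0);
        }

        // Length of the chunk starting at the given offset, mirroring getNextChunk().
        static int chunkLength(int snapshotSize, int chunkSize, int offset) {
            if (chunkSize > snapshotSize) {
                return snapshotSize;               // entire snapshot fits into one chunk
            }
            if (offset + chunkSize > snapshotSize) {
                return snapshotSize - offset;      // trailing, possibly shorter, chunk
            }
            return chunkSize;
        }

        public static void main(String[] args) {
            int size = 120;
            int chunk = 50;
            System.out.println(totalChunks(size, chunk));       // 3 chunks: 50 + 50 + 20 bytes
            System.out.println(chunkLength(size, chunk, 0));     // 50
            System.out.println(chunkLength(size, chunk, 50));    // 50
            System.out.println(chunkLength(size, chunk, 100));   // 20
        }
    }
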
+ public String printFollowerStates() { + StringBuilder sb = new StringBuilder(); + for(FollowerLogInformation followerLogInformation : followerToLog.values()) { + boolean isFollowerActive = followerLogInformation.isFollowerActive(); + sb.append("{"+followerLogInformation.getId() + " state:" + isFollowerActive + "},"); + + } + return "[" + sb.toString() + "]"; + } + + @VisibleForTesting + void markFollowerActive(String followerId) { + followerToLog.get(followerId).markFollowerActive(); + } +} diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java index eed74bba82..f235221da9 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/AbstractRaftActorBehavior.java @@ -390,7 +390,7 @@ public abstract class AbstractRaftActorBehavior implements RaftActorBehavior { } protected RaftActorBehavior switchBehavior(RaftActorBehavior behavior) { - LOG.info("Switching from behavior {} to {}", this.state(), behavior.state()); + LOG.info("{} :- Switching from behavior {} to {}", context.getId(), this.state(), behavior.state()); try { close(); } catch (Exception e) { @@ -399,4 +399,27 @@ public abstract class AbstractRaftActorBehavior implements RaftActorBehavior { return behavior; } + + protected int getMajorityVoteCount(int numPeers) { + // Votes are required from a majority of the peers including self. + // The numMajority field therefore stores a calculated value + // of the number of votes required for this candidate to win an + // election based on it's known peers. + // If a peer was added during normal operation and raft replicas + // came to know about them then the new peer would also need to be + // taken into consideration when calculating this value. + // Here are some examples for what the numMajority would be for n + // peers + // 0 peers = 1 numMajority -: (0 + 1) / 2 + 1 = 1 + // 2 peers = 2 numMajority -: (2 + 1) / 2 + 1 = 2 + // 4 peers = 3 numMajority -: (4 + 1) / 2 + 1 = 3 + + int numMajority = 0; + if (numPeers > 0) { + int self = 1; + numMajority = (numPeers + self) / 2 + 1; + } + return numMajority; + + } } diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java index 4a3e2c5d66..702417273f 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Candidate.java @@ -56,25 +56,7 @@ public class Candidate extends AbstractRaftActorBehavior { LOG.debug("Election:Candidate has following peers: {}", peers); } - if(peers.size() > 0) { - // Votes are required from a majority of the peers including self. - // The votesRequired field therefore stores a calculated value - // of the number of votes required for this candidate to win an - // election based on it's known peers. 
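
The majority calculation that Candidate used to carry inline now lives in AbstractRaftActorBehavior.getMajorityVoteCount(), shown in the hunk above. A small standalone check of the arithmetic; note that the numPeers > 0 guard returns 0 for a peer-less actor, even though the comment's table lists 1 for that case:

    // With n peers plus self, (n + 1) / 2 + 1 votes are required (integer division).
    public class MajorityVoteCountDemo {

        static int getMajorityVoteCount(int numPeers) {
            int numMajority = 0;
            if (numPeers > 0) {
                int self = 1;
                numMajority = (numPeers + self) / 2 + 1;
            }
            return numMajority;
        }

        public static void main(String[] args) {
            for (int peers = 0; peers <= 4; peers++) {
                System.out.println(peers + " peers -> " + getMajorityVoteCount(peers));
            }
            // prints: 0 -> 0, 1 -> 2, 2 -> 2, 3 -> 3, 4 -> 3
        }
    }
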
- // If a peer was added during normal operation and raft replicas - // came to know about them then the new peer would also need to be - // taken into consideration when calculating this value. - // Here are some examples for what the votesRequired would be for n - // peers - // 0 peers = 1 votesRequired (0 + 1) / 2 + 1 = 1 - // 2 peers = 2 votesRequired (2 + 1) / 2 + 1 = 2 - // 4 peers = 3 votesRequired (4 + 1) / 2 + 1 = 3 - int noOfPeers = peers.size(); - int self = 1; - votesRequired = (noOfPeers + self) / 2 + 1; - } else { - votesRequired = 0; - } + votesRequired = getMajorityVoteCount(peers.size()); startNewTerm(); scheduleElection(electionDuration()); diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeader.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeader.java new file mode 100644 index 0000000000..4f77711a4d --- /dev/null +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeader.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. + * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.cluster.raft.behaviors; + +import akka.actor.ActorRef; +import org.opendaylight.controller.cluster.raft.RaftActorContext; +import org.opendaylight.controller.cluster.raft.RaftState; +import org.opendaylight.controller.cluster.raft.messages.AppendEntriesReply; + +/** + * Leader which is termed as isolated. + *

    + * If the reply from the majority of the followers is not received then the leader changes its behavior + * to IsolatedLeader. An isolated leader may have followers and they would continue to receive replicated messages. + *

    + * A schedule is run, at an interval of (10 * Heartbeat-time-interval), in the Leader + * to check if it is isolated or not. +

    + * In the Isolated Leader , on every AppendEntriesReply, we aggressively check if the leader is isolated. + * If no, then the state is switched back to Leader. + * + */ +public class IsolatedLeader extends AbstractLeader { + public IsolatedLeader(RaftActorContext context) { + super(context); + } + + // we received an Append Entries reply, we should switch the Behavior to Leader + @Override + protected RaftActorBehavior handleAppendEntriesReply(ActorRef sender, + AppendEntriesReply appendEntriesReply) { + RaftActorBehavior ret = super.handleAppendEntriesReply(sender, appendEntriesReply); + + // it can happen that this isolated leader interacts with a new leader in the cluster and + // changes its state to Follower, hence we only need to switch to Leader if the state is still Isolated + if (ret.state() == RaftState.IsolatedLeader && !isLeaderIsolated()) { + LOG.info("IsolatedLeader {} switching from IsolatedLeader to Leader", leaderId); + return switchBehavior(new Leader(context)); + } + return ret; + } + + @Override + public RaftState state() { + return RaftState.IsolatedLeader; + } +} diff --git a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java index d83362b580..0dd3900113 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java +++ b/opendaylight/md-sal/sal-akka-raft/src/main/java/org/opendaylight/controller/cluster/raft/behaviors/Leader.java @@ -9,42 +9,14 @@ package org.opendaylight.controller.cluster.raft.behaviors; import akka.actor.ActorRef; -import akka.actor.ActorSelection; import akka.actor.Cancellable; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.google.protobuf.ByteString; -import org.opendaylight.controller.cluster.raft.ClientRequestTracker; -import org.opendaylight.controller.cluster.raft.ClientRequestTrackerImpl; -import org.opendaylight.controller.cluster.raft.FollowerLogInformation; -import org.opendaylight.controller.cluster.raft.FollowerLogInformationImpl; import org.opendaylight.controller.cluster.raft.RaftActorContext; -import org.opendaylight.controller.cluster.raft.RaftState; -import org.opendaylight.controller.cluster.raft.ReplicatedLogEntry; -import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot; import org.opendaylight.controller.cluster.raft.base.messages.InitiateInstallSnapshot; -import org.opendaylight.controller.cluster.raft.base.messages.Replicate; -import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat; -import org.opendaylight.controller.cluster.raft.base.messages.SendInstallSnapshot; -import org.opendaylight.controller.cluster.raft.messages.AppendEntries; -import org.opendaylight.controller.cluster.raft.messages.AppendEntriesReply; -import org.opendaylight.controller.cluster.raft.messages.InstallSnapshot; -import org.opendaylight.controller.cluster.raft.messages.InstallSnapshotReply; -import org.opendaylight.controller.cluster.raft.messages.RaftRPC; -import org.opendaylight.controller.cluster.raft.messages.RequestVoteReply; +import org.opendaylight.controller.cluster.raft.base.messages.IsolatedLeaderCheck; import scala.concurrent.duration.FiniteDuration; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import 
java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - /** * The behavior of a RaftActor when it is in the Leader state *

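The switch into and out of IsolatedLeader hinges on isLeaderIsolated(), which counts followers that responded within the election timeout against minIsolatedLeaderPeerCount. Below is a faithful standalone transcription of that loop plus a 5-member example; the threshold of 2 assumes the minimum is majority-minus-self, which this hunk does not show. The IsolatedLeaderTest added later in this patch exercises the same two scenarios.

    // Standalone transcription of isLeaderIsolated(): the leader is isolated when
    // fewer than minIsolatedLeaderPeerCount followers are currently active.
    public class IsolationCheck {

        static boolean isLeaderIsolated(boolean[] followerActive, int minIsolatedLeaderPeerCount) {
            int minPresent = minIsolatedLeaderPeerCount;
            for (boolean active : followerActive) {
                if (active) {
                    --minPresent;
                    if (minPresent == 0) {
                        break;
                    }
                }
            }
            return minPresent != 0;
        }

        public static void main(String[] args) {
            // 5-member cluster: 4 followers, and (by assumption) at least 2 must be active.
            boolean[] oneActive = { true, false, false, false };
            boolean[] twoActive = { true, true, false, false };
            System.out.println(isLeaderIsolated(oneActive, 2)); // true  -> switch to IsolatedLeader
            System.out.println(isLeaderIsolated(twoActive, 2)); // false -> stay Leader / switch back
        }
    }
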
    @@ -67,546 +39,41 @@ import java.util.concurrent.atomic.AtomicLong; * of matchIndex[i] ≥ N, and log[N].term == currentTerm: * set commitIndex = N (§5.3, §5.4). */ -public class Leader extends AbstractRaftActorBehavior { - - - protected final Map followerToLog = new HashMap<>(); - protected final Map mapFollowerToSnapshot = new HashMap<>(); - - private final Set followers; - - private Cancellable heartbeatSchedule = null; +public class Leader extends AbstractLeader { private Cancellable installSnapshotSchedule = null; - - private List trackerList = new ArrayList<>(); - - private final int minReplicationCount; - - private Optional snapshot; + private Cancellable isolatedLeaderCheckSchedule = null; public Leader(RaftActorContext context) { super(context); - followers = context.getPeerAddresses().keySet(); - - for (String followerId : followers) { - FollowerLogInformation followerLogInformation = - new FollowerLogInformationImpl(followerId, - new AtomicLong(context.getCommitIndex()), - new AtomicLong(-1), - context.getConfigParams().getElectionTimeOutInterval()); - - followerToLog.put(followerId, followerLogInformation); - } - - if(LOG.isDebugEnabled()) { - LOG.debug("Election:Leader has following peers: {}", followers); - } - - if (followers.size() > 0) { - minReplicationCount = (followers.size() + 1) / 2 + 1; - } else { - minReplicationCount = 0; - } - - snapshot = Optional.absent(); - - // Immediately schedule a heartbeat - // Upon election: send initial empty AppendEntries RPCs - // (heartbeat) to each server; repeat during idle periods to - // prevent election timeouts (§5.2) - scheduleHeartBeat(new FiniteDuration(0, TimeUnit.SECONDS)); - - scheduleInstallSnapshotCheck( - new FiniteDuration(context.getConfigParams().getHeartBeatInterval().length() * 1000, - context.getConfigParams().getHeartBeatInterval().unit()) - ); - - } - - private Optional getSnapshot() { - return snapshot; - } - - @VisibleForTesting - void setSnapshot(Optional snapshot) { - this.snapshot = snapshot; - } - - @Override protected RaftActorBehavior handleAppendEntries(ActorRef sender, - AppendEntries appendEntries) { - - if(LOG.isDebugEnabled()) { - LOG.debug(appendEntries.toString()); - } - - return this; - } - - @Override protected RaftActorBehavior handleAppendEntriesReply(ActorRef sender, - AppendEntriesReply appendEntriesReply) { - - if(! appendEntriesReply.isSuccess()) { - if(LOG.isDebugEnabled()) { - LOG.debug(appendEntriesReply.toString()); - } - } - - // Update the FollowerLogInformation - String followerId = appendEntriesReply.getFollowerId(); - FollowerLogInformation followerLogInformation = - followerToLog.get(followerId); - - if(followerLogInformation == null){ - LOG.error("Unknown follower {}", followerId); - return this; - } - - followerLogInformation.markFollowerActive(); - - if (appendEntriesReply.isSuccess()) { - followerLogInformation - .setMatchIndex(appendEntriesReply.getLogLastIndex()); - followerLogInformation - .setNextIndex(appendEntriesReply.getLogLastIndex() + 1); - } else { - - // TODO: When we find that the follower is out of sync with the - // Leader we simply decrement that followers next index by 1. - // Would it be possible to do better than this? 
The RAFT spec - // does not explicitly deal with it but may be something for us to - // think about - - followerLogInformation.decrNextIndex(); - } - - // Now figure out if this reply warrants a change in the commitIndex - // If there exists an N such that N > commitIndex, a majority - // of matchIndex[i] ≥ N, and log[N].term == currentTerm: - // set commitIndex = N (§5.3, §5.4). - for (long N = context.getCommitIndex() + 1; ; N++) { - int replicatedCount = 1; - - for (FollowerLogInformation info : followerToLog.values()) { - if (info.getMatchIndex().get() >= N) { - replicatedCount++; - } - } - - if (replicatedCount >= minReplicationCount) { - ReplicatedLogEntry replicatedLogEntry = - context.getReplicatedLog().get(N); - if (replicatedLogEntry != null - && replicatedLogEntry.getTerm() - == currentTerm()) { - context.setCommitIndex(N); - } - } else { - break; - } - } - - // Apply the change to the state machine - if (context.getCommitIndex() > context.getLastApplied()) { - applyLogToStateMachine(context.getCommitIndex()); - } - - return this; - } - - protected ClientRequestTracker removeClientRequestTracker(long logIndex) { - - ClientRequestTracker toRemove = findClientRequestTracker(logIndex); - if(toRemove != null) { - trackerList.remove(toRemove); - } - - return toRemove; - } - - protected ClientRequestTracker findClientRequestTracker(long logIndex) { - for (ClientRequestTracker tracker : trackerList) { - if (tracker.getIndex() == logIndex) { - return tracker; - } - } - - return null; - } - - @Override protected RaftActorBehavior handleRequestVoteReply(ActorRef sender, - RequestVoteReply requestVoteReply) { - return this; - } + scheduleInstallSnapshotCheck(context.getConfigParams().getIsolatedCheckInterval()); - @Override public RaftState state() { - return RaftState.Leader; + scheduleIsolatedLeaderCheck( + new FiniteDuration(context.getConfigParams().getHeartBeatInterval().length() * 10, + context.getConfigParams().getHeartBeatInterval().unit())); } @Override public RaftActorBehavior handleMessage(ActorRef sender, Object originalMessage) { Preconditions.checkNotNull(sender, "sender should not be null"); - Object message = fromSerializableMessage(originalMessage); - - if (message instanceof RaftRPC) { - RaftRPC rpc = (RaftRPC) message; - // If RPC request or response contains term T > currentTerm: - // set currentTerm = T, convert to follower (§5.1) - // This applies to all RPC messages and responses - if (rpc.getTerm() > context.getTermInformation().getCurrentTerm()) { - context.getTermInformation().updateAndPersist(rpc.getTerm(), null); - - return switchBehavior(new Follower(context)); - } - } - - try { - if (message instanceof SendHeartBeat) { - sendHeartBeat(); - return this; - - } else if(message instanceof InitiateInstallSnapshot) { - installSnapshotIfNeeded(); - - } else if(message instanceof SendInstallSnapshot) { - // received from RaftActor - setSnapshot(Optional.of(((SendInstallSnapshot) message).getSnapshot())); - sendInstallSnapshot(); - - } else if (message instanceof Replicate) { - replicate((Replicate) message); - - } else if (message instanceof InstallSnapshotReply){ - handleInstallSnapshotReply( - (InstallSnapshotReply) message); - } - } finally { - scheduleHeartBeat(context.getConfigParams().getHeartBeatInterval()); - } - - return super.handleMessage(sender, message); - } - - private void handleInstallSnapshotReply(InstallSnapshotReply reply) { - String followerId = reply.getFollowerId(); - FollowerToSnapshot followerToSnapshot = mapFollowerToSnapshot.get(followerId); 
- FollowerLogInformation followerLogInformation = followerToLog.get(followerId); - followerLogInformation.markFollowerActive(); - - if (followerToSnapshot != null && - followerToSnapshot.getChunkIndex() == reply.getChunkIndex()) { - - if (reply.isSuccess()) { - if(followerToSnapshot.isLastChunk(reply.getChunkIndex())) { - //this was the last chunk reply - if(LOG.isDebugEnabled()) { - LOG.debug("InstallSnapshotReply received, " + - "last chunk received, Chunk:{}. Follower:{} Setting nextIndex:{}", - reply.getChunkIndex(), followerId, - context.getReplicatedLog().getSnapshotIndex() + 1 - ); - } - - followerLogInformation.setMatchIndex( - context.getReplicatedLog().getSnapshotIndex()); - followerLogInformation.setNextIndex( - context.getReplicatedLog().getSnapshotIndex() + 1); - mapFollowerToSnapshot.remove(followerId); - - if(LOG.isDebugEnabled()) { - LOG.debug("followerToLog.get(followerId).getNextIndex().get()=" + - followerToLog.get(followerId).getNextIndex().get()); - } - - if (mapFollowerToSnapshot.isEmpty()) { - // once there are no pending followers receiving snapshots - // we can remove snapshot from the memory - setSnapshot(Optional.absent()); - } - - } else { - followerToSnapshot.markSendStatus(true); - } - } else { - LOG.info("InstallSnapshotReply received, " + - "sending snapshot chunk failed, Will retry, Chunk:{}", - reply.getChunkIndex() - ); - followerToSnapshot.markSendStatus(false); + if (originalMessage instanceof IsolatedLeaderCheck) { + if (isLeaderIsolated()) { + LOG.info("At least {} followers need to be active, Switching {} from Leader to IsolatedLeader", + minIsolatedLeaderPeerCount, leaderId); + return switchBehavior(new IsolatedLeader(context)); } - - } else { - LOG.error("ERROR!!" + - "FollowerId in InstallSnapshotReply not known to Leader" + - " or Chunk Index in InstallSnapshotReply not matching {} != {}", - followerToSnapshot.getChunkIndex(), reply.getChunkIndex() - ); - } - } - - private void replicate(Replicate replicate) { - long logIndex = replicate.getReplicatedLogEntry().getIndex(); - - if(LOG.isDebugEnabled()) { - LOG.debug("Replicate message {}", logIndex); } - // Create a tracker entry we will use this later to notify the - // client actor - trackerList.add( - new ClientRequestTrackerImpl(replicate.getClientActor(), - replicate.getIdentifier(), - logIndex) - ); - - if (followers.size() == 0) { - context.setCommitIndex(logIndex); - applyLogToStateMachine(logIndex); - } else { - sendAppendEntries(); - } + return super.handleMessage(sender, originalMessage); } - private void sendAppendEntries() { - // Send an AppendEntries to all followers - for (String followerId : followers) { - ActorSelection followerActor = context.getPeerActorSelection(followerId); - - if (followerActor != null) { - FollowerLogInformation followerLogInformation = followerToLog.get(followerId); - long followerNextIndex = followerLogInformation.getNextIndex().get(); - boolean isFollowerActive = followerLogInformation.isFollowerActive(); - List entries = null; - - if (mapFollowerToSnapshot.get(followerId) != null) { - // if install snapshot is in process , then sent next chunk if possible - if (isFollowerActive && mapFollowerToSnapshot.get(followerId).canSendNextChunk()) { - sendSnapshotChunk(followerActor, followerId); - } else { - // we send a heartbeat even if we have not received a reply for the last chunk - sendAppendEntriesToFollower(followerActor, followerNextIndex, - Collections.emptyList()); - } - - } else { - long leaderLastIndex = context.getReplicatedLog().lastIndex(); - long 
leaderSnapShotIndex = context.getReplicatedLog().getSnapshotIndex(); - - if (isFollowerActive && - context.getReplicatedLog().isPresent(followerNextIndex)) { - // FIXME : Sending one entry at a time - entries = context.getReplicatedLog().getFrom(followerNextIndex, 1); - - } else if (isFollowerActive && followerNextIndex >= 0 && - leaderLastIndex >= followerNextIndex ) { - // if the followers next index is not present in the leaders log, and - // if the follower is just not starting and if leader's index is more than followers index - // then snapshot should be sent - - if(LOG.isDebugEnabled()) { - LOG.debug("InitiateInstallSnapshot to follower:{}," + - "follower-nextIndex:{}, leader-snapshot-index:{}, " + - "leader-last-index:{}", followerId, - followerNextIndex, leaderSnapShotIndex, leaderLastIndex - ); - } - actor().tell(new InitiateInstallSnapshot(), actor()); - - // we would want to sent AE as the capture snapshot might take time - entries = Collections.emptyList(); - - } else { - //we send an AppendEntries, even if the follower is inactive - // in-order to update the followers timestamp, in case it becomes active again - entries = Collections.emptyList(); - } - - sendAppendEntriesToFollower(followerActor, followerNextIndex, entries); - - } - } - } - } - - private void sendAppendEntriesToFollower(ActorSelection followerActor, long followerNextIndex, - List entries) { - followerActor.tell( - new AppendEntries(currentTerm(), context.getId(), - prevLogIndex(followerNextIndex), - prevLogTerm(followerNextIndex), entries, - context.getCommitIndex()).toSerializable(), - actor() - ); - } - - /** - * An installSnapshot is scheduled at a interval that is a multiple of - * a HEARTBEAT_INTERVAL. This is to avoid the need to check for installing - * snapshots at every heartbeat. - * - * Install Snapshot works as follows - * 1. Leader sends a InitiateInstallSnapshot message to self - * 2. Leader then initiates the capture snapshot by sending a CaptureSnapshot message to actor - * 3. RaftActor on receipt of the CaptureSnapshotReply (from Shard), stores the received snapshot in the replicated log - * and makes a call to Leader's handleMessage , with SendInstallSnapshot message. - * 4. Leader , picks the snapshot from im-mem ReplicatedLog and sends it in chunks to the Follower - * 5. On complete, Follower sends back a InstallSnapshotReply. - * 6. On receipt of the InstallSnapshotReply for the last chunk, Leader marks the install complete for that follower - * and replenishes the memory by deleting the snapshot in Replicated log. - * - */ - private void installSnapshotIfNeeded() { - for (String followerId : followers) { - ActorSelection followerActor = - context.getPeerActorSelection(followerId); - - if(followerActor != null) { - FollowerLogInformation followerLogInformation = - followerToLog.get(followerId); - - long nextIndex = followerLogInformation.getNextIndex().get(); - - if (!context.getReplicatedLog().isPresent(nextIndex) && - context.getReplicatedLog().isInSnapshot(nextIndex)) { - LOG.info("{} follower needs a snapshot install", followerId); - if (snapshot.isPresent()) { - // if a snapshot is present in the memory, most likely another install is in progress - // no need to capture snapshot - sendSnapshotChunk(followerActor, followerId); - - } else { - initiateCaptureSnapshot(); - //we just need 1 follower who would need snapshot to be installed. 
- // when we have the snapshot captured, we would again check (in SendInstallSnapshot) - // who needs an install and send to all who need - break; - } - - } - } - } - } - - // on every install snapshot, we try to capture the snapshot. - // Once a capture is going on, another one issued will get ignored by RaftActor. - private void initiateCaptureSnapshot() { - LOG.info("Initiating Snapshot Capture to Install Snapshot, Leader:{}", getLeaderId()); - ReplicatedLogEntry lastAppliedEntry = context.getReplicatedLog().get(context.getLastApplied()); - long lastAppliedIndex = -1; - long lastAppliedTerm = -1; - - if (lastAppliedEntry != null) { - lastAppliedIndex = lastAppliedEntry.getIndex(); - lastAppliedTerm = lastAppliedEntry.getTerm(); - } else if (context.getReplicatedLog().getSnapshotIndex() > -1) { - lastAppliedIndex = context.getReplicatedLog().getSnapshotIndex(); - lastAppliedTerm = context.getReplicatedLog().getSnapshotTerm(); - } - - boolean isInstallSnapshotInitiated = true; - actor().tell(new CaptureSnapshot(lastIndex(), lastTerm(), - lastAppliedIndex, lastAppliedTerm, isInstallSnapshotInitiated), - actor()); - } - - - private void sendInstallSnapshot() { - for (String followerId : followers) { - ActorSelection followerActor = context.getPeerActorSelection(followerId); - - if(followerActor != null) { - FollowerLogInformation followerLogInformation = followerToLog.get(followerId); - long nextIndex = followerLogInformation.getNextIndex().get(); - - if (!context.getReplicatedLog().isPresent(nextIndex) && - context.getReplicatedLog().isInSnapshot(nextIndex)) { - sendSnapshotChunk(followerActor, followerId); - } - } - } - } - - /** - * Sends a snapshot chunk to a given follower - * InstallSnapshot should qualify as a heartbeat too. - */ - private void sendSnapshotChunk(ActorSelection followerActor, String followerId) { - try { - if (snapshot.isPresent()) { - followerActor.tell( - new InstallSnapshot(currentTerm(), context.getId(), - context.getReplicatedLog().getSnapshotIndex(), - context.getReplicatedLog().getSnapshotTerm(), - getNextSnapshotChunk(followerId,snapshot.get()), - mapFollowerToSnapshot.get(followerId).incrementChunkIndex(), - mapFollowerToSnapshot.get(followerId).getTotalChunks() - ).toSerializable(), - actor() - ); - LOG.info("InstallSnapshot sent to follower {}, Chunk: {}/{}", - followerActor.path(), mapFollowerToSnapshot.get(followerId).getChunkIndex(), - mapFollowerToSnapshot.get(followerId).getTotalChunks()); - } - } catch (IOException e) { - LOG.error(e, "InstallSnapshot failed for Leader."); - } - } - - /** - * Acccepts snaphot as ByteString, enters into map for future chunks - * creates and return a ByteString chunk - */ - private ByteString getNextSnapshotChunk(String followerId, ByteString snapshotBytes) throws IOException { - FollowerToSnapshot followerToSnapshot = mapFollowerToSnapshot.get(followerId); - if (followerToSnapshot == null) { - followerToSnapshot = new FollowerToSnapshot(snapshotBytes); - mapFollowerToSnapshot.put(followerId, followerToSnapshot); - } - ByteString nextChunk = followerToSnapshot.getNextChunk(); - if (LOG.isDebugEnabled()) { - LOG.debug("Leader's snapshot nextChunk size:{}", nextChunk.size()); - } - return nextChunk; - } - - private void sendHeartBeat() { - if (followers.size() > 0) { - sendAppendEntries(); - } - } - - private void stopHeartBeat() { - if (heartbeatSchedule != null && !heartbeatSchedule.isCancelled()) { - heartbeatSchedule.cancel(); - } - } - - private void stopInstallSnapshotSchedule() { + protected void 
stopInstallSnapshotSchedule() { if (installSnapshotSchedule != null && !installSnapshotSchedule.isCancelled()) { installSnapshotSchedule.cancel(); } } - private void scheduleHeartBeat(FiniteDuration interval) { - if(followers.size() == 0){ - // Optimization - do not bother scheduling a heartbeat as there are - // no followers - return; - } - - stopHeartBeat(); - - // Schedule a heartbeat. When the scheduler triggers a SendHeartbeat - // message is sent to itself. - // Scheduling the heartbeat only once here because heartbeats do not - // need to be sent if there are other messages being sent to the remote - // actor. - heartbeatSchedule = context.getActorSystem().scheduler().scheduleOnce( - interval, context.getActor(), new SendHeartBeat(), - context.getActorSystem().dispatcher(), context.getActor()); - } - - private void scheduleInstallSnapshotCheck(FiniteDuration interval) { + protected void scheduleInstallSnapshotCheck(FiniteDuration interval) { if(followers.size() == 0){ // Optimization - do not bother scheduling a heartbeat as there are // no followers @@ -624,122 +91,22 @@ public class Leader extends AbstractRaftActorBehavior { context.getActorSystem().dispatcher(), context.getActor()); } - - - @Override public void close() throws Exception { - stopHeartBeat(); - } - - @Override public String getLeaderId() { - return context.getId(); - } - - /** - * Encapsulates the snapshot bytestring and handles the logic of sending - * snapshot chunks - */ - protected class FollowerToSnapshot { - private ByteString snapshotBytes; - private int offset = 0; - // the next snapshot chunk is sent only if the replyReceivedForOffset matches offset - private int replyReceivedForOffset; - // if replyStatus is false, the previous chunk is attempted - private boolean replyStatus = false; - private int chunkIndex; - private int totalChunks; - - public FollowerToSnapshot(ByteString snapshotBytes) { - this.snapshotBytes = snapshotBytes; - replyReceivedForOffset = -1; - chunkIndex = 1; - int size = snapshotBytes.size(); - totalChunks = ( size / context.getConfigParams().getSnapshotChunkSize()) + - ((size % context.getConfigParams().getSnapshotChunkSize()) > 0 ? 
1 : 0); - if(LOG.isDebugEnabled()) { - LOG.debug("Snapshot {} bytes, total chunks to send:{}", - size, totalChunks); - } - } - - public ByteString getSnapshotBytes() { - return snapshotBytes; - } - - public int incrementOffset() { - if(replyStatus) { - // if prev chunk failed, we would want to sent the same chunk again - offset = offset + context.getConfigParams().getSnapshotChunkSize(); - } - return offset; - } - - public int incrementChunkIndex() { - if (replyStatus) { - // if prev chunk failed, we would want to sent the same chunk again - chunkIndex = chunkIndex + 1; - } - return chunkIndex; - } - - public int getChunkIndex() { - return chunkIndex; - } - - public int getTotalChunks() { - return totalChunks; - } - - public boolean canSendNextChunk() { - // we only send a false if a chunk is sent but we have not received a reply yet - return replyReceivedForOffset == offset; - } - - public boolean isLastChunk(int chunkIndex) { - return totalChunks == chunkIndex; - } - - public void markSendStatus(boolean success) { - if (success) { - // if the chunk sent was successful - replyReceivedForOffset = offset; - replyStatus = true; - } else { - // if the chunk sent was failure - replyReceivedForOffset = offset; - replyStatus = false; - } - } - - public ByteString getNextChunk() { - int snapshotLength = getSnapshotBytes().size(); - int start = incrementOffset(); - int size = context.getConfigParams().getSnapshotChunkSize(); - if (context.getConfigParams().getSnapshotChunkSize() > snapshotLength) { - size = snapshotLength; - } else { - if ((start + context.getConfigParams().getSnapshotChunkSize()) > snapshotLength) { - size = snapshotLength - start; - } - } - - if(LOG.isDebugEnabled()) { - LOG.debug("length={}, offset={},size={}", - snapshotLength, start, size); - } - return getSnapshotBytes().substring(start, start + size); - + protected void stopIsolatedLeaderCheckSchedule() { + if (isolatedLeaderCheckSchedule != null && !isolatedLeaderCheckSchedule.isCancelled()) { + isolatedLeaderCheckSchedule.cancel(); } } - // called from example-actor for printing the follower-states - public String printFollowerStates() { - StringBuilder sb = new StringBuilder(); - for(FollowerLogInformation followerLogInformation : followerToLog.values()) { - boolean isFollowerActive = followerLogInformation.isFollowerActive(); - sb.append("{"+followerLogInformation.getId() + " state:" + isFollowerActive + "},"); + protected void scheduleIsolatedLeaderCheck(FiniteDuration isolatedCheckInterval) { + isolatedLeaderCheckSchedule = context.getActorSystem().scheduler().schedule(isolatedCheckInterval, isolatedCheckInterval, + context.getActor(), new IsolatedLeaderCheck(), + context.getActorSystem().dispatcher(), context.getActor()); + } - } - return "[" + sb.toString() + "]"; + @Override public void close() throws Exception { + stopInstallSnapshotSchedule(); + stopIsolatedLeaderCheckSchedule(); + super.close(); } @VisibleForTesting diff --git a/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeaderTest.java b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeaderTest.java new file mode 100644 index 0000000000..708068a789 --- /dev/null +++ b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/IsolatedLeaderTest.java @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Eclipse Public License v1.0 which accompanies this distribution, + * and is available at http://www.eclipse.org/legal/epl-v10.html + */ +package org.opendaylight.controller.cluster.raft.behaviors; + +import akka.actor.ActorRef; +import akka.actor.Props; +import akka.testkit.JavaTestKit; +import java.util.HashMap; +import java.util.Map; +import org.junit.Test; +import org.opendaylight.controller.cluster.raft.MockRaftActorContext; +import org.opendaylight.controller.cluster.raft.RaftActorContext; +import org.opendaylight.controller.cluster.raft.RaftState; +import org.opendaylight.controller.cluster.raft.messages.AppendEntriesReply; +import org.opendaylight.controller.cluster.raft.utils.DoNothingActor; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class IsolatedLeaderTest extends AbstractRaftActorBehaviorTest { + + private ActorRef leaderActor = + getSystem().actorOf(Props.create(DoNothingActor.class)); + + private ActorRef senderActor = + getSystem().actorOf(Props.create(DoNothingActor.class)); + + @Override + protected RaftActorBehavior createBehavior( + RaftActorContext actorContext) { + return new Leader(actorContext); + } + + @Override + protected RaftActorContext createActorContext() { + return createActorContext(leaderActor); + } + + + @Test + public void testHandleMessageWithThreeMembers() { + new JavaTestKit(getSystem()) {{ + String followerAddress1 = "akka://test/user/$a"; + String followerAddress2 = "akka://test/user/$b"; + + MockRaftActorContext leaderActorContext = (MockRaftActorContext) createActorContext(); + Map peerAddresses = new HashMap<>(); + peerAddresses.put("follower-1", followerAddress1); + peerAddresses.put("follower-2", followerAddress2); + leaderActorContext.setPeerAddresses(peerAddresses); + + IsolatedLeader isolatedLeader = new IsolatedLeader(leaderActorContext); + assertTrue(isolatedLeader.state() == RaftState.IsolatedLeader); + + // in a 3 node cluster, even if 1 follower is returns a reply, the isolatedLeader is not isolated + RaftActorBehavior behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-1", isolatedLeader.lastTerm() - 1, true, + isolatedLeader.lastIndex() - 1, isolatedLeader.lastTerm() - 1)); + + assertEquals(RaftState.Leader, behavior.state()); + + behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-2", isolatedLeader.lastTerm() - 1, true, + isolatedLeader.lastIndex() -1, isolatedLeader.lastTerm() -1 )); + + assertEquals(RaftState.Leader, behavior.state()); + }}; + } + + @Test + public void testHandleMessageWithFiveMembers() { + new JavaTestKit(getSystem()) {{ + + String followerAddress1 = "akka://test/user/$a"; + String followerAddress2 = "akka://test/user/$b"; + String followerAddress3 = "akka://test/user/$c"; + String followerAddress4 = "akka://test/user/$d"; + + MockRaftActorContext leaderActorContext = (MockRaftActorContext) createActorContext(); + Map peerAddresses = new HashMap<>(); + peerAddresses.put("follower-1", followerAddress1); + peerAddresses.put("follower-2", followerAddress2); + peerAddresses.put("follower-3", followerAddress3); + peerAddresses.put("follower-4", followerAddress4); + leaderActorContext.setPeerAddresses(peerAddresses); + + IsolatedLeader isolatedLeader = new IsolatedLeader(leaderActorContext); + assertEquals(RaftState.IsolatedLeader, isolatedLeader.state()); + + // in a 5 member cluster, atleast 2 
followers need to be active and return a reply + RaftActorBehavior behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-1", isolatedLeader.lastTerm() - 1, true, + isolatedLeader.lastIndex() -1, isolatedLeader.lastTerm() -1 )); + + assertEquals(RaftState.IsolatedLeader, behavior.state()); + + behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-2", isolatedLeader.lastTerm() - 1, true, + isolatedLeader.lastIndex() -1, isolatedLeader.lastTerm() -1 )); + + assertEquals(RaftState.Leader, behavior.state()); + + behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-3", isolatedLeader.lastTerm() - 1, true, + isolatedLeader.lastIndex() -1, isolatedLeader.lastTerm() -1 )); + + assertEquals(RaftState.Leader, behavior.state()); + }}; + } + + @Test + public void testHandleMessageFromAnotherLeader() { + new JavaTestKit(getSystem()) {{ + String followerAddress1 = "akka://test/user/$a"; + String followerAddress2 = "akka://test/user/$b"; + + MockRaftActorContext leaderActorContext = (MockRaftActorContext) createActorContext(); + Map peerAddresses = new HashMap<>(); + peerAddresses.put("follower-1", followerAddress1); + peerAddresses.put("follower-2", followerAddress2); + leaderActorContext.setPeerAddresses(peerAddresses); + + IsolatedLeader isolatedLeader = new IsolatedLeader(leaderActorContext); + assertTrue(isolatedLeader.state() == RaftState.IsolatedLeader); + + // if an append-entries reply is received by the isolated-leader, and that reply + // has a term > than its own term, then IsolatedLeader switches to Follower + // bowing itself to another leader in the cluster + RaftActorBehavior behavior = isolatedLeader.handleMessage(senderActor, + new AppendEntriesReply("follower-1", isolatedLeader.lastTerm() + 1, true, + isolatedLeader.lastIndex() + 1, isolatedLeader.lastTerm() + 1)); + + assertEquals(RaftState.Follower, behavior.state()); + }}; + + } +} diff --git a/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java index 168eb3e5f2..6b534deb1f 100644 --- a/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java +++ b/opendaylight/md-sal/sal-akka-raft/src/test/java/org/opendaylight/controller/cluster/raft/behaviors/LeaderTest.java @@ -1,10 +1,20 @@ package org.opendaylight.controller.cluster.raft.behaviors; import akka.actor.ActorRef; +import akka.actor.PoisonPill; import akka.actor.Props; +import akka.actor.Terminated; import akka.testkit.JavaTestKit; import com.google.common.base.Optional; +import com.google.common.util.concurrent.Uninterruptibles; import com.google.protobuf.ByteString; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectOutputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; import org.junit.Assert; import org.junit.Test; import org.opendaylight.controller.cluster.raft.DefaultConfigParamsImpl; @@ -18,6 +28,7 @@ import org.opendaylight.controller.cluster.raft.base.messages.ApplyLogEntries; import org.opendaylight.controller.cluster.raft.base.messages.ApplyState; import org.opendaylight.controller.cluster.raft.base.messages.CaptureSnapshot; import org.opendaylight.controller.cluster.raft.base.messages.InitiateInstallSnapshot; +import 
org.opendaylight.controller.cluster.raft.base.messages.IsolatedLeaderCheck; import org.opendaylight.controller.cluster.raft.base.messages.Replicate; import org.opendaylight.controller.cluster.raft.base.messages.SendHeartBeat; import org.opendaylight.controller.cluster.raft.base.messages.SendInstallSnapshot; @@ -31,15 +42,6 @@ import org.opendaylight.controller.cluster.raft.utils.MessageCollectorActor; import org.opendaylight.controller.protobuff.messages.cluster.raft.AppendEntriesMessages; import org.opendaylight.controller.protobuff.messages.cluster.raft.InstallSnapshotMessages; import scala.concurrent.duration.FiniteDuration; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectOutputStream; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -941,10 +943,82 @@ public class LeaderTest extends AbstractRaftActorBehaviorTest { raftActorBehavior = leader.handleRequestVoteReply(getRef(), new RequestVoteReply(1, false)); assertEquals(RaftState.Leader, raftActorBehavior.state()); + }}; + } + @Test + public void testIsolatedLeaderCheckNoFollowers() { + new JavaTestKit(getSystem()) {{ + ActorRef leaderActor = getTestActor(); - }}; + MockRaftActorContext leaderActorContext = + new MockRaftActorContext("leader", getSystem(), leaderActor); + + Map peerAddresses = new HashMap<>(); + leaderActorContext.setPeerAddresses(peerAddresses); + Leader leader = new Leader(leaderActorContext); + RaftActorBehavior behavior = leader.handleMessage(leaderActor, new IsolatedLeaderCheck()); + Assert.assertTrue(behavior instanceof Leader); + }}; + } + + @Test + public void testIsolatedLeaderCheckTwoFollowers() throws Exception { + new JavaTestKit(getSystem()) {{ + + ActorRef followerActor1 = getTestActor(); + ActorRef followerActor2 = getTestActor(); + + MockRaftActorContext leaderActorContext = (MockRaftActorContext) createActorContext(); + + Map peerAddresses = new HashMap<>(); + peerAddresses.put("follower-1", followerActor1.path().toString()); + peerAddresses.put("follower-2", followerActor2.path().toString()); + + leaderActorContext.setPeerAddresses(peerAddresses); + + Leader leader = new Leader(leaderActorContext); + leader.stopIsolatedLeaderCheckSchedule(); + + leader.markFollowerActive("follower-1"); + leader.markFollowerActive("follower-2"); + RaftActorBehavior behavior = leader.handleMessage(leaderActor, new IsolatedLeaderCheck()); + Assert.assertTrue("Behavior not instance of Leader when all followers are active", + behavior instanceof Leader); + + // kill 1 follower and verify if that got killed + final JavaTestKit probe = new JavaTestKit(getSystem()); + probe.watch(followerActor1); + followerActor1.tell(PoisonPill.getInstance(), ActorRef.noSender()); + final Terminated termMsg1 = probe.expectMsgClass(Terminated.class); + assertEquals(termMsg1.getActor(), followerActor1); + + //sleep enough for all the follower stopwatches to lapse + Uninterruptibles.sleepUninterruptibly(leaderActorContext.getConfigParams(). 
+ getElectionTimeOutInterval().toMillis(), TimeUnit.MILLISECONDS); + + leader.markFollowerActive("follower-2"); + behavior = leader.handleMessage(leaderActor, new IsolatedLeaderCheck()); + Assert.assertTrue("Behavior not instance of Leader when majority of followers are active", + behavior instanceof Leader); + + // kill 2nd follower and leader should change to Isolated leader + followerActor2.tell(PoisonPill.getInstance(), null); + probe.watch(followerActor2); + followerActor2.tell(PoisonPill.getInstance(), ActorRef.noSender()); + final Terminated termMsg2 = probe.expectMsgClass(Terminated.class); + assertEquals(termMsg2.getActor(), followerActor2); + + //sleep enough for the remaining the follower stopwatches to lapse + Uninterruptibles.sleepUninterruptibly(leaderActorContext.getConfigParams(). + getElectionTimeOutInterval().toMillis(), TimeUnit.MILLISECONDS); + + behavior = leader.handleMessage(leaderActor, new IsolatedLeaderCheck()); + Assert.assertTrue("Behavior not instance of IsolatedLeader when majority followers are inactive", + behavior instanceof IsolatedLeader); + + }}; } class MockLeader extends Leader { diff --git a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/serialization/NormalizedNodeSerializer.java b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/serialization/NormalizedNodeSerializer.java index fae7eb1a33..c7bf7d1f7a 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/serialization/NormalizedNodeSerializer.java +++ b/opendaylight/md-sal/sal-clustering-commons/src/main/java/org/opendaylight/controller/cluster/datastore/node/utils/serialization/NormalizedNodeSerializer.java @@ -179,7 +179,7 @@ public class NormalizedNodeSerializer { ValueSerializer.serialize(builder, this, value); } else if (value instanceof Iterable) { - Iterable iterable = (Iterable) value; + Iterable iterable = (Iterable) value; for (Object o : iterable) { if (o instanceof NormalizedNode) { diff --git a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java index 2574bd681e..cac58587a5 100644 --- a/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java +++ b/opendaylight/md-sal/sal-clustering-commons/src/test/java/org/opendaylight/controller/xml/codec/XmlUtilsTest.java @@ -92,7 +92,7 @@ public class XmlUtilsTest { YangInstanceIdentifier instance = (YangInstanceIdentifier) secondNode.getValue(); Iterable iterable = instance.getPathArguments(); - Iterator it = iterable.iterator(); + Iterator it = iterable.iterator(); YangInstanceIdentifier.NodeIdentifier firstPath = (YangInstanceIdentifier.NodeIdentifier) it.next(); Assert.assertEquals("node", firstPath.getNodeType().getLocalName()); YangInstanceIdentifier.NodeIdentifierWithPredicates secondPath = (YangInstanceIdentifier.NodeIdentifierWithPredicates)it.next(); diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DatastoreContext.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DatastoreContext.java index 2048bde613..e18c00ec4b 100644 --- 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DatastoreContext.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/DatastoreContext.java @@ -119,6 +119,7 @@ public class DatastoreContext { private Timeout shardLeaderElectionTimeout = new Timeout(30, TimeUnit.SECONDS); private boolean persistent = true; private ConfigurationReader configurationReader = new FileConfigurationReader(); + private int shardIsolatedLeaderCheckIntervalInMillis = shardHeartbeatIntervalInMillis * 10; public Builder shardTransactionIdleTimeout(Duration shardTransactionIdleTimeout) { this.shardTransactionIdleTimeout = shardTransactionIdleTimeout; @@ -180,18 +181,24 @@ public class DatastoreContext { return this; } - public Builder persistent(boolean persistent){ this.persistent = persistent; return this; } + public Builder shardIsolatedLeaderCheckIntervalInMillis(int shardIsolatedLeaderCheckIntervalInMillis) { + this.shardIsolatedLeaderCheckIntervalInMillis = shardIsolatedLeaderCheckIntervalInMillis; + return this; + } + public DatastoreContext build() { DefaultConfigParamsImpl raftConfig = new DefaultConfigParamsImpl(); raftConfig.setHeartBeatInterval(new FiniteDuration(shardHeartbeatIntervalInMillis, TimeUnit.MILLISECONDS)); raftConfig.setJournalRecoveryLogBatchSize(shardJournalRecoveryLogBatchSize); raftConfig.setSnapshotBatchCount(shardSnapshotBatchCount); + raftConfig.setIsolatedLeaderCheckInterval( + new FiniteDuration(shardIsolatedLeaderCheckIntervalInMillis, TimeUnit.MILLISECONDS)); return new DatastoreContext(dataStoreProperties, raftConfig, dataStoreMXBeanType, operationTimeoutInSeconds, shardTransactionIdleTimeout, diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedConfigDataStoreProviderModule.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedConfigDataStoreProviderModule.java index 2f3fbdcef1..8eb653a44f 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedConfigDataStoreProviderModule.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedConfigDataStoreProviderModule.java @@ -63,6 +63,8 @@ public class DistributedConfigDataStoreProviderModule extends .shardTransactionCommitQueueCapacity( props.getShardTransactionCommitQueueCapacity().getValue().intValue()) .persistent(props.getPersistent().booleanValue()) + .shardIsolatedLeaderCheckIntervalInMillis( + props.getShardIsolatedLeaderCheckIntervalInMillis().getValue()) .build(); return DistributedDataStoreFactory.createInstance("config", getConfigSchemaServiceDependency(), diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedOperationalDataStoreProviderModule.java b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedOperationalDataStoreProviderModule.java index ecb3a91017..2a12aff4ef 100644 --- 
a/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedOperationalDataStoreProviderModule.java +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/config/yang/config/distributed_datastore_provider/DistributedOperationalDataStoreProviderModule.java @@ -63,6 +63,8 @@ public class DistributedOperationalDataStoreProviderModule extends .shardTransactionCommitQueueCapacity( props.getShardTransactionCommitQueueCapacity().getValue().intValue()) .persistent(props.getPersistent().booleanValue()) + .shardIsolatedLeaderCheckIntervalInMillis( + props.getShardIsolatedLeaderCheckIntervalInMillis().getValue()) .build(); return DistributedDataStoreFactory.createInstance("operational", diff --git a/opendaylight/md-sal/sal-distributed-datastore/src/main/yang/distributed-datastore-provider.yang b/opendaylight/md-sal/sal-distributed-datastore/src/main/yang/distributed-datastore-provider.yang index 995e98f38f..4d3d438b32 100644 --- a/opendaylight/md-sal/sal-distributed-datastore/src/main/yang/distributed-datastore-provider.yang +++ b/opendaylight/md-sal/sal-distributed-datastore/src/main/yang/distributed-datastore-provider.yang @@ -153,6 +153,13 @@ module distributed-datastore-provider { type boolean; description "Enable or disable data persistence"; } + + leaf shard-isolated-leader-check-interval-in-millis { + default 5000; + type heartbeat-interval-type; + description "The interval at which the leader of the shard will check if its majority + followers are active and term itself as isolated"; + } } // Augments the 'configuration' choice node under modules/module. diff --git a/opendaylight/md-sal/sal-dom-broker/pom.xml b/opendaylight/md-sal/sal-dom-broker/pom.xml index 264709f687..60581f99cb 100644 --- a/opendaylight/md-sal/sal-dom-broker/pom.xml +++ b/opendaylight/md-sal/sal-dom-broker/pom.xml @@ -10,10 +10,6 @@ bundle - - com.github.romix - java-concurrent-hash-trie-map - com.google.guava guava @@ -103,7 +99,6 @@ org.opendaylight.yangtools.yang.util, org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.dom.impl.rev131028.* * - java-concurrent-hash-trie-map;inline=true diff --git a/opendaylight/md-sal/sal-dom-xsql/src/main/java/org/opendaylight/controller/md/sal/dom/xsql/jdbc/JDBCServer.java b/opendaylight/md-sal/sal-dom-xsql/src/main/java/org/opendaylight/controller/md/sal/dom/xsql/jdbc/JDBCServer.java index 5be701f82e..5979771d28 100644 --- a/opendaylight/md-sal/sal-dom-xsql/src/main/java/org/opendaylight/controller/md/sal/dom/xsql/jdbc/JDBCServer.java +++ b/opendaylight/md-sal/sal-dom-xsql/src/main/java/org/opendaylight/controller/md/sal/dom/xsql/jdbc/JDBCServer.java @@ -121,7 +121,7 @@ public class JDBCServer extends Thread { while (entry.getValue().next()) { Map rec = entry.getValue().getCurrent(); Map newRec = new HashMap(); - for (Iterator iter = rec.entrySet().iterator(); iter.hasNext();) { + for (Iterator iter = rec.entrySet().iterator(); iter.hasNext();) { Map.Entry e = (Map.Entry) iter.next(); String key = (String) e.getKey(); Object value = e.getValue(); diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java index bca47af5c0..97e294016d 100644 --- 
a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/config/yang/md/sal/connector/netconf/NetconfConnectorModule.java @@ -9,8 +9,9 @@ package org.opendaylight.controller.config.yang.md.sal.connector.netconf; import static org.opendaylight.controller.config.api.JmxAttributeValidationException.checkCondition; import static org.opendaylight.controller.config.api.JmxAttributeValidationException.checkNotNull; - import com.google.common.base.Optional; +import io.netty.util.concurrent.EventExecutor; +import java.math.BigDecimal; import java.net.InetSocketAddress; import java.util.List; import java.util.concurrent.ExecutorService; @@ -120,13 +121,7 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co final NetconfClientDispatcher dispatcher = getClientDispatcherDependency(); listener.initializeRemoteConnection(dispatcher, clientConfig); - return new AutoCloseable() { - @Override - public void close() throws Exception { - listener.close(); - salFacade.close(); - } - }; + return new MyAutoCloseable(listener, salFacade); } private Optional getUserCapabilities() { @@ -155,9 +150,12 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co public NetconfReconnectingClientConfiguration getClientConfig(final NetconfDeviceCommunicator listener) { final InetSocketAddress socketAddress = getSocketAddress(); - final ReconnectStrategy strategy = getReconnectStrategy(); final long clientConnectionTimeoutMillis = getConnectionTimeoutMillis(); + final ReconnectStrategyFactory sf = new MyReconnectStrategyFactory( + getEventExecutorDependency(), getMaxConnectionAttempts(), getBetweenAttemptsTimeoutMillis(), getSleepFactor()); + final ReconnectStrategy strategy = sf.createReconnectStrategy(); + return NetconfReconnectingClientConfigurationBuilder.create() .withAddress(socketAddress) .withConnectionTimeoutMillis(clientConnectionTimeoutMillis) @@ -167,30 +165,54 @@ public final class NetconfConnectorModule extends org.opendaylight.controller.co .withProtocol(getTcpOnly() ? 
NetconfClientConfiguration.NetconfClientProtocol.TCP : NetconfClientConfiguration.NetconfClientProtocol.SSH) - .withConnectStrategyFactory(new ReconnectStrategyFactory() { - @Override - public ReconnectStrategy createReconnectStrategy() { - return getReconnectStrategy(); - } - }) + .withConnectStrategyFactory(sf) .build(); } - private ReconnectStrategy getReconnectStrategy() { - final Long connectionAttempts; - if (getMaxConnectionAttempts() != null && getMaxConnectionAttempts() > 0) { - connectionAttempts = getMaxConnectionAttempts(); - } else { - logger.trace("Setting {} on {} to infinity", maxConnectionAttemptsJmxAttribute, this); - connectionAttempts = null; + private static final class MyAutoCloseable implements AutoCloseable { + private final RemoteDeviceHandler salFacade; + private final NetconfDeviceCommunicator listener; + + public MyAutoCloseable(final NetconfDeviceCommunicator listener, + final RemoteDeviceHandler salFacade) { + this.listener = listener; + this.salFacade = salFacade; } - final double sleepFactor = getSleepFactor().doubleValue(); - final int minSleep = getBetweenAttemptsTimeoutMillis(); - final Long maxSleep = null; - final Long deadline = null; - return new TimedReconnectStrategy(getEventExecutorDependency(), getBetweenAttemptsTimeoutMillis(), - minSleep, sleepFactor, maxSleep, connectionAttempts, deadline); + @Override + public void close() { + listener.close(); + salFacade.close(); + } + } + + private static final class MyReconnectStrategyFactory implements ReconnectStrategyFactory { + private final Long connectionAttempts; + private final EventExecutor executor; + private final double sleepFactor; + private final int minSleep; + + MyReconnectStrategyFactory(final EventExecutor executor, final Long maxConnectionAttempts, final int minSleep, final BigDecimal sleepFactor) { + if (maxConnectionAttempts != null && maxConnectionAttempts > 0) { + connectionAttempts = maxConnectionAttempts; + } else { + logger.trace("Setting {} on {} to infinity", maxConnectionAttemptsJmxAttribute, this); + connectionAttempts = null; + } + + this.sleepFactor = sleepFactor.doubleValue(); + this.executor = executor; + this.minSleep = minSleep; + } + + @Override + public ReconnectStrategy createReconnectStrategy() { + final Long maxSleep = null; + final Long deadline = null; + + return new TimedReconnectStrategy(executor, minSleep, + minSleep, sleepFactor, maxSleep, connectionAttempts, deadline); + } } private InetSocketAddress getSocketAddress() { diff --git a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/mapping/NetconfMessageTransformer.java b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/mapping/NetconfMessageTransformer.java index 02819c15c7..2971865a70 100644 --- a/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/mapping/NetconfMessageTransformer.java +++ b/opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/schema/mapping/NetconfMessageTransformer.java @@ -67,18 +67,18 @@ public class NetconfMessageTransformer implements MessageTransformer schemaForRpc = NetconfMessageTransformUtil.findSchemaForRpc(rpc, schemaContext.get()); if(schemaForRpc.isPresent()) { final DataNodeContainer schemaForGetConfig = NetconfMessageTransformUtil.createSchemaForRpc(schemaForRpc.get()); - w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, schemaForGetConfig, 
codecProvider); + w3cPayload = XmlDocumentUtils.toDocument(rpcPayload, schemaContext.get(), schemaForGetConfig, codecProvider); } else { w3cPayload = toRpcRequestWithoutSchema(rpcPayload, codecProvider); } diff --git a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestCodec.java b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestCodec.java index 665fafacc8..ceac03e3d9 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestCodec.java +++ b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestCodec.java @@ -88,11 +88,6 @@ public class RestCodec { "Value is not instance of IdentityrefTypeDefinition but is {}. Therefore NULL is used as translation of - {}", input == null ? "null" : input.getClass(), String.valueOf(input)); return null; - } else if (type instanceof LeafrefTypeDefinition) { - if (input instanceof IdentityValuesDTO) { - return LEAFREF_DEFAULT_CODEC.deserialize(((IdentityValuesDTO) input).getOriginValue()); - } - return LEAFREF_DEFAULT_CODEC.deserialize(input); } else if (type instanceof InstanceIdentifierTypeDefinition) { if (input instanceof IdentityValuesDTO) { return instanceIdentifier.deserialize(input); @@ -232,7 +227,7 @@ public class RestCodec { IdentityValue valueWithNamespace = data.getValuesWithNamespaces().get(0); Module module = getModuleByNamespace(valueWithNamespace.getNamespace(), mountPoint); if (module == null) { - logger.info("Module by namespace '{}' of first node in instance-identiefier was not found.", + logger.info("Module by namespace '{}' of first node in instance-identifier was not found.", valueWithNamespace.getNamespace()); logger.info("Instance-identifier will be translated as NULL for data - {}", String.valueOf(valueWithNamespace.getValue())); diff --git a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java index cd860efab7..ded398a33d 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java +++ b/opendaylight/md-sal/sal-rest-connector/src/main/java/org/opendaylight/controller/sal/restconf/impl/RestconfImpl.java @@ -84,7 +84,9 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext; import org.opendaylight.yangtools.yang.model.api.SchemaPath; import org.opendaylight.yangtools.yang.model.api.TypeDefinition; import org.opendaylight.yangtools.yang.model.api.type.IdentityrefTypeDefinition; +import org.opendaylight.yangtools.yang.model.api.type.LeafrefTypeDefinition; import org.opendaylight.yangtools.yang.model.util.EmptyType; +import org.opendaylight.yangtools.yang.model.util.SchemaContextUtil; import org.opendaylight.yangtools.yang.parser.builder.impl.ContainerSchemaNodeBuilder; import org.opendaylight.yangtools.yang.parser.builder.impl.LeafSchemaNodeBuilder; import org.slf4j.Logger; @@ -1241,7 +1243,9 @@ public class RestconfImpl implements RestconfService { try { this.normalizeNode(nodeWrap, schema, null, mountPoint); } catch (IllegalArgumentException e) { - throw new RestconfDocumentedException(e.getMessage(), ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE); + RestconfDocumentedException restconfDocumentedException = new RestconfDocumentedException(e.getMessage(), 
ErrorType.PROTOCOL, ErrorTag.INVALID_VALUE); + restconfDocumentedException.addSuppressed(e); + throw restconfDocumentedException; } if (nodeWrap instanceof CompositeNodeWrapper) { return ((CompositeNodeWrapper) nodeWrap).unwrap(); @@ -1319,11 +1323,14 @@ public class RestconfImpl implements RestconfService { final Object value = simpleNode.getValue(); Object inputValue = value; TypeDefinition typeDefinition = this.typeDefinition(schema); - if ((typeDefinition instanceof IdentityrefTypeDefinition)) { - if ((value instanceof String)) { - inputValue = new IdentityValuesDTO(simpleNode.getNamespace().toString(), (String) value, null, - (String) value); - } // else value is already instance of IdentityValuesDTO + + // For leafrefs, extract the type it is pointing to + if(typeDefinition instanceof LeafrefTypeDefinition) { + typeDefinition = SchemaContextUtil.getBaseTypeForLeafRef(((LeafrefTypeDefinition) typeDefinition), mountPoint == null ? this.controllerContext.getGlobalSchema() : mountPoint.getSchemaContext(), schema); + } + + if (typeDefinition instanceof IdentityrefTypeDefinition) { + inputValue = parseToIdentityValuesDTO(simpleNode, value, inputValue); } Object outputValue = inputValue; @@ -1336,6 +1343,14 @@ public class RestconfImpl implements RestconfService { simpleNode.setValue(outputValue); } + private Object parseToIdentityValuesDTO(final SimpleNodeWrapper simpleNode, final Object value, Object inputValue) { + if ((value instanceof String)) { + inputValue = new IdentityValuesDTO(simpleNode.getNamespace().toString(), (String) value, null, + (String) value); + } // else value is already instance of IdentityValuesDTO + return inputValue; + } + private void normalizeCompositeNode(final CompositeNodeWrapper compositeNodeBuilder, final DataNodeContainer schema, final DOMMountPoint mountPoint, final QName currentAugment) { final List> children = compositeNodeBuilder.getValues(); diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/cnsn/to/json/test/CnSnToJsonLeafrefType.java b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/cnsn/to/json/test/CnSnToJsonLeafrefType.java index b5d3528e95..fa79fb7677 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/cnsn/to/json/test/CnSnToJsonLeafrefType.java +++ b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/cnsn/to/json/test/CnSnToJsonLeafrefType.java @@ -63,7 +63,7 @@ public class CnSnToJsonLeafrefType extends YangAndXmlAndDataSchemaLoader { @Test public void leafrefToNotLeafTest() { String json = toJson("/cnsn-to-json/leafref/xml/data_ref_to_not_leaf.xml"); - validateJson(".*\"cont-augment-module\\p{Blank}*:\\p{Blank}*lf6\":\\p{Blank}*\"44.33\".*", json); + validateJson(".*\"cont-augment-module\\p{Blank}*:\\p{Blank}*lf6\":\\p{Blank}*\"44\".*", json); } /** diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/json/to/cnsn/test/JsonLeafrefToCnSnTest.java b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/json/to/cnsn/test/JsonLeafrefToCnSnTest.java index 59696bc534..bdd74e8f96 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/json/to/cnsn/test/JsonLeafrefToCnSnTest.java +++ 
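Note on the RestCodec and RestconfImpl hunks above: leafref handling moves out of the codec layer and into normalization. Before a value is parsed, a leafref type is first resolved to the type of the leaf its path points at via SchemaContextUtil.getBaseTypeForLeafRef, so a leafref to an int32 leaf is now parsed as a number (the tests now expect 121 rather than the string "121"). The original IllegalArgumentException is also attached to the thrown RestconfDocumentedException with addSuppressed() so the root cause is not lost. A minimal sketch of the resolution step, assuming the yangtools SchemaContextUtil signature used in the hunk above; the helper name and surrounding types are illustrative:

    import org.opendaylight.yangtools.yang.model.api.SchemaContext;
    import org.opendaylight.yangtools.yang.model.api.SchemaNode;
    import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
    import org.opendaylight.yangtools.yang.model.api.type.LeafrefTypeDefinition;
    import org.opendaylight.yangtools.yang.model.util.SchemaContextUtil;

    // Resolve a leafref to the concrete type of its target leaf before choosing a codec.
    final class LeafrefTypes {
        static TypeDefinition<?> effectiveType(final TypeDefinition<?> type,
                final SchemaContext ctx, final SchemaNode schemaNode) {
            if (type instanceof LeafrefTypeDefinition) {
                return SchemaContextUtil.getBaseTypeForLeafRef((LeafrefTypeDefinition) type, ctx, schemaNode);
            }
            return type;
        }
    }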
b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/json/to/cnsn/test/JsonLeafrefToCnSnTest.java @@ -51,9 +51,7 @@ public class JsonLeafrefToCnSnTest extends YangAndXmlAndDataSchemaLoader { } assertNotNull(lf2); - assertTrue(lf2.getValue() instanceof String); - assertEquals("121", lf2.getValue()); - + assertEquals(121, lf2.getValue()); } } diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/XmlAndJsonToCnSnLeafRefTest.java b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/XmlAndJsonToCnSnLeafRefTest.java index 1c8e53e69f..7b216ef1ba 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/XmlAndJsonToCnSnLeafRefTest.java +++ b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/test/XmlAndJsonToCnSnLeafRefTest.java @@ -12,17 +12,27 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.net.URISyntaxException; + import javax.ws.rs.WebApplicationException; + import org.junit.BeforeClass; import org.junit.Test; import org.opendaylight.controller.sal.rest.impl.JsonToCompositeNodeProvider; import org.opendaylight.controller.sal.rest.impl.XmlToCompositeNodeProvider; +import org.opendaylight.yangtools.yang.common.QName; import org.opendaylight.yangtools.yang.data.api.CompositeNode; import org.opendaylight.yangtools.yang.data.api.Node; -import org.opendaylight.yangtools.yang.data.api.SimpleNode; +import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier; public class XmlAndJsonToCnSnLeafRefTest extends YangAndXmlAndDataSchemaLoader { + final QName refContQName = QName.create("referenced:module", "2014-04-17", "cont"); + final QName refLf1QName = QName.create(refContQName, "lf1"); + final QName contQName = QName.create("leafref:module", "2014-04-17", "cont"); + final QName lf1QName = QName.create(contQName, "lf1"); + final QName lf2QName = QName.create(contQName, "lf2"); + final QName lf3QName = QName.create(contQName, "lf3"); + @BeforeClass public static void initialize() { dataLoad("/leafref/yang", 2, "leafref-module", "cont"); @@ -36,7 +46,11 @@ public class XmlAndJsonToCnSnLeafRefTest extends YangAndXmlAndDataSchemaLoader { CompositeNode cnSn = (CompositeNode)node; TestUtils.normalizeCompositeNode(cnSn, modules, schemaNodePath); - verifyContPredicate(cnSn, "/ns:cont/ns:lf1", "/cont/lf1", "/ns:cont/ns:lf1", "../lf1"); + + verifyContPredicate(cnSn, "lf4", YangInstanceIdentifier.builder().node(refContQName).node(refLf1QName).build()); + verifyContPredicate(cnSn, "lf2", YangInstanceIdentifier.builder().node(contQName).node(lf1QName).build()); + verifyContPredicate(cnSn, "lf3", YangInstanceIdentifier.builder().node(contQName).node(lf2QName).build()); + verifyContPredicate(cnSn, "lf5", YangInstanceIdentifier.builder().node(contQName).node(lf3QName).build()); } @Test @@ -47,31 +61,23 @@ public class XmlAndJsonToCnSnLeafRefTest extends YangAndXmlAndDataSchemaLoader { CompositeNode cnSn = (CompositeNode)node; TestUtils.normalizeCompositeNode(cnSn, modules, schemaNodePath); - verifyContPredicate(cnSn, "/leafref-module:cont/leafref-module:lf1", "/leafref-module:cont/leafref-module:lf1", - "/referenced-module:cont/referenced-module:lf1", "/leafref-module:cont/leafref-module:lf1"); + + verifyContPredicate(cnSn, "lf4", 
YangInstanceIdentifier.builder().node(refContQName).node(refLf1QName).build()); + verifyContPredicate(cnSn, "lf2", YangInstanceIdentifier.builder().node(contQName).node(lf1QName).build()); + verifyContPredicate(cnSn, "lf3", YangInstanceIdentifier.builder().node(contQName).node(lf2QName).build()); + verifyContPredicate(cnSn, "lf5", YangInstanceIdentifier.builder().node(contQName).node(lf3QName).build()); } - private void verifyContPredicate(CompositeNode cnSn, String... values) throws URISyntaxException { - Object lf2Value = null; - Object lf3Value = null; - Object lf4Value = null; - Object lf5Value = null; - - for (Node node : cnSn.getValue()) { - if (node.getNodeType().getLocalName().equals("lf2")) { - lf2Value = ((SimpleNode) node).getValue(); - } else if (node.getNodeType().getLocalName().equals("lf3")) { - lf3Value = ((SimpleNode) node).getValue(); - } else if (node.getNodeType().getLocalName().equals("lf4")) { - lf4Value = ((SimpleNode) node).getValue(); - } else if (node.getNodeType().getLocalName().equals("lf5")) { - lf5Value = ((SimpleNode) node).getValue(); + private void verifyContPredicate(CompositeNode cnSn, String leafName, Object value) throws URISyntaxException { + Object parsed = null; + + for (final Node node : cnSn.getValue()) { + if (node.getNodeType().getLocalName().equals(leafName)) { + parsed = node.getValue(); } } - assertEquals(values[0], lf2Value); - assertEquals(values[1], lf3Value); - assertEquals(values[2], lf4Value); - assertEquals(values[3], lf5Value); + + assertEquals(value, parsed); } } diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/xml/to/cnsn/test/XmlToCnSnTest.java b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/xml/to/cnsn/test/XmlToCnSnTest.java index d0af29e913..64568da769 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/xml/to/cnsn/test/XmlToCnSnTest.java +++ b/opendaylight/md-sal/sal-rest-connector/src/test/java/org/opendaylight/controller/sal/restconf/impl/xml/to/cnsn/test/XmlToCnSnTest.java @@ -54,8 +54,7 @@ public class XmlToCnSnTest extends YangAndXmlAndDataSchemaLoader { } assertNotNull(lf2); - assertTrue(lf2.getValue() instanceof String); - assertEquals("121", lf2.getValue()); + assertEquals(121, lf2.getValue()); } @Test diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/cont-augment-module.yang b/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/cont-augment-module.yang index afc23b7946..27b2dae243 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/cont-augment-module.yang +++ b/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/cont-augment-module.yang @@ -1,42 +1,42 @@ module cont-augment-module { - namespace "cont:augment:module"; + namespace "cont:augment:module"; prefix "cntaugmod"; - + import main-module {prefix mamo; revision-date 2013-12-2;} - + revision 2013-12-2 { - + } - + augment "/mamo:cont" { leaf-list lflst1 { type leafref { - path "../lf1"; + path "../mamo:lf1"; } - } - + } + leaf lf4 { type leafref { - path "../lf1"; + path "../mamo:lf1"; } } - + /* reference to not leaf element */ leaf lf6 { type leafref { path "../lflst1"; } } - + leaf lf7 { type leafref { path "../lf4"; } } } - - - + + + } \ No newline at end of file diff --git 
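Note on the test hunks above: the rewritten verifyContPredicate compares each instance-identifier leaf against an expected YangInstanceIdentifier built from QNames rather than against a serialized string, and the cont-augment-module.yang fixture now qualifies leafref path steps that live in the augmented module ("../mamo:lf1"), since inside an augment an unprefixed node name resolves against the augmenting module, not the target module. A short sketch of how such an expectation is assembled, reusing the module namespace and revision declared in the test above (the wrapper class is illustrative):

    import org.opendaylight.yangtools.yang.common.QName;
    import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;

    final class LeafrefExpectations {
        // Expected deserialized value for "/leafref-module:cont/leafref-module:lf1",
        // using the namespace and revision declared in the test above.
        static YangInstanceIdentifier contLf1() {
            final QName cont = QName.create("leafref:module", "2014-04-17", "cont");
            final QName lf1 = QName.create(cont, "lf1");
            return YangInstanceIdentifier.builder().node(cont).node(lf1).build();
        }
    }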
a/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/xml/data_ref_to_not_leaf.xml b/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/xml/data_ref_to_not_leaf.xml index 10632a44af..b72d438c28 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/xml/data_ref_to_not_leaf.xml +++ b/opendaylight/md-sal/sal-rest-connector/src/test/resources/cnsn-to-json/leafref/xml/data_ref_to_not_leaf.xml @@ -1,3 +1,3 @@ - 44.33 + 44 \ No newline at end of file diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/json/jsondata.json b/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/json/jsondata.json index cbe455b33b..f4a435e3bf 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/json/jsondata.json +++ b/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/json/jsondata.json @@ -2,7 +2,7 @@ "leafref-module:cont" : { "lf4" : "/referenced-module:cont/referenced-module:lf1", "lf2" : "/leafref-module:cont/leafref-module:lf1", - "lf3" : "/leafref-module:cont/leafref-module:lf1", - "lf5" : "/leafref-module:cont/leafref-module:lf1" + "lf3" : "/leafref-module:cont/leafref-module:lf2", + "lf5" : "/leafref-module:cont/leafref-module:lf3" } } \ No newline at end of file diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/xml/xmldata.xml b/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/xml/xmldata.xml index 01bf092d27..1b5ce835fc 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/xml/xmldata.xml +++ b/opendaylight/md-sal/sal-rest-connector/src/test/resources/leafref/xml/xmldata.xml @@ -1,6 +1,6 @@ - - /ns:cont/ns:lf1 - /ns:cont/ns:lf1 - /cont/lf1 - ../lf1 + + /nsa:cont/nsa:lf1 + /nsa:cont/nsa:lf1 + /ns:cont/ns:lf2 + /nsa:cont/nsa:lf3 diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/leafref-module b/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/leafref-module index 8ca9f09096..6fe770b40b 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/leafref-module +++ b/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/leafref-module @@ -1,19 +1,61 @@ module leafref-module { - namespace "leafref:module"; + namespace "leafref:module"; prefix "lfrfmo"; - revision 2013-11-18 { + revision 2013-11-18 { } + identity base {} + container cont { leaf lf1 { type int32; } leaf lf2 { type leafref { - path "/cont/lf1"; + path "/cont/lf1"; + } + } + + leaf lf-ident { + type identityref { + base "lfrfmo:base"; } } + + leaf lf-ident-ref { + type leafref { + path "/cont/lf-ident"; + } + } + + leaf lf-ident-ref-relative { + type leafref { + path "../lf-ident"; + } + } + + leaf lf-ident-ref-relative-cnd { + type leafref { + path "/lfrfmo:cont/lfrfmo:lis[lfrfmo:id='abc']/lf-ident-ref"; + } + } + + + list lis { + key "id"; + + leaf id { + type string; + } + + leaf lf-ident-ref { + type leafref { + path "/cont/lf-ident"; + } + } + } + } - + } \ No newline at end of file diff --git a/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/xml/data.xml b/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/xml/data.xml index 06200a69b5..c3071e5610 100644 --- a/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/xml/data.xml +++ 
b/opendaylight/md-sal/sal-rest-connector/src/test/resources/xml-to-cnsn/leafref/xml/data.xml @@ -1,4 +1,8 @@ 121 121 + a:base + a:base + a:base + a:base \ No newline at end of file diff --git a/opendaylight/md-sal/topology-manager/src/main/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporter.java b/opendaylight/md-sal/topology-manager/src/main/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporter.java index 9b36f9f497..bba4b4c2b6 100644 --- a/opendaylight/md-sal/topology-manager/src/main/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporter.java +++ b/opendaylight/md-sal/topology-manager/src/main/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporter.java @@ -169,7 +169,7 @@ class FlowCapableTopologyExporter implements FlowTopologyDiscoveryListener, Open public void applyOperation(final ReadWriteTransaction transaction) { final Link link = toTopologyLink(notification); final InstanceIdentifier path = linkPath(link); - transaction.put(LogicalDatastoreType.OPERATIONAL, path, link, true); + transaction.merge(LogicalDatastoreType.OPERATIONAL, path, link, true); } @Override diff --git a/opendaylight/md-sal/topology-manager/src/test/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporterTest.java b/opendaylight/md-sal/topology-manager/src/test/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporterTest.java index 7f8d021b3b..c3357b7c6c 100644 --- a/opendaylight/md-sal/topology-manager/src/test/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporterTest.java +++ b/opendaylight/md-sal/topology-manager/src/test/java/org/opendaylight/md/controller/topology/manager/FlowCapableTopologyExporterTest.java @@ -538,7 +538,7 @@ public class FlowCapableTopologyExporterTest { waitForSubmit(submitLatch); ArgumentCaptor mergedNode = ArgumentCaptor.forClass(Link.class); - verify(mockTx).put(eq(LogicalDatastoreType.OPERATIONAL), eq(topologyIID.child( + verify(mockTx).merge(eq(LogicalDatastoreType.OPERATIONAL), eq(topologyIID.child( Link.class, new LinkKey(new LinkId(sourceNodeConnKey.getId())))), mergedNode.capture(), eq(true)); assertEquals("Source node ID", "sourceNode", diff --git a/opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/Activator.java b/opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/Activator.java index d544d1fb33..faaa17d528 100644 --- a/opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/Activator.java +++ b/opendaylight/netconf/config-netconf-connector/src/main/java/org/opendaylight/controller/netconf/confignetconfconnector/osgi/Activator.java @@ -28,7 +28,7 @@ public class Activator implements BundleActivator { private static final Logger LOG = LoggerFactory.getLogger(Activator.class); private BundleContext context; - private ServiceRegistration osgiRegistration; + private ServiceRegistration osgiRegistration; private ConfigRegistryLookupThread configRegistryLookup = null; @Override diff --git a/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiator.java b/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiator.java index a48cbbe241..f7bb281b91 100644 --- 
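Note on the topology-manager hunks above: the link write switches from put() to merge(), and the test now verifies merge(). put() replaces the whole subtree at the target path, so it can silently drop data another notification has already written under the same link node, whereas merge() only overlays the supplied object. A minimal contrast under the binding transaction API the exporter uses; the helper, its parameters, and the import paths are stated as assumptions, not the exporter itself:

    import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
    import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
    import org.opendaylight.yangtools.yang.binding.DataObject;
    import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;

    final class OperationalWrites {
        // merge() keeps existing children at `path` and overlays `data`;
        // put() would replace everything at `path`. The trailing `true` asks the
        // broker to create any missing parent nodes along the path.
        static <T extends DataObject> void store(final ReadWriteTransaction tx,
                final InstanceIdentifier<T> path, final T data) {
            // previous behaviour: tx.put(LogicalDatastoreType.OPERATIONAL, path, data, true);
            tx.merge(LogicalDatastoreType.OPERATIONAL, path, data, true);
        }
    }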
a/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiator.java +++ b/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiator.java @@ -10,9 +10,9 @@ package org.opendaylight.controller.netconf.impl; import com.google.common.base.Optional; import io.netty.channel.Channel; +import io.netty.channel.local.LocalAddress; import io.netty.util.Timer; import io.netty.util.concurrent.Promise; -import java.net.InetSocketAddress; import org.opendaylight.controller.netconf.api.NetconfDocumentedException; import org.opendaylight.controller.netconf.api.NetconfServerSessionPreferences; import org.opendaylight.controller.netconf.nettyutil.AbstractNetconfSessionNegotiator; @@ -21,19 +21,31 @@ import org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAddi import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class NetconfServerSessionNegotiator extends +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.util.AbstractMap; +import java.util.Map; + +public class NetconfServerSessionNegotiator + extends AbstractNetconfSessionNegotiator { - static final Logger logger = LoggerFactory.getLogger(NetconfServerSessionNegotiator.class); + static final Logger logger = LoggerFactory + .getLogger(NetconfServerSessionNegotiator.class); + private static final String UNKNOWN = "unknown"; - protected NetconfServerSessionNegotiator(NetconfServerSessionPreferences sessionPreferences, - Promise promise, Channel channel, Timer timer, NetconfServerSessionListener sessionListener, + protected NetconfServerSessionNegotiator( + NetconfServerSessionPreferences sessionPreferences, + Promise promise, Channel channel, + Timer timer, NetconfServerSessionListener sessionListener, long connectionTimeoutMillis) { - super(sessionPreferences, promise, channel, timer, sessionListener, connectionTimeoutMillis); + super(sessionPreferences, promise, channel, timer, sessionListener, + connectionTimeoutMillis); } @Override - protected void handleMessage(NetconfHelloMessage netconfMessage) throws NetconfDocumentedException { + protected void handleMessage(NetconfHelloMessage netconfMessage) + throws NetconfDocumentedException { NetconfServerSession session = getSessionForHelloMessage(netconfMessage); replaceHelloMessageInboundHandler(session); // Negotiation successful after all non hello messages were processed @@ -41,21 +53,56 @@ public class NetconfServerSessionNegotiator extends } @Override - protected NetconfServerSession getSession(NetconfServerSessionListener sessionListener, Channel channel, NetconfHelloMessage message) { - Optional additionalHeader = message.getAdditionalHeader(); + protected NetconfServerSession getSession( + NetconfServerSessionListener sessionListener, Channel channel, + NetconfHelloMessage message) { + Optional additionalHeader = message + .getAdditionalHeader(); NetconfHelloMessageAdditionalHeader parsedHeader; if (additionalHeader.isPresent()) { parsedHeader = additionalHeader.get(); } else { - InetSocketAddress inetSocketAddress = (InetSocketAddress) channel.localAddress(); - parsedHeader = new NetconfHelloMessageAdditionalHeader("unknown", inetSocketAddress.getHostString(), Integer.toString(inetSocketAddress.getPort()), - "tcp", "client"); + + parsedHeader = new NetconfHelloMessageAdditionalHeader(UNKNOWN, + getHostName(channel.localAddress()).getValue(), + getHostName(channel.localAddress()).getKey(), "tcp", + "client"); + } - 
logger.debug("Additional header from hello parsed as {} from {}", parsedHeader, additionalHeader); + logger.debug("Additional header from hello parsed as {} from {}", + parsedHeader, additionalHeader); + + return new NetconfServerSession(sessionListener, channel, + getSessionPreferences().getSessionId(), parsedHeader); + } + + /** + * @param socketAddress + * type of socket address LocalAddress, or + * InetSocketAddress, for others returns unknown + * @return Map two values - port and host of socket address + */ + protected static Map.Entry getHostName( + SocketAddress socketAddress) { + + if (socketAddress instanceof InetSocketAddress) { + + InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress; + + return new AbstractMap.SimpleImmutableEntry<>( + Integer.toString(inetSocketAddress.getPort()), + inetSocketAddress.getHostString()); + + } else if (socketAddress instanceof LocalAddress) { + + return new AbstractMap.SimpleImmutableEntry<>(UNKNOWN, + ((LocalAddress) socketAddress).id()); + + } + return new AbstractMap.SimpleImmutableEntry<>(UNKNOWN, UNKNOWN); - return new NetconfServerSession(sessionListener, channel, getSessionPreferences().getSessionId(), parsedHeader); } } diff --git a/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/SubtreeFilter.java b/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/SubtreeFilter.java index 42a8bae448..8b2ca86010 100644 --- a/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/SubtreeFilter.java +++ b/opendaylight/netconf/netconf-impl/src/main/java/org/opendaylight/controller/netconf/impl/SubtreeFilter.java @@ -178,10 +178,10 @@ public class SubtreeFilter { return false; } - final String unprefixedFilterContent = filter.getTextContent().substring(prefix.length()); - final String unprefixedSrcCOntnet = src.getTextContent().substring(prefix.length()); + final String unprefixedFilterContent = filter.getTextContent().substring(prefixToNamespaceOfFilter.getKey().length() + 1); + final String unprefixedSrcContnet = src.getTextContent().substring(prefixToNamespaceOfSrc.getKey().length() + 1); // Finally compare unprefixed content - return unprefixedFilterContent.equals(unprefixedSrcCOntnet); + return unprefixedFilterContent.equals(unprefixedSrcContnet); } enum MatchingResult { diff --git a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiatorTest.java b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiatorTest.java new file mode 100644 index 0000000000..c16046c642 --- /dev/null +++ b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/NetconfServerSessionNegotiatorTest.java @@ -0,0 +1,45 @@ +package org.opendaylight.controller.netconf.impl; + +import io.netty.channel.local.LocalAddress; +import org.apache.sshd.common.SshdSocketAddress; +import org.junit.Test; + +import java.net.InetSocketAddress; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +public class NetconfServerSessionNegotiatorTest { + + @Test + public void testGetInetSocketAddress() throws Exception { + + InetSocketAddress socketAddress = new InetSocketAddress(10); + + assertNotNull(NetconfServerSessionNegotiator.getHostName(socketAddress)); + + assertEquals(socketAddress.getHostName(), + NetconfServerSessionNegotiator.getHostName(socketAddress) + 
.getValue()); + + socketAddress = new InetSocketAddress("TestPortInet", 20); + + assertEquals(socketAddress.getHostName(), + NetconfServerSessionNegotiator.getHostName(socketAddress) + .getValue()); + + assertEquals(String.valueOf(socketAddress.getPort()), + NetconfServerSessionNegotiator.getHostName(socketAddress) + .getKey()); + + LocalAddress localAddress = new LocalAddress("TestPortLocal"); + + assertEquals(String.valueOf(localAddress.id()), + NetconfServerSessionNegotiator.getHostName(localAddress) + .getValue()); + + SshdSocketAddress embeddedAddress = new SshdSocketAddress( + "TestSshdName", 10); + + } +} \ No newline at end of file diff --git a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/SubtreeFilterTest.java b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/SubtreeFilterTest.java index 5d9470750e..51dfa4b1a8 100644 --- a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/SubtreeFilterTest.java +++ b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/SubtreeFilterTest.java @@ -36,7 +36,7 @@ public class SubtreeFilterTest { @Parameters public static Collection data() { List result = new ArrayList<>(); - for (int i = 0; i <= 9; i++) { + for (int i = 0; i <= 10; i++) { result.add(new Object[]{i}); } return result; diff --git a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivatorTest.java b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivatorTest.java index 9470e6d09c..fd9295a4b3 100644 --- a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivatorTest.java +++ b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfImplActivatorTest.java @@ -34,9 +34,9 @@ public class NetconfImplActivatorTest { @Mock private Filter filter; @Mock - private ServiceReference reference; + private ServiceReference reference; @Mock - private ServiceRegistration registration; + private ServiceRegistration registration; @Before public void setUp() throws Exception { @@ -44,7 +44,7 @@ public class NetconfImplActivatorTest { doReturn(filter).when(bundle).createFilter(anyString()); doNothing().when(bundle).addServiceListener(any(ServiceListener.class), anyString()); - ServiceReference[] refs = new ServiceReference[0]; + ServiceReference[] refs = new ServiceReference[0]; doReturn(refs).when(bundle).getServiceReferences(anyString(), anyString()); doReturn(Arrays.asList(refs)).when(bundle).getServiceReferences(any(Class.class), anyString()); doReturn("").when(bundle).getProperty(anyString()); diff --git a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryTrackerTest.java b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryTrackerTest.java index 374e8aeb9f..0d7158aa21 100644 --- a/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryTrackerTest.java +++ b/opendaylight/netconf/netconf-impl/src/test/java/org/opendaylight/controller/netconf/impl/osgi/NetconfOperationServiceFactoryTrackerTest.java @@ -32,7 +32,7 @@ public class NetconfOperationServiceFactoryTrackerTest { @Mock private NetconfOperationServiceFactory 
factory; @Mock - private ServiceReference reference; + private ServiceReference reference; private NetconfOperationServiceFactoryTracker tracker; diff --git a/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/post-filter.xml b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/post-filter.xml new file mode 100644 index 0000000000..3331cb8610 --- /dev/null +++ b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/post-filter.xml @@ -0,0 +1,47 @@ + + + + + + + prefix:sal-netconf-connector + controller-config + 1830 + 20000 + 2000 + 1.5 + admin + + prefix:dom-broker-osgi-registry + dom-broker + + + prefix:netconf-client-dispatcher + global-netconf-dispatcher + + admin +

    127.0.0.1
    + + prefix:threadpool + global-netconf-processing-executor + + false + + prefix:binding-broker-osgi-registry + binding-osgi-broker + + 0 + + prefix:netty-event-executor + global-event-executor + + + + + \ No newline at end of file diff --git a/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/pre-filter.xml b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/pre-filter.xml new file mode 100644 index 0000000000..f2620bbb4d --- /dev/null +++ b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/pre-filter.xml @@ -0,0 +1,52 @@ + + + + + + + prefix:sal-netconf-connector + controller-config + 1830 + 20000 + 2000 + 1.5 + admin + + prefix:dom-broker-osgi-registry + dom-broker + + + prefix:netconf-client-dispatcher + global-netconf-dispatcher + + admin +
    127.0.0.1
    + + prefix:threadpool + global-netconf-processing-executor + + false + + prefix:binding-broker-osgi-registry + binding-osgi-broker + + 0 + + prefix:netty-event-executor + global-event-executor + +
    + + prefix:shutdown + shutdown + + +
    +
    +
    \ No newline at end of file diff --git a/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/request.xml b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/request.xml new file mode 100644 index 0000000000..259b123bdf --- /dev/null +++ b/opendaylight/netconf/netconf-impl/src/test/resources/subtree/10/request.xml @@ -0,0 +1,15 @@ + + + + + + + + + x:sal-netconf-connector + controller-config + + + + + diff --git a/pom.xml b/pom.xml index d1e5494b44..87a69e00c9 100644 --- a/pom.xml +++ b/pom.xml @@ -14,15 +14,6 @@ 3.0 - opendaylight/distribution/opendaylight - - - - - - - - third-party/commons/thirdparty opendaylight/md-sal @@ -65,16 +56,4 @@ HEAD https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - - - - integrationtests - - false - - - opendaylight/distribution/sanitytest/ - - - diff --git a/third-party/commons/thirdparty/pom.xml b/third-party/commons/thirdparty/pom.xml deleted file mode 100644 index ad3c27523e..0000000000 --- a/third-party/commons/thirdparty/pom.xml +++ /dev/null @@ -1,230 +0,0 @@ - - - 4.0.0 - - 3.0 - - org.opendaylight.controller - commons.thirdparty - 1.2.0-SNAPSHOT - pom - - scm:git:ssh://git.opendaylight.org:29418/controller.git - scm:git:ssh://git.opendaylight.org:29418/controller.git - https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - HEAD - - - - https://sonar.opendaylight.org/ - http://nexus.opendaylight.org/content - opendaylight.release - opendaylight.snapshot - dav:http://nexus.opendaylight.org/content/sites/site - 3.2 - 2.6 - UTF-8 - 2.3.2 - 2.13 - 2.3.2 - 1.3.1 - 2.3.7 - - - - - central2 - central2 - http://repo2.maven.org/maven2 - - - - - - fastreassembly - - - - org.apache.maven.plugins - maven-dependency-plugin - 2.4 - - - copyfastreassembly - install - - copy - - - - - ${project.groupId} - ${project.artifactId} - ${project.version} - ${project.groupId}.${project.artifactId}-${project.version}.jar - - - ${fastreassembly.directory} - - - - - - - - - - - - - com.googlecode.maven-java-formatter-plugin - maven-java-formatter-plugin - 0.3.1 - - - **/* - - - - - - - - org.apache.maven.plugins - maven-release-plugin - ${releaseplugin.version} - - - org.apache.felix - maven-bundle-plugin - ${bundle.plugin.version} - true - - - org.apache.maven.plugins - maven-site-plugin - ${siteplugin} - - - - org.apache.maven.plugins - maven-project-info-reports-plugin - ${projectinfo} - - false - false - - - index - project-team - license - mailing-list - plugin-management - cim - issue-tracking - scm - summary - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 2.10 - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.8.1 - - org.jboss.apiviz.APIviz - - org.jboss.apiviz - apiviz - 1.3.2.GA - - ${project.artifactId}-${build.suffix} - true - UTF-8 - UTF-8 - UTF-8 - true - true - true - true - net.sf.jnetlib.*:cern.*:corejava - - - - org.apache.maven.plugins - maven-jxr-plugin - 2.3 - - true - true - - - - - - - - - - - - central2 - central2 - http://repo2.maven.org/maven2 - - false - - - never - true - - - - central - central - http://repo1.maven.org/maven2 - - false - - - never - true - - - - - thirdparty - thirdparty - ${nexusproxy}/repositories/thirdparty - - false - - - never - true - - - - - - - opendaylight-release - ${nexusproxy}/repositories/${nexus.repository.release}/ - - - - opendaylight-snapshot - ${nexusproxy}/repositories/${nexus.repository.snapshot}/ - - - - website - ${sitedeploy} - - - diff --git a/third-party/jersey-servlet/pom.xml b/third-party/jersey-servlet/pom.xml deleted file mode 
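Note on the new subtree/10 resources above: they exercise the SubtreeFilter prefix fix earlier in this commit. The request selects the module as x:sal-netconf-connector while the stored configuration uses prefix:sal-netconf-connector; the two match because both prefixes resolve to the same namespace and the comparison now strips each document's own prefix plus the ':' separator (getKey().length() + 1) before comparing the local content. A small sketch of that comparison, with illustrative names and an illustrative namespace, assuming the prefixes were already resolved to a common namespace:

    import java.util.AbstractMap;
    import java.util.Map;

    final class PrefixedContent {
        // Drop "<prefix>:" from each side and compare what remains.
        static boolean sameLocalContent(final Map.Entry<String, String> filterPrefixToNs, final String filterText,
                final Map.Entry<String, String> srcPrefixToNs, final String srcText) {
            final String unprefixedFilter = filterText.substring(filterPrefixToNs.getKey().length() + 1);
            final String unprefixedSrc = srcText.substring(srcPrefixToNs.getKey().length() + 1);
            return unprefixedFilter.equals(unprefixedSrc);
        }

        public static void main(final String[] args) {
            final String ns = "urn:example:netconf-connector"; // illustrative; both prefixes bind to the same namespace
            System.out.println(sameLocalContent(
                    new AbstractMap.SimpleEntry<>("x", ns), "x:sal-netconf-connector",
                    new AbstractMap.SimpleEntry<>("prefix", ns), "prefix:sal-netconf-connector")); // prints true
        }
    }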
100644 index 27d503e898..0000000000 --- a/third-party/jersey-servlet/pom.xml +++ /dev/null @@ -1,90 +0,0 @@ - - - - - org.opendaylight.controller - commons.thirdparty - 1.2.0-SNAPSHOT - ../commons/thirdparty - - - scm:git:ssh://git.opendaylight.org:29418/controller.git - scm:git:ssh://git.opendaylight.org:29418/controller.git - https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - HEAD - - - 4.0.0 - org.opendaylight.controller.thirdparty - com.sun.jersey.jersey-servlet - 1.19.0-SNAPSHOT - bundle - - - - org.apache.felix - maven-bundle-plugin - 2.3.6 - true - - - *;scope=!provided;type=!pom;inline=false - false - - com.sun.jersey.api.core.servlet, - com.sun.jersey.spi.container.servlet, - com.sun.jersey.spi.scanning.servlet, - com.sun.jersey.server.impl.container.servlet - - - com.sun.jersey.api.container, - com.sun.jersey.api.core, - com.sun.jersey.api.model, - com.sun.jersey.api.representation, - com.sun.jersey.api.uri, - com.sun.jersey.api.view, - com.sun.jersey.core.header, - com.sun.jersey.core.reflection, - com.sun.jersey.core.spi.component, - com.sun.jersey.core.spi.component.ioc, - com.sun.jersey.core.spi.scanning, - com.sun.jersey.core.util, - com.sun.jersey.server.impl, - com.sun.jersey.server.impl.application, - com.sun.jersey.server.impl.inject, - com.sun.jersey.server.impl.monitoring, - com.sun.jersey.server.probes, - com.sun.jersey.server.spi.component, - com.sun.jersey.spi, - com.sun.jersey.spi.container, - com.sun.jersey.spi.dispatch, - com.sun.jersey.spi.inject, - com.sun.jersey.spi.service, - com.sun.jersey.spi.template, - javax.naming, - javax.ws.rs, - javax.ws.rs.core, - javax.ws.rs.ext, - *;resolution:=optional - - - ${project.basedir}/META-INF - - - - - - - - com.sun.jersey - jersey-servlet - 1.17 - - - equinoxSDK381 - javax.servlet - 3.0.0.v201112011016 - provided - - - diff --git a/third-party/net.sf.jung2/pom.xml b/third-party/net.sf.jung2/pom.xml deleted file mode 100644 index 63455dc8be..0000000000 --- a/third-party/net.sf.jung2/pom.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - - - org.opendaylight.controller - commons.thirdparty - 1.2.0-SNAPSHOT - ../commons/thirdparty - - - scm:git:ssh://git.opendaylight.org:29418/controller.git - scm:git:ssh://git.opendaylight.org:29418/controller.git - https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - HEAD - - - 4.0.0 - org.opendaylight.controller.thirdparty - net.sf.jung2 - 2.1.0-SNAPSHOT - bundle - - - - org.apache.felix - maven-bundle-plugin - 2.3.6 - true - - - *;scope=compile|runtime;type=!pom;inline=false - false - - org.apache.commons*, - edu.uci.ics.jung.algorithms.blockmodel, - edu.uci.ics.jung.algorithms.cluster, - edu.uci.ics.jung.algorithms.filters, - edu.uci.ics.jung.algorithms.flows, - edu.uci.ics.jung.algorithms.generators, - edu.uci.ics.jung.algorithms.generators.random, - edu.uci.ics.jung.algorithms.layout, - edu.uci.ics.jung.algorithms.layout.util, - edu.uci.ics.jung.algorithms.metrics, - edu.uci.ics.jung.algorithms.scoring, - edu.uci.ics.jung.algorithms.scoring.util, - edu.uci.ics.jung.algorithms.shortestpath, - edu.uci.ics.jung.algorithms.transformation, - edu.uci.ics.jung.algorithms.util, - edu.uci.ics.jung.graph;-split-package:=merge-first, - edu.uci.ics.jung.graph.event, - edu.uci.ics.jung.graph.util;-split-package:=merge-first - - - !* - - - ${project.basedir}/META-INF - - - - - - - - net.sf.jung - jung-api - 2.0.1 - - - net.sf.jung - jung-graph-impl - 2.0.1 - - - net.sourceforge.collections - collections-generic - 4.01 - - - diff --git 
a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java deleted file mode 100644 index a9d457345e..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * Created on Jan 28, 2004 - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.blockmodel; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.CollectionUtils; -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Identifies sets of structurally equivalent vertices in a graph. Vertices - * i and j are structurally equivalent iff the set of i's - * neighbors is identical to the set of j's neighbors, with the - * exception of i and j themselves. This algorithm finds all - * sets of equivalent vertices in O(V^2) time. - * - *

    You can extend this class to have a different definition of equivalence (by - * overriding isStructurallyEquivalent), and may give it hints for - * accelerating the process by overriding canPossiblyCompare. - * (For example, in a bipartite graph, canPossiblyCompare may - * return false for vertices in - * different partitions. This function should be fast.) - * - * @author Danyel Fisher - */ -public class StructurallyEquivalent implements Transformer, VertexPartition> -{ - public VertexPartition transform(Graph g) - { - Set> vertex_pairs = getEquivalentPairs(g); - - Set> rv = new HashSet>(); - Map> intermediate = new HashMap>(); - for (Pair p : vertex_pairs) - { - Set res = intermediate.get(p.getFirst()); - if (res == null) - res = intermediate.get(p.getSecond()); - if (res == null) // we haven't seen this one before - res = new HashSet(); - res.add(p.getFirst()); - res.add(p.getSecond()); - intermediate.put(p.getFirst(), res); - intermediate.put(p.getSecond(), res); - } - rv.addAll(intermediate.values()); - - // pick up the vertices which don't appear in intermediate; they are - // singletons (equivalence classes of size 1) - Collection singletons = CollectionUtils.subtract(g.getVertices(), - intermediate.keySet()); - for (V v : singletons) - { - Set v_set = Collections.singleton(v); - intermediate.put(v, v_set); - rv.add(v_set); - } - - return new VertexPartition(g, intermediate, rv); - } - - /** - * For each vertex pair v, v1 in G, checks whether v and v1 are fully - * equivalent: meaning that they connect to the exact same vertices. (Is - * this regular equivalence, or whathaveyou?) - * - * Returns a Set of Pairs of vertices, where all the vertices in the inner - * Pairs are equivalent. - * - * @param g - */ - protected Set> getEquivalentPairs(Graph g) { - - Set> rv = new HashSet>(); - Set alreadyEquivalent = new HashSet(); - - List l = new ArrayList(g.getVertices()); - - for (V v1 : l) - { - if (alreadyEquivalent.contains(v1)) - continue; - - for (Iterator iterator = l.listIterator(l.indexOf(v1) + 1); iterator.hasNext();) { - V v2 = iterator.next(); - - if (alreadyEquivalent.contains(v2)) - continue; - - if (!canPossiblyCompare(v1, v2)) - continue; - - if (isStructurallyEquivalent(g, v1, v2)) { - Pair p = new Pair(v1, v2); - alreadyEquivalent.add(v2); - rv.add(p); - } - } - } - - return rv; - } - - /** - * Checks whether a pair of vertices are structurally equivalent. - * Specifically, whether v1's predecessors are equal to v2's predecessors, - * and same for successors. 
- * - * @param g the graph in which the structural equivalence comparison is to take place - * @param v1 the vertex to check for structural equivalence to v2 - * @param v2 the vertex to check for structural equivalence to v1 - */ - protected boolean isStructurallyEquivalent(Graph g, V v1, V v2) { - - if( g.degree(v1) != g.degree(v2)) { - return false; - } - - Set n1 = new HashSet(g.getPredecessors(v1)); - n1.remove(v2); - n1.remove(v1); - Set n2 = new HashSet(g.getPredecessors(v2)); - n2.remove(v1); - n2.remove(v2); - - Set o1 = new HashSet(g.getSuccessors(v1)); - Set o2 = new HashSet(g.getSuccessors(v2)); - o1.remove(v1); - o1.remove(v2); - o2.remove(v1); - o2.remove(v2); - - // this neglects self-loops and directed edges from 1 to other - boolean b = (n1.equals(n2) && o1.equals(o2)); - if (!b) - return b; - - // if there's a directed edge v1->v2 then there's a directed edge v2->v1 - b &= ( g.isSuccessor(v1, v2) == g.isSuccessor(v2, v1)); - - // self-loop check - b &= ( g.isSuccessor(v1, v1) == g.isSuccessor(v2, v2)); - - return b; - - } - - /** - * This is a space for optimizations. For example, for a bipartite graph, - * vertices from different partitions cannot possibly be compared. - * - * @param v1 - * @param v2 - */ - protected boolean canPossiblyCompare(V v1, V v2) { - return true; - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java deleted file mode 100644 index b5ec5831ba..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Feb 3, 2004 - */ -package edu.uci.ics.jung.algorithms.blockmodel; - -import java.util.*; - -import edu.uci.ics.jung.graph.Graph; - - -/** - * Maintains information about a vertex partition of a graph. - * This can be built from a map from vertices to vertex sets - * or from a collection of (disjoint) vertex sets, - * such as those created by various clustering methods. - */ -public class VertexPartition -{ - private Map> vertex_partition_map; - private Collection> vertex_sets; - private Graph graph; - - /** - * Creates an instance based on the specified graph and mapping from vertices - * to vertex sets, and generates a set of partitions based on this mapping. - * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - */ - public VertexPartition(Graph g, Map> partition_map) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.graph = g; - } - - /** - * Creates an instance based on the specified graph, vertex-set mapping, - * and set of disjoint vertex sets. The vertex-set mapping and vertex - * partitions must be consistent; that is, the mapping must reflect the - * division of vertices into partitions, and each vertex must appear in - * exactly one partition. 
- * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Map> partition_map, - Collection> vertex_sets) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.vertex_sets = vertex_sets; - this.graph = g; - } - - /** - * Creates an instance based on the specified graph and set of disjoint vertex sets, - * and generates a vertex-to-partition map based on these sets. - * @param g the graph over which the vertex partition is defined - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Collection> vertex_sets) - { - this.vertex_sets = vertex_sets; - this.graph = g; - } - - /** - * Returns the graph on which the partition is defined. - * @return the graph on which the partition is defined - */ - public Graph getGraph() - { - return graph; - } - - /** - * Returns a map from each vertex in the input graph to its partition. - * This map is generated if it does not already exist. - * @return a map from each vertex in the input graph to a vertex set - */ - public Map> getVertexToPartitionMap() - { - if (vertex_partition_map == null) - { - this.vertex_partition_map = new HashMap>(); - for (Set set : this.vertex_sets) - for (V v : set) - this.vertex_partition_map.put(v, set); - } - return vertex_partition_map; - } - - /** - * Returns a collection of vertex sets, where each vertex in the - * input graph is in exactly one set. - * This collection is generated based on the vertex-to-partition map - * if it does not already exist. - * @return a collection of vertex sets such that each vertex in the - * instance's graph is in exactly one set - */ - public Collection> getVertexPartitions() - { - if (vertex_sets == null) - { - this.vertex_sets = new HashSet>(); - this.vertex_sets.addAll(vertex_partition_map.values()); - } - return vertex_sets; - } - - /** - * Returns the number of partitions. - */ - public int numPartitions() - { - return vertex_sets.size(); - } - - @Override - public String toString() - { - return "Partitions: " + vertex_partition_map; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/package.html deleted file mode 100644 index d1cb06acae..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/package.html +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - -Support for establishing and maintaining graph element equivalence (such as in blockmodeling). -

    -In blockmodeling, groups of vertices are clustered together by similarity -(as if members of a "block" appearing on the diagonal of the graph's adjacency -matrix). -

    -This support currently includes: -

      -
    • VertexPartition: A class that maintains information on a -division of the vertices of a graph into disjoint sets. -
    • StructurallyEquivalent: An algorithm that finds sets of vertices that are -structurally equivalent. -
    - -

    - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java deleted file mode 100644 index aa697c7dbb..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java +++ /dev/null @@ -1,162 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; -import java.util.Stack; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Finds all biconnected components (bicomponents) of an undirected graph. - * A graph is a biconnected component if - * at least 2 vertices must be removed in order to disconnect the graph. (Graphs - * consisting of one vertex, or of two connected vertices, are also biconnected.) Biconnected - * components of three or more vertices have the property that every pair of vertices in the component - * are connected by two or more vertex-disjoint paths. - *

    - * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges - * @see "Depth first search and linear graph algorithms by R. E. Tarjan (1972), SIAM J. Comp." - * - * @author Joshua O'Madadhain - */ -public class BicomponentClusterer implements Transformer, Set>> -{ - protected Map dfs_num; - protected Map high; - protected Map parents; - protected Stack stack; - protected int converse_depth; - - /** - * Constructs a new bicomponent finder - */ - public BicomponentClusterer() { - } - - /** - * Extracts the bicomponents from the graph. - * @param theGraph the graph whose bicomponents are to be extracted - * @return the ClusterSet of bicomponents - */ - public Set> transform(UndirectedGraph theGraph) - { - Set> bicomponents = new LinkedHashSet>(); - - if (theGraph.getVertices().isEmpty()) - return bicomponents; - - // initialize DFS number for each vertex to 0 - dfs_num = new HashMap(); - for (V v : theGraph.getVertices()) - { - dfs_num.put(v, 0); - } - - for (V v : theGraph.getVertices()) - { - if (dfs_num.get(v).intValue() == 0) // if we haven't hit this vertex yet... - { - high = new HashMap(); - stack = new Stack(); - parents = new HashMap(); - converse_depth = theGraph.getVertexCount(); - // find the biconnected components for this subgraph, starting from v - findBiconnectedComponents(theGraph, v, bicomponents); - - // if we only visited one vertex, this method won't have - // ID'd it as a biconnected component, so mark it as one - if (theGraph.getVertexCount() - converse_depth == 1) - { - Set s = new HashSet(); - s.add(v); - bicomponents.add(s); - } - } - } - - return bicomponents; - } - - /** - *

    Stores, in bicomponents, all the biconnected - * components that are reachable from v.

    - * - *

The algorithm basically proceeds as follows: do a depth-first - * traversal starting from v, marking each vertex with - * a value that indicates the order in which it was encountered (dfs_num), - * and with - * a value that indicates the highest point in the DFS tree that is known - * to be reachable from this vertex using non-DFS edges (high). (Since it - * is measured on non-DFS edges, "high" tells you how far back in the DFS - * tree you can reach by two distinct paths, hence biconnectivity.) - * Each time a new vertex w is encountered, push the edge just traversed - * on a stack, and call this method recursively. If w.high is no greater than - * v.dfs_num, then the contents of the stack down to (v,w) form a - * biconnected component (and v is an articulation point, that is, a - * component boundary). In either case, set v.high to max(v.high, w.high), - * and continue. If w has already been encountered but is - * not v's parent, set v.high to max(v.high, w.dfs_num) and continue. - * - *

    (In case anyone cares, the version of this algorithm on p. 224 of - * Udi Manber's "Introduction to Algorithms: A Creative Approach" seems to be - * wrong: the stack should be initialized outside this method, - * (v,w) should only be put on the stack if w hasn't been seen already, - * and there's no real benefit to putting v on the stack separately: just - * check for (v,w) on the stack rather than v. Had I known this, I could - * have saved myself a few days. JRTOM)
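A minimal usage sketch of the public entry point, transform(UndirectedGraph), on a small graph with one articulation point. UndirectedSparseGraph is assumed to be available from the JUNG graph-implementation module, and the vertex and edge values are illustrative only.

import edu.uci.ics.jung.algorithms.cluster.BicomponentClusterer;
import edu.uci.ics.jung.graph.UndirectedGraph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import java.util.Set;

public class BicomponentExample {
    public static void main(String[] args) {
        // Two triangles sharing the articulation vertex "c"
        UndirectedGraph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
        int eid = 0;
        g.addEdge(eid++, "a", "b");
        g.addEdge(eid++, "b", "c");
        g.addEdge(eid++, "c", "a");
        g.addEdge(eid++, "c", "d");
        g.addEdge(eid++, "d", "e");
        g.addEdge(eid++, "e", "c");

        Set<Set<String>> bicomponents =
                new BicomponentClusterer<String, Integer>().transform(g);
        System.out.println(bicomponents); // expected: {a, b, c} and {c, d, e}
    }
}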

    - * - */ - protected void findBiconnectedComponents(UndirectedGraph g, V v, Set> bicomponents) - { - int v_dfs_num = converse_depth; - dfs_num.put(v, v_dfs_num); - converse_depth--; - high.put(v, v_dfs_num); - - for (V w : g.getNeighbors(v)) - { - int w_dfs_num = dfs_num.get(w).intValue();//get(w, dfs_num); - E vw = g.findEdge(v,w); - if (w_dfs_num == 0) // w hasn't yet been visited - { - parents.put(w, v); // v is w's parent in the DFS tree - stack.push(vw); - findBiconnectedComponents(g, w, bicomponents); - int w_high = high.get(w).intValue();//get(w, high); - if (w_high <= v_dfs_num) - { - // v disconnects w from the rest of the graph, - // i.e., v is an articulation point - // thus, everything between the top of the stack and - // v is part of a single biconnected component - Set bicomponent = new HashSet(); - E e; - do - { - e = stack.pop(); - bicomponent.addAll(g.getIncidentVertices(e)); - } - while (e != vw); - bicomponents.add(bicomponent); - } - high.put(v, Math.max(w_high, high.get(v).intValue())); - } - else if (w != parents.get(v)) // (v,w) is a back or a forward edge - high.put(v, Math.max(w_dfs_num, high.get(v).intValue())); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java deleted file mode 100644 index 59e4605e35..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java +++ /dev/null @@ -1,109 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.BetweennessCentrality; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - * An algorithm for computing clusters (community structure) in graphs based on edge betweenness. - * The betweenness of an edge is defined as the extent to which that edge lies along - * shortest paths between all pairs of nodes. - * - * This algorithm works by iteratively following the 2 step process: - *
      - *
    • Compute edge betweenness for all edges in current graph - *
    • Remove edge with highest betweenness - *
    - *

    - * Running time is: O(kmn) where k is the number of edges to remove, m is the total number of edges, and - * n is the total number of vertices. For very sparse graphs the running time is closer to O(kn^2) and for - * graphs with strong community structure, the complexity is even lower. - *
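A hedged usage sketch of the clusterer described above: remove the single highest-betweenness edge of a small "barbell" graph and read back the resulting communities. UndirectedSparseGraph is assumed from the JUNG graph-implementation module; the vertex and edge values are illustrative only.

import edu.uci.ics.jung.algorithms.cluster.EdgeBetweennessClusterer;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import java.util.Set;

public class EdgeBetweennessExample {
    public static void main(String[] args) {
        // Two triangles joined by a single bridge edge c-x
        Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
        int eid = 0;
        g.addEdge(eid++, "a", "b"); g.addEdge(eid++, "b", "c"); g.addEdge(eid++, "c", "a");
        g.addEdge(eid++, "x", "y"); g.addEdge(eid++, "y", "z"); g.addEdge(eid++, "z", "x");
        g.addEdge(eid++, "c", "x"); // the bridge carries the highest betweenness

        EdgeBetweennessClusterer<String, Integer> clusterer =
                new EdgeBetweennessClusterer<String, Integer>(1); // remove one edge
        Set<Set<String>> clusters = clusterer.transform(g);
        System.out.println(clusters);                    // expected: {a, b, c} and {x, y, z}
        System.out.println(clusterer.getEdgesRemoved()); // the bridge's edge id
    }
}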

    - * This algorithm is a slight modification of the algorithm discussed below in that the number of edges - * to be removed is parameterized. - * @author Scott White - * @author Tom Nelson (converted to jung2) - * @see "Community structure in social and biological networks by Michelle Girvan and Mark Newman" - */ -public class EdgeBetweennessClusterer implements Transformer,Set>> { - private int mNumEdgesToRemove; - private Map> edges_removed; - - /** - * Constructs a new clusterer for the specified graph. - * @param numEdgesToRemove the number of edges to be progressively removed from the graph - */ - public EdgeBetweennessClusterer(int numEdgesToRemove) { - mNumEdgesToRemove = numEdgesToRemove; - edges_removed = new LinkedHashMap>(); - } - - /** - * Finds the set of clusters which have the strongest "community structure". - * The more edges removed the smaller and more cohesive the clusters. - * @param graph the graph - */ - public Set> transform(Graph graph) { - - if (mNumEdgesToRemove < 0 || mNumEdgesToRemove > graph.getEdgeCount()) { - throw new IllegalArgumentException("Invalid number of edges passed in."); - } - - edges_removed.clear(); - - for (int k=0;k bc = new BetweennessCentrality(graph); - E to_remove = null; - double score = 0; - for (E e : graph.getEdges()) - if (bc.getEdgeScore(e) > score) - { - to_remove = e; - score = bc.getEdgeScore(e); - } - edges_removed.put(to_remove, graph.getEndpoints(to_remove)); - graph.removeEdge(to_remove); - } - - WeakComponentClusterer wcSearch = new WeakComponentClusterer(); - Set> clusterSet = wcSearch.transform(graph); - - for (Map.Entry> entry : edges_removed.entrySet()) - { - Pair endpoints = entry.getValue(); - graph.addEdge(entry.getKey(), endpoints.getFirst(), endpoints.getSecond()); - } - return clusterSet; - } - - /** - * Retrieves the list of all edges that were removed - * (assuming extract(...) was previously called). - * The edges returned - * are stored in order in which they were removed. - * - * @return the edges in the original graph - */ - public List getEdgesRemoved() - { - return new ArrayList(edges_removed.keySet()); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java deleted file mode 100644 index 859c06307c..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java +++ /dev/null @@ -1,366 +0,0 @@ -/* - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 12, 2004 - */ -package edu.uci.ics.jung.algorithms.cluster; - -import edu.uci.ics.jung.algorithms.scoring.VoltageScorer; -import edu.uci.ics.jung.algorithms.util.DiscreteDistribution; -import edu.uci.ics.jung.algorithms.util.KMeansClusterer; -import edu.uci.ics.jung.algorithms.util.KMeansClusterer.NotEnoughClustersException; -import edu.uci.ics.jung.graph.Graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - -/** - *

    Clusters vertices of a Graph based on their ranks as - * calculated by VoltageScorer. This algorithm is based on, - * but not identical with, the method described in the paper below. - * The primary difference is that Wu and Huberman assume a priori that the clusters - * are of approximately the same size, and therefore use a more complex - * method than k-means (which is used here) for determining cluster - * membership based on co-occurrence data.

    - * - *

    The algorithm proceeds as follows: - *

      - *
    • first, generate a set of candidate clusters as follows: - *
        - *
      • pick (widely separated) vertex pair, run VoltageScorer - *
      • group the vertices in two clusters according to their voltages - *
      • store resulting candidate clusters - *
      - *
    • second, generate k-1 clusters as follows: - *
        - *
      • pick a vertex v as a cluster 'seed' - *
        (Wu/Huberman: most frequent vertex in candidate clusters) - *
• calculate the co-occurrence of v with each other vertex - * over all candidate clusters - *
      • separate co-occurrence counts into high/low; - * high vertices constitute a cluster - *
      • remove v's vertices from candidate clusters; continue - *
      - *
    • finally, remaining unassigned vertices are assigned to the kth ("garbage") - * cluster. - *

    - * - *

    NOTE: Depending on how the co-occurrence data splits the data into - * clusters, the number of clusters returned by this algorithm may be less than the - * number of clusters requested. The number of clusters will never be more than - * the number requested, however.
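A hedged usage sketch of the procedure outlined above, run on two 4-cliques joined by one edge; results vary with the random seed. UndirectedSparseGraph is assumed from the JUNG graph-implementation module, and the candidate count of 20 is an arbitrary illustrative choice.

import edu.uci.ics.jung.algorithms.cluster.VoltageClusterer;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import java.util.Collection;
import java.util.Set;

public class VoltageClustererExample {
    public static void main(String[] args) {
        // Two 4-cliques {0..3} and {4..7} joined by the edge 3-4
        Graph<Integer, String> g = new UndirectedSparseGraph<Integer, String>();
        int[][] cliques = {{0, 1, 2, 3}, {4, 5, 6, 7}};
        for (int[] c : cliques)
            for (int i = 0; i < c.length; i++)
                for (int j = i + 1; j < c.length; j++)
                    g.addEdge(c[i] + "-" + c[j], c[i], c[j]);
        g.addEdge("3-4", 3, 4);

        VoltageClusterer<Integer, String> vc =
                new VoltageClusterer<Integer, String>(g, 20); // 20 candidate clusters
        Collection<Set<Integer>> communities = vc.cluster(2);
        System.out.println(communities);        // typically {0,1,2,3} and {4,5,6,7}
        System.out.println(vc.getCommunity(0)); // two-way split seeded at vertex 0
    }
}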

    - * - * @author Joshua O'Madadhain - * @see "'Finding communities in linear time: a physics approach', Fang Wu and Bernardo Huberman, http://www.hpl.hp.com/research/idl/papers/linear/" - * @see VoltageScorer - * @see KMeansClusterer - */ -public class VoltageClusterer -{ - protected int num_candidates; - protected KMeansClusterer kmc; - protected Random rand; - protected Graph g; - - /** - * Creates an instance of a VoltageCluster with the specified parameters. - * These are mostly parameters that are passed directly to VoltageScorer - * and KMeansClusterer. - * - * @param num_candidates the number of candidate clusters to create - */ - public VoltageClusterer(Graph g, int num_candidates) - { - if (num_candidates < 1) - throw new IllegalArgumentException("must generate >=1 candidates"); - - this.num_candidates = num_candidates; - this.kmc = new KMeansClusterer(); - rand = new Random(); - this.g = g; - } - - protected void setRandomSeed(int random_seed) - { - rand = new Random(random_seed); - } - - /** - * Returns a community (cluster) centered around v. - * @param v the vertex whose community we wish to discover - */ - public Collection> getCommunity(V v) - { - return cluster_internal(v, 2); - } - - /** - * Clusters the vertices of g into - * num_clusters clusters, based on their connectivity. - * @param num_clusters the number of clusters to identify - */ - public Collection> cluster(int num_clusters) - { - return cluster_internal(null, num_clusters); - } - - /** - * Does the work of getCommunity and cluster. - * @param origin the vertex around which clustering is to be done - * @param num_clusters the (maximum) number of clusters to find - */ - protected Collection> cluster_internal(V origin, int num_clusters) - { - // generate candidate clusters - // repeat the following 'samples' times: - // * pick (widely separated) vertex pair, run VoltageScorer - // * use k-means to identify 2 communities in ranked graph - // * store resulting candidate communities - ArrayList v_array = new ArrayList(g.getVertices()); - - LinkedList> candidates = new LinkedList>(); - - for (int j = 0; j < num_candidates; j++) - { - V source; - if (origin == null) - source = v_array.get((int)(rand.nextDouble() * v_array.size())); - else - source = origin; - V target = null; - do - { - target = v_array.get((int)(rand.nextDouble() * v_array.size())); - } - while (source == target); - VoltageScorer vs = new VoltageScorer(g, source, target); - vs.evaluate(); - - Map voltage_ranks = new HashMap(); - for (V v : g.getVertices()) - voltage_ranks.put(v, new double[] {vs.getVertexScore(v)}); - -// addOneCandidateCluster(candidates, voltage_ranks); - addTwoCandidateClusters(candidates, voltage_ranks); - } - - // repeat the following k-1 times: - // * pick a vertex v as a cluster seed - // (Wu/Huberman: most frequent vertex in candidates) - // * calculate co-occurrence (in candidate clusters) - // of this vertex with all others - // * use k-means to separate co-occurrence counts into high/low; - // high vertices are a cluster - // * remove v's vertices from candidate clusters - - Collection> clusters = new LinkedList>(); - Set remaining = new HashSet(g.getVertices()); - - List seed_candidates = getSeedCandidates(candidates); - int seed_index = 0; - - for (int j = 0; j < (num_clusters - 1); j++) - { - if (remaining.isEmpty()) - break; - - V seed; - if (seed_index == 0 && origin != null) - seed = origin; - else - { - do { seed = seed_candidates.get(seed_index++); } - while (!remaining.contains(seed)); - } - - Map occur_counts 
= getObjectCounts(candidates, seed); - if (occur_counts.size() < 2) - break; - - // now that we have the counts, cluster them... - try - { - Collection> high_low = kmc.cluster(occur_counts, 2); - // ...get the cluster with the highest-valued centroid... - Iterator> h_iter = high_low.iterator(); - Map cluster1 = h_iter.next(); - Map cluster2 = h_iter.next(); - double[] centroid1 = DiscreteDistribution.mean(cluster1.values()); - double[] centroid2 = DiscreteDistribution.mean(cluster2.values()); - Set new_cluster; - if (centroid1[0] >= centroid2[0]) - new_cluster = cluster1.keySet(); - else - new_cluster = cluster2.keySet(); - - // ...remove the elements of new_cluster from each candidate... - for (Set cluster : candidates) - cluster.removeAll(new_cluster); - clusters.add(new_cluster); - remaining.removeAll(new_cluster); - } - catch (NotEnoughClustersException nece) - { - // all remaining vertices are in the same cluster - break; - } - } - - // identify remaining vertices (if any) as a 'garbage' cluster - if (!remaining.isEmpty()) - clusters.add(remaining); - - return clusters; - } - - /** - * Do k-means with three intervals and pick the - * smaller two clusters (presumed to be on the ends); this is closer to the Wu-Huberman method. - * @param candidates - * @param voltage_ranks - */ - protected void addTwoCandidateClusters(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters = new ArrayList>(kmc.cluster(voltage_ranks, 3)); - boolean b01 = clusters.get(0).size() > clusters.get(1).size(); - boolean b02 = clusters.get(0).size() > clusters.get(2).size(); - boolean b12 = clusters.get(1).size() > clusters.get(2).size(); - if (b01 && b02) - { - candidates.add(clusters.get(1).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b01 && b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b02 && !b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(1).keySet()); - } - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue - } - } - - /** - * alternative to addTwoCandidateClusters(): cluster vertices by voltages into 2 clusters. - * We only consider the smaller of the two clusters returned - * by k-means to be a 'true' cluster candidate; the other is a garbage cluster. - * @param candidates - * @param voltage_ranks - */ - protected void addOneCandidateCluster(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters; - clusters = new ArrayList>(kmc.cluster(voltage_ranks, 2)); - if (clusters.get(0).size() < clusters.get(1).size()) - candidates.add(clusters.get(0).keySet()); - else - candidates.add(clusters.get(1).keySet()); - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue - } - } - - /** - * Returns an array of cluster seeds, ranked in decreasing order - * of number of appearances in the specified collection of candidate - * clusters. 
- * @param candidates - */ - protected List getSeedCandidates(Collection> candidates) - { - final Map occur_counts = getObjectCounts(candidates, null); - - ArrayList occurrences = new ArrayList(occur_counts.keySet()); - Collections.sort(occurrences, new MapValueArrayComparator(occur_counts)); - - System.out.println("occurrences: "); - for (int i = 0; i < occurrences.size(); i++) - System.out.println(occur_counts.get(occurrences.get(i))[0]); - - return occurrences; - } - - protected Map getObjectCounts(Collection> candidates, V seed) - { - Map occur_counts = new HashMap(); - for (V v : g.getVertices()) - occur_counts.put(v, new double[]{0}); - - for (Set candidate : candidates) - { - if (seed == null) - System.out.println(candidate.size()); - if (seed == null || candidate.contains(seed)) - { - for (V element : candidate) - { - double[] count = occur_counts.get(element); - count[0]++; - } - } - } - - if (seed == null) - { - System.out.println("occur_counts size: " + occur_counts.size()); - for (V v : occur_counts.keySet()) - System.out.println(occur_counts.get(v)[0]); - } - - return occur_counts; - } - - protected class MapValueArrayComparator implements Comparator - { - private Map map; - - protected MapValueArrayComparator(Map map) - { - this.map = map; - } - - public int compare(V o1, V o2) - { - double[] count0 = map.get(o1); - double[] count1 = map.get(o2); - if (count0[0] < count1[0]) - return 1; - else if (count0[0] > count1[0]) - return -1; - return 0; - } - - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java deleted file mode 100644 index cb79a78448..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * Finds all weak components in a graph as sets of vertex sets. A weak component is defined as - * a maximal subgraph in which all pairs of vertices in the subgraph are reachable from one - * another in the underlying undirected subgraph. - *

    This implementation identifies components as sets of vertex sets. - * To create the induced graphs from any or all of these vertex sets, - * see algorithms.filters.FilterUtils. - *
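A minimal usage sketch of the clusterer whose source follows; DirectedSparseMultigraph is assumed from the JUNG graph-implementation module (weak components ignore edge direction), and the graph contents are illustrative only.

import edu.uci.ics.jung.algorithms.cluster.WeakComponentClusterer;
import edu.uci.ics.jung.graph.DirectedSparseMultigraph;
import edu.uci.ics.jung.graph.Graph;
import java.util.Set;

public class WeakComponentExample {
    public static void main(String[] args) {
        // Two directed chains with no edges between them
        Graph<String, Integer> g = new DirectedSparseMultigraph<String, Integer>();
        g.addEdge(1, "a", "b");
        g.addEdge(2, "b", "c");
        g.addEdge(3, "x", "y");

        Set<Set<String>> components =
                new WeakComponentClusterer<String, Integer>().transform(g);
        System.out.println(components); // expected: {a, b, c} and {x, y}
    }
}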

    - * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges. - * @author Scott White - */ -public class WeakComponentClusterer implements Transformer, Set>> -{ - /** - * Extracts the weak components from a graph. - * @param graph the graph whose weak components are to be extracted - * @return the list of weak components - */ - public Set> transform(Graph graph) { - - Set> clusterSet = new HashSet>(); - - HashSet unvisitedVertices = new HashSet(graph.getVertices()); - - while (!unvisitedVertices.isEmpty()) { - Set cluster = new HashSet(); - V root = unvisitedVertices.iterator().next(); - unvisitedVertices.remove(root); - cluster.add(root); - - Buffer queue = new UnboundedFifoBuffer(); - queue.add(root); - - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - Collection neighbors = graph.getNeighbors(currentVertex); - - for(V neighbor : neighbors) { - if (unvisitedVertices.contains(neighbor)) { - queue.add(neighbor); - unvisitedVertices.remove(neighbor); - cluster.add(neighbor); - } - } - } - clusterSet.add(cluster); - } - return clusterSet; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/package.html deleted file mode 100644 index f8bdb2279a..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/cluster/package.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - -Mechanisms for identifying clusters in graphs. Where these clusters define disjoint sets of vertices, -they may be used to define a VertexPartition for more convenient manipulation of the vertex/set -relationships. - -Current clustering algorithms include: -

      -
    • BicomponentClusterer: finds all subsets of vertices for which at least -2 vertices must be removed in order to disconnect the induced subgraphs. -
    • EdgeBetweennessClusterer: identifies vertex clusters by removing the edges of the highest -'betweenness' scores (see the importance/scoring package). -
• VoltageClusterer: Clusters vertices based on their ranks as -calculated by VoltageScorer. -
• WeakComponentClusterer: Clusters vertices based on their membership in weakly -connected components of a graph. -
    - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java deleted file mode 100644 index 5e3be06d18..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Created on May 19, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.filters; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Transforms the input graph into one which contains only those edges - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). All vertices from the original graph - * are copied into the new graph (even if they are not incident to any - * edges in the new graph). - * - * @author Joshua O'Madadhain - */ -public class EdgePredicateFilter implements Filter -{ - protected Predicate edge_pred; - - /** - * Creates an instance based on the specified edge Predicate. - * @param edge_pred the predicate that specifies which edges to add to the filtered graph - */ - public EdgePredicateFilter(Predicate edge_pred) - { - this.edge_pred = edge_pred; - } - - @SuppressWarnings("unchecked") - public Graph transform(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - - for (V v : g.getVertices()) - filtered.addVertex(v); - - for (E e : g.getEdges()) - { - if (edge_pred.evaluate(e)) - filtered.addEdge(e, g.getIncidentVertices(e)); - } - - return filtered; - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java deleted file mode 100644 index a62895cc43..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java +++ /dev/null @@ -1,26 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.filters; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * An interface for classes that return a subset of the input Graph - * as a Graph. The Graph returned may be either a - * new graph or a view into an existing graph; the documentation for the filter - * must specify which. 
- * - * @author danyelf - */ -public interface Filter extends Transformer, Graph>{ } diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java deleted file mode 100644 index 4845c0f37b..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jun 7, 2008 - * - */ -package edu.uci.ics.jung.algorithms.filters; - -import java.util.ArrayList; -import java.util.Collection; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Utility methods relating to filtering. - */ -public class FilterUtils -{ - /** - * Creates the induced subgraph from graph whose vertex set - * is equal to vertices. The graph returned has - * vertices as its vertex set, and includes all edges from - * graph which are incident only to elements of - * vertices. - * - * @param the vertex type - * @param the edge type - * @param vertices the subset of graph's vertices around - * which the subgraph is to be constructed - * @param graph the graph whose subgraph is to be constructed - * @return the subgraph induced by vertices - * @throws IllegalArgumentException if any vertex in - * vertices is not in graph - */ - @SuppressWarnings("unchecked") - public static > G createInducedSubgraph(Collection - vertices, G graph) - { - G subgraph = null; - try - { - subgraph = (G)graph.getClass().newInstance(); - - for (V v : vertices) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + - " is not an element of " + graph); - subgraph.addVertex(v); - } - - for (E e : graph.getEdges()) - { - Collection incident = graph.getIncidentVertices(e); - if (vertices.containsAll(incident)) - subgraph.addEdge(e, incident, graph.getEdgeType(e)); - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - return subgraph; - } - - /** - * Creates the induced subgraphs of graph associated with each - * element of vertex_collections. - * Note that these vertex collections need not be disjoint. 
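A hedged sketch of FilterUtils.createInducedSubgraph in use, extracting the subgraph induced by a hand-picked vertex set. UndirectedSparseGraph is assumed from the JUNG graph-implementation module (subgraph creation relies on the graph class having a public no-argument constructor), and the example contents are illustrative only.

import edu.uci.ics.jung.algorithms.filters.FilterUtils;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class InducedSubgraphExample {
    public static void main(String[] args) {
        UndirectedSparseGraph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
        g.addEdge(1, "a", "b");
        g.addEdge(2, "b", "c");
        g.addEdge(3, "c", "d"); // dropped below: "d" is outside the vertex set

        Set<String> keep = new HashSet<String>(Arrays.asList("a", "b", "c"));
        UndirectedSparseGraph<String, Integer> sub = FilterUtils.createInducedSubgraph(keep, g);
        System.out.println(sub.getVertices()); // the vertices a, b, c
        System.out.println(sub.getEdges());    // the edges 1 and 2
    }
}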
- * @param the vertex type - * @param the edge type - * @param vertex_collections the collections of vertex collections to be - * used to induce the subgraphs - * @param graph the graph whose subgraphs are to be created - * @return the induced subgraphs of graph associated with each - * element of vertex_collections - */ - public static > Collection - createAllInducedSubgraphs(Collection> - vertex_collections, G graph) - { - Collection subgraphs = new ArrayList(); - - for (Collection vertex_set : vertex_collections) - subgraphs.add(createInducedSubgraph(vertex_set, graph)); - - return subgraphs; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java deleted file mode 100644 index 62bcfc29b5..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Dec 26, 2001 - * - */ -package edu.uci.ics.jung.algorithms.filters; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import edu.uci.ics.jung.algorithms.filters.Filter; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * A filter used to extract the k-neighborhood around one or more root node(s). - * The k-neighborhood is defined as the subgraph induced by the set of - * vertices that are k or fewer hops (unweighted shortest-path distance) - * away from the root node. - * - * @author Danyel Fisher - */ -public class KNeighborhoodFilter implements Filter { - - /** - * The type of edge to follow for defining the neighborhood. - */ - public static enum EdgeType { IN_OUT, IN, OUT } - private Set rootNodes; - private int radiusK; - private EdgeType edgeType; - - /** - * Constructs a new instance of the filter. - * @param rootNodes the set of root nodes - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(Set rootNodes, int radiusK, EdgeType edgeType) { - this.rootNodes = rootNodes; - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs a new instance of the filter. - * @param rootNode the root node - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(V rootNode, int radiusK, EdgeType edgeType) { - this.rootNodes = new HashSet(); - this.rootNodes.add(rootNode); - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs an unassembled graph containing the k-neighborhood around the root node(s). 
- */ - @SuppressWarnings("unchecked") - public Graph transform(Graph graph) { - // generate a Set of Vertices we want - // add all to the UG - int currentDepth = 0; - List currentVertices = new ArrayList(); - Set visitedVertices = new HashSet(); - Set visitedEdges = new HashSet(); - Set acceptedVertices = new HashSet(); - //Copy, mark, and add all the root nodes to the new subgraph - for (V currentRoot : rootNodes) { - - visitedVertices.add(currentRoot); - acceptedVertices.add(currentRoot); - currentVertices.add(currentRoot); - } - ArrayList newVertices = null; - //Use BFS to locate the neighborhood around the root nodes within distance k - while (currentDepth < radiusK) { - newVertices = new ArrayList(); - for (V currentVertex : currentVertices) { - - Collection edges = null; - switch (edgeType) { - case IN_OUT : - edges = graph.getIncidentEdges(currentVertex); - break; - case IN : - edges = graph.getInEdges(currentVertex); - break; - case OUT : - edges = graph.getOutEdges(currentVertex); - break; - } - for (E currentEdge : edges) { - - V currentNeighbor = - graph.getOpposite(currentVertex, currentEdge); - if (!visitedEdges.contains(currentEdge)) { - visitedEdges.add(currentEdge); - if (!visitedVertices.contains(currentNeighbor)) { - visitedVertices.add(currentNeighbor); - acceptedVertices.add(currentNeighbor); - newVertices.add(currentNeighbor); - } - } - } - } - currentVertices = newVertices; - currentDepth++; - } - Graph ug = null; - try { - ug = graph.getClass().newInstance(); - for(E edge : graph.getEdges()) { - Pair endpoints = graph.getEndpoints(edge); - if(acceptedVertices.containsAll(endpoints)) { - ug.addEdge(edge, endpoints.getFirst(), endpoints.getSecond()); - } - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - return ug; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java deleted file mode 100644 index 4543b424dc..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Created on May 19, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.filters; - -import java.util.Collection; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Transforms the input graph into one which contains only those vertices - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). Only those edges whose entire incident vertex collection - * passes the predicate are copied into the new graph. - * - * @author Joshua O'Madadhain - */ -public class VertexPredicateFilter implements Filter -{ - protected Predicate vertex_pred; - - /** - * Creates an instance based on the specified vertex Predicate. 
- * @param vertex_pred the predicate that specifies which vertices to add to the filtered graph - */ - public VertexPredicateFilter(Predicate vertex_pred) - { - this.vertex_pred = vertex_pred; - } - - @SuppressWarnings("unchecked") - public Graph transform(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - - for (V v : g.getVertices()) - if (vertex_pred.evaluate(v)) - filtered.addVertex(v); - - Collection filtered_vertices = filtered.getVertices(); - - for (E e : g.getEdges()) - { - Collection incident = g.getIncidentVertices(e); - if (filtered_vertices.containsAll(incident)) - filtered.addEdge(e, incident); - } - - return filtered; - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/package.html deleted file mode 100644 index 0f9a018f88..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/filters/package.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - -Filtering mechanisms that produce subgraphs of an original graph. -Currently includes: -
      -
    • Filter: an interface for graph filters -
    • {Edge,Vertex}PredicateFilter: graph filters that return the -induced subgraph according to the -specified edge or vertex Predicate, respectively. -
    • KNeighborhoodFilter: a filter that returns the subgraph -induced by vertices within (unweighted) distance k of a specified vertex. -
    - - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java deleted file mode 100644 index af9ee34c4c..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java +++ /dev/null @@ -1,314 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.flows; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; - - -/** - * Implements the Edmonds-Karp maximum flow algorithm for solving the maximum flow problem. - * After the algorithm is executed, - * the input {@code Map} is populated with a {@code Number} for each edge that indicates - * the flow along that edge. - *

    - * An example of using this algorithm is as follows: - *

    - * EdmondsKarpMaxFlow ek = new EdmondsKarpMaxFlow(graph, source, sink, edge_capacities, edge_flows, 
    - * edge_factory);
    - * ek.evaluate(); // This instructs the class to compute the max flow
    - * 
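A more complete, hedged variant of the snippet above, showing one way the capacity transformer, flow map, and back-edge factory might be supplied. DirectedSparseMultigraph is assumed from the JUNG graph-implementation module; the network and its capacities are illustrative only.

import edu.uci.ics.jung.algorithms.flows.EdmondsKarpMaxFlow;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseMultigraph;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.collections15.Factory;
import org.apache.commons.collections15.Transformer;

public class MaxFlowExample {
    public static void main(String[] args) {
        DirectedGraph<String, Integer> g = new DirectedSparseMultigraph<String, Integer>();
        final Map<Integer, Number> capacities = new HashMap<Integer, Number>();
        // s -> a -> t with capacity 3, s -> b -> t with capacity 2
        g.addEdge(0, "s", "a"); capacities.put(0, 3);
        g.addEdge(1, "a", "t"); capacities.put(1, 3);
        g.addEdge(2, "s", "b"); capacities.put(2, 2);
        g.addEdge(3, "b", "t"); capacities.put(3, 2);

        Transformer<Integer, Number> capacityOf = new Transformer<Integer, Number>() {
            public Number transform(Integer e) { return capacities.get(e); }
        };
        Factory<Integer> backEdgeFactory = new Factory<Integer>() {
            int next = 100; // ids that do not collide with the real edges
            public Integer create() { return next++; }
        };
        Map<Integer, Number> flows = new HashMap<Integer, Number>();

        EdmondsKarpMaxFlow<String, Integer> ek = new EdmondsKarpMaxFlow<String, Integer>(
                g, "s", "t", capacityOf, flows, backEdgeFactory);
        ek.evaluate();
        System.out.println(ek.getMaxFlow());     // 5
        System.out.println(flows);               // per-edge flow values
        System.out.println(ek.getMinCutEdges()); // edges crossing the min cut
    }
}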
    - * - * @see "Introduction to Algorithms by Cormen, Leiserson, Rivest, and Stein." - * @see "Network Flows by Ahuja, Magnanti, and Orlin." - * @see "Theoretical improvements in algorithmic efficiency for network flow problems by Edmonds and Karp, 1972." - * @author Scott White, adapted to jung2 by Tom Nelson - */ -public class EdmondsKarpMaxFlow extends IterativeProcess { - - private DirectedGraph mFlowGraph; - private DirectedGraph mOriginalGraph; - private V source; - private V target; - private int mMaxFlow; - private Set mSourcePartitionNodes; - private Set mSinkPartitionNodes; - private Set mMinCutEdges; - - private Map residualCapacityMap = new HashMap(); - private Map parentMap = new HashMap(); - private Map parentCapacityMap = new HashMap(); - private Transformer edgeCapacityTransformer; - private Map edgeFlowMap; - private Factory edgeFactory; - - /** - * Constructs a new instance of the algorithm solver for a given graph, source, and sink. - * Source and sink vertices must be elements of the specified graph, and must be - * distinct. - * @param directedGraph the flow graph - * @param source the source vertex - * @param sink the sink vertex - * @param edgeCapacityTransformer the transformer that gets the capacity for each edge. - * @param edgeFlowMap the map where the solver will place the value of the flow for each edge - * @param edgeFactory used to create new edge instances for backEdges - */ - @SuppressWarnings("unchecked") - public EdmondsKarpMaxFlow(DirectedGraph directedGraph, V source, V sink, - Transformer edgeCapacityTransformer, Map edgeFlowMap, - Factory edgeFactory) { - - if(directedGraph.getVertices().contains(source) == false || - directedGraph.getVertices().contains(sink) == false) { - throw new IllegalArgumentException("source and sink vertices must be elements of the specified graph"); - } - if (source.equals(sink)) { - throw new IllegalArgumentException("source and sink vertices must be distinct"); - } - mOriginalGraph = directedGraph; - - this.source = source; - this.target = sink; - this.edgeFlowMap = edgeFlowMap; - this.edgeCapacityTransformer = edgeCapacityTransformer; - this.edgeFactory = edgeFactory; - try { - mFlowGraph = directedGraph.getClass().newInstance(); - for(E e : mOriginalGraph.getEdges()) { - mFlowGraph.addEdge(e, mOriginalGraph.getSource(e), - mOriginalGraph.getDest(e), EdgeType.DIRECTED); - } - for(V v : mOriginalGraph.getVertices()) { - mFlowGraph.addVertex(v); - } - - } catch (InstantiationException e) { - e.printStackTrace(); - } catch (IllegalAccessException e) { - e.printStackTrace(); - } - mMaxFlow = 0; - mSinkPartitionNodes = new HashSet(); - mSourcePartitionNodes = new HashSet(); - mMinCutEdges = new HashSet(); - } - - private void clearParentValues() { - parentMap.clear(); - parentCapacityMap.clear(); - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); - } - - protected boolean hasAugmentingPath() { - - mSinkPartitionNodes.clear(); - mSourcePartitionNodes.clear(); - mSinkPartitionNodes.addAll(mFlowGraph.getVertices()); - - Set visitedEdgesMap = new HashSet(); - Buffer queue = new UnboundedFifoBuffer(); - queue.add(source); - - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - mSinkPartitionNodes.remove(currentVertex); - mSourcePartitionNodes.add(currentVertex); - Number currentCapacity = parentCapacityMap.get(currentVertex); - - Collection neighboringEdges = mFlowGraph.getOutEdges(currentVertex); - - for (E neighboringEdge : neighboringEdges) { - - V neighboringVertex = 
mFlowGraph.getDest(neighboringEdge); - - Number residualCapacity = residualCapacityMap.get(neighboringEdge); - if (residualCapacity.intValue() <= 0 || visitedEdgesMap.contains(neighboringEdge)) - continue; - - V neighborsParent = parentMap.get(neighboringVertex); - Number neighborCapacity = parentCapacityMap.get(neighboringVertex); - int newCapacity = Math.min(residualCapacity.intValue(),currentCapacity.intValue()); - - if ((neighborsParent == null) || newCapacity > neighborCapacity.intValue()) { - parentMap.put(neighboringVertex, currentVertex); - parentCapacityMap.put(neighboringVertex, new Integer(newCapacity)); - visitedEdgesMap.add(neighboringEdge); - if (neighboringVertex != target) { - queue.add(neighboringVertex); - } - } - } - } - - boolean hasAugmentingPath = false; - Number targetsParentCapacity = parentCapacityMap.get(target); - if (targetsParentCapacity != null && targetsParentCapacity.intValue() > 0) { - updateResidualCapacities(); - hasAugmentingPath = true; - } - clearParentValues(); - return hasAugmentingPath; - } - - @Override - public void step() { - while (hasAugmentingPath()) { - } - computeMinCut(); -// return 0; - } - - private void computeMinCut() { - - for (E e : mOriginalGraph.getEdges()) { - - V source = mOriginalGraph.getSource(e); - V destination = mOriginalGraph.getDest(e); - if (mSinkPartitionNodes.contains(source) && mSinkPartitionNodes.contains(destination)) { - continue; - } - if (mSourcePartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - if (mSinkPartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - mMinCutEdges.add(e); - } - } - - /** - * Returns the value of the maximum flow from the source to the sink. - */ - public int getMaxFlow() { - return mMaxFlow; - } - - /** - * Returns the nodes which share the same partition (as defined by the min-cut edges) - * as the sink node. - */ - public Set getNodesInSinkPartition() { - return mSinkPartitionNodes; - } - - /** - * Returns the nodes which share the same partition (as defined by the min-cut edges) - * as the source node. - */ - public Set getNodesInSourcePartition() { - return mSourcePartitionNodes; - } - - /** - * Returns the edges in the minimum cut. - */ - public Set getMinCutEdges() { - return mMinCutEdges; - } - - /** - * Returns the graph for which the maximum flow is calculated. 
- */ - public DirectedGraph getFlowGraph() { - return mFlowGraph; - } - - @Override - protected void initializeIterations() { - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); - - List edgeList = new ArrayList(mFlowGraph.getEdges()); - - for (int eIdx=0;eIdx< edgeList.size();eIdx++) { - E edge = edgeList.get(eIdx); - Number capacity = edgeCapacityTransformer.transform(edge); - - if (capacity == null) { - throw new IllegalArgumentException("Edge capacities must be provided in Transformer passed to constructor"); - } - residualCapacityMap.put(edge, capacity); - - V source = mFlowGraph.getSource(edge); - V destination = mFlowGraph.getDest(edge); - - if(mFlowGraph.isPredecessor(source, destination) == false) { - E backEdge = edgeFactory.create(); - mFlowGraph.addEdge(backEdge, destination, source, EdgeType.DIRECTED); - residualCapacityMap.put(backEdge, 0); - } - } - } - - @Override - protected void finalizeIterations() { - - for (E currentEdge : mFlowGraph.getEdges()) { - Number capacity = edgeCapacityTransformer.transform(currentEdge); - - Number residualCapacity = residualCapacityMap.get(currentEdge); - if (capacity != null) { - Integer flowValue = new Integer(capacity.intValue()-residualCapacity.intValue()); - this.edgeFlowMap.put(currentEdge, flowValue); - } - } - - Set backEdges = new HashSet(); - for (E currentEdge: mFlowGraph.getEdges()) { - - if (edgeCapacityTransformer.transform(currentEdge) == null) { - backEdges.add(currentEdge); - } else { - residualCapacityMap.remove(currentEdge); - } - } - for(E e : backEdges) { - mFlowGraph.removeEdge(e); - } - } - - private void updateResidualCapacities() { - - Number augmentingPathCapacity = parentCapacityMap.get(target); - mMaxFlow += augmentingPathCapacity.intValue(); - V currentVertex = target; - V parentVertex = null; - while ((parentVertex = parentMap.get(currentVertex)) != currentVertex) { - E currentEdge = mFlowGraph.findEdge(parentVertex, currentVertex); - - Number residualCapacity = residualCapacityMap.get(currentEdge); - - residualCapacity = residualCapacity.intValue() - augmentingPathCapacity.intValue(); - residualCapacityMap.put(currentEdge, residualCapacity); - - E backEdge = mFlowGraph.findEdge(currentVertex, parentVertex); - residualCapacity = residualCapacityMap.get(backEdge); - residualCapacity = residualCapacity.intValue() + augmentingPathCapacity.intValue(); - residualCapacityMap.put(backEdge, residualCapacity); - currentVertex = parentVertex; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/package.html deleted file mode 100644 index 1ec243d845..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/flows/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Methods for calculating properties relating to network flows (such as max flow/min cut). - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java deleted file mode 100644 index d351f9b1ca..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.generators; - - - -/** - * An interface for algorithms that generate graphs that evolve iteratively. - * @author Scott White - */ -public interface EvolvingGraphGenerator extends GraphGenerator { - - /** - * Instructs the algorithm to evolve the graph N steps. - * @param numSteps number of steps to iterate from the current state - */ - void evolveGraph(int numSteps); - - /** - * Retrieves the total number of steps elapsed. - * @return number of elapsed steps - */ - int numIterations(); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java deleted file mode 100644 index a32906095f..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.generators; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; - -/** - * An interface for algorithms that generate graphs. - * @author Scott White - */ -public interface GraphGenerator extends Factory>{ } diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java deleted file mode 100644 index e84425cebb..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2009, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ - -package edu.uci.ics.jung.algorithms.generators; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * Simple generator of an m x n lattice where each vertex - * is incident with each of its neighbors (to the left, right, up, and down). - * May be toroidal, in which case the vertices on the edges are connected to - * their counterparts on the opposite edges as well. - * - *
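A hedged usage sketch of the lattice generator described above; the vertex and edge factories are illustrative counters, and UndirectedSparseGraph is assumed from the JUNG graph-implementation module.

import edu.uci.ics.jung.algorithms.generators.Lattice2DGenerator;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import org.apache.commons.collections15.Factory;

public class LatticeExample {
    public static void main(String[] args) {
        Factory<Graph<String, Integer>> graphFactory = new Factory<Graph<String, Integer>>() {
            public Graph<String, Integer> create() { return new UndirectedSparseGraph<String, Integer>(); }
        };
        Factory<String> vertexFactory = new Factory<String>() {
            int i = 0;
            public String create() { return "v" + i++; }
        };
        Factory<Integer> edgeFactory = new Factory<Integer>() {
            int i = 0;
            public Integer create() { return i++; }
        };

        // 4 x 4 non-toroidal lattice: 16 vertices, 24 undirected edges
        Lattice2DGenerator<String, Integer> gen =
                new Lattice2DGenerator<String, Integer>(graphFactory, vertexFactory, edgeFactory, 4, false);
        Graph<String, Integer> lattice = gen.create();
        System.out.println(lattice.getVertexCount() + " vertices, " + lattice.getEdgeCount() + " edges");
    }
}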

    If the graph factory supplied has a default edge type of {@code EdgeType.DIRECTED}, - * then edges will be created in both directions between adjacent vertices. - * - * @author Joshua O'Madadhain - */ -public class Lattice2DGenerator implements GraphGenerator -{ - protected int row_count; - protected int col_count; - protected boolean is_toroidal; - protected boolean is_directed; - protected Factory> graph_factory; - protected Factory vertex_factory; - protected Factory edge_factory; - private List v_array; - - /** - * Constructs a generator of square lattices of size {@code latticeSize} - * with the specified parameters. - * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param latticeSize the number of rows and columns of the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int latticeSize, boolean isToroidal) - { - this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, isToroidal); - } - - /** - * Creates a generator of {@code row_count} x {@code col_count} lattices - * with the specified parameters. - * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param row_count the number of rows in the lattice - * @param col_count the number of columns in the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, boolean isToroidal) - { - if (row_count < 2 || col_count < 2) - { - throw new IllegalArgumentException("Row and column counts must each be at least 2."); - } - - this.row_count = row_count; - this.col_count = col_count; - this.is_toroidal = isToroidal; - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.is_directed = (graph_factory.create().getDefaultEdgeType() == EdgeType.DIRECTED); - } - - /** - * @see edu.uci.ics.jung.algorithms.generators.GraphGenerator#create() - */ - @SuppressWarnings("unchecked") - public Graph create() - { - int vertex_count = row_count * col_count; - Graph graph = graph_factory.create(); - v_array = new ArrayList(vertex_count); - for (int i = 0; i < vertex_count; i++) - { - V v = vertex_factory.create(); - graph.addVertex(v); - v_array.add(i, v); - } - - int start = is_toroidal ? 0 : 1; - int end_row = is_toroidal ? row_count : row_count - 1; - int end_col = is_toroidal ? col_count : col_count - 1; - - // fill in edges - // down - for (int i = 0; i < end_row; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i+1, j)); - // right - for (int i = 0; i < row_count; i++) - for (int j = 0; j < end_col; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i, j+1)); - - // if the graph is directed, fill in the edges going the other direction... 
- if (graph.getDefaultEdgeType() == EdgeType.DIRECTED) - { - // up - for (int i = start; i < row_count; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i-1, j)); - // left - for (int i = 0; i < row_count; i++) - for (int j = start; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i, j-1)); - } - - return graph; - } - - /** - * Returns the number of edges found in a lattice of this generator's specifications. - * (This is useful for subclasses that may modify the generated graphs to add more edges.) - */ - public int getGridEdgeCount() - { - int boundary_adjustment = (is_toroidal ? 0 : 1); - int vertical_edge_count = col_count * (row_count - boundary_adjustment); - int horizontal_edge_count = row_count * (col_count - boundary_adjustment); - - return (vertical_edge_count + horizontal_edge_count) * (is_directed ? 2 : 1); - } - - protected int getIndex(int i, int j) - { - return ((mod(i, row_count)) * col_count) + (mod(j, col_count)); - } - - protected int mod(int i, int modulus) - { - int i_mod = i % modulus; - return i_mod >= 0 ? i_mod : i_mod + modulus; - } - - /** - * Returns the vertex at position ({@code i mod row_count, j mod col_count}). - */ - protected V getVertex(int i, int j) - { - return v_array.get(getIndex(i, j)); - } - - /** - * Returns the {@code i}th vertex (counting row-wise). - */ - protected V getVertex(int i) - { - return v_array.get(i); - } - - /** - * Returns the row in which vertex {@code i} is found. - */ - protected int getRow(int i) - { - return i / row_count; - } - - /** - * Returns the column in which vertex {@code i} is found. - */ - protected int getCol(int i) - { - return i % col_count; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/package.html deleted file mode 100644 index 441922dca5..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Methods for generating new (often random) graphs with various properties. - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java deleted file mode 100644 index 77b419b4a4..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.EvolvingGraphGenerator; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.MultiGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - *

- * Simple evolving scale-free random graph generator. At each time
- * step, a new vertex is created and is connected to existing vertices
- * according to the principle of "preferential attachment", whereby
- * vertices with higher degree have a higher probability of being
- * selected for attachment.
- *
- * At a given timestep, the probability p of creating an edge
- * between an existing vertex v and the newly added vertex is
- *
- *     p = (degree(v) + 1) / (|E| + |V|);
- *
- * where |E| and |V| are, respectively, the number of edges and vertices
- * currently in the network (counting neither the new vertex nor the other
- * edges that are being attached to it).
- *
- * Note that the formula specified in the original paper (cited below) was
- *
- *     p = degree(v) / |E|
- *
- * However, this would have meant that the probability of attachment for any
- * existing isolated vertex would be 0.  This version uses Lagrangian smoothing
- * to give each existing vertex a positive attachment probability.
- *
- * The graph created may be either directed or undirected (controlled by a
- * constructor parameter); the default is undirected.  If the graph is specified
- * to be directed, then the edges added will be directed from the newly added
- * vertex u to the existing vertex v, with probability proportional to the
- * indegree of v (number of edges directed towards v).  If the graph is specified
- * to be undirected, then the (undirected) edges added will connect u to v,
- * with probability proportional to the degree of v.
- *
- * The parallel constructor parameter specifies whether parallel edges may be created.
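The attachment rule above can be made concrete with a short sketch. This is not part of the generator itself; it is a hypothetical helper (the name attachmentProbability and the variables graph and v are placeholders) that computes the smoothed probability described in this comment using the public JUNG Graph accessors.

    import edu.uci.ics.jung.graph.Graph;

    // Illustrative sketch only: smoothed preferential-attachment probability for an
    // existing vertex v. The "- 1" excludes the newly added vertex, matching the
    // calculation performed in createRandomEdge() further down in this class.
    static <V, E> double attachmentProbability(Graph<V, E> graph, V v) {
        double degree = graph.degree(v);  // the class uses inDegree() when the graph is directed
        return (degree + 1) / (graph.getEdgeCount() + graph.getVertexCount() - 1);
    }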
    - * - * @see "A.-L. Barabasi and R. Albert, Emergence of scaling in random networks, Science 286, 1999." - * @author Scott White - * @author Joshua O'Madadhain - * @author Tom Nelson - adapted to jung2 - */ -public class BarabasiAlbertGenerator implements EvolvingGraphGenerator { - private Graph mGraph = null; - private int mNumEdgesToAttachPerStep; - private int mElapsedTimeSteps; - private Random mRandom; - protected List vertex_index; - protected int init_vertices; - protected Map index_vertex; - protected Factory> graphFactory; - protected Factory vertexFactory; - protected Factory edgeFactory; - - /** - * Constructs a new instance of the generator. - * @param init_vertices number of unconnected 'seed' vertices that the graph should start with - * @param numEdgesToAttach the number of edges that should be attached from the - * new vertex to pre-existing vertices at each time step - * @param directed specifies whether the graph and edges to be created should be directed or not - * @param parallel specifies whether the algorithm permits parallel edges - * @param seed random number seed - */ - public BarabasiAlbertGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int init_vertices, int numEdgesToAttach, - int seed, Set seedVertices) - { - assert init_vertices > 0 : "Number of initial unconnected 'seed' vertices " + - "must be positive"; - assert numEdgesToAttach > 0 : "Number of edges to attach " + - "at each time step must be positive"; - - mNumEdgesToAttachPerStep = numEdgesToAttach; - mRandom = new Random(seed); - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - this.init_vertices = init_vertices; - initialize(seedVertices); - } - - - /** - * Constructs a new instance of the generator, whose output will be an undirected graph, - * and which will use the current time as a seed for the random number generation. - * @param init_vertices number of vertices that the graph should start with - * @param numEdgesToAttach the number of edges that should be attached from the - * new vertex to pre-existing vertices at each time step - */ - public BarabasiAlbertGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int init_vertices, int numEdgesToAttach, Set seedVertices) { - this(graphFactory, vertexFactory, edgeFactory, init_vertices, numEdgesToAttach, (int) System.currentTimeMillis(), seedVertices); - } - - private void initialize(Set seedVertices) { - - mGraph = graphFactory.create(); - - vertex_index = new ArrayList(2*init_vertices); - index_vertex = new HashMap(2*init_vertices); - for (int i = 0; i < init_vertices; i++) { - V v = vertexFactory.create(); - mGraph.addVertex(v); - vertex_index.add(v); - index_vertex.put(v, i); - seedVertices.add(v); - } - - mElapsedTimeSteps = 0; - } - - private void createRandomEdge(Collection preexistingNodes, - V newVertex, Set> added_pairs) { - V attach_point; - boolean created_edge = false; - Pair endpoints; - do { - attach_point = vertex_index.get(mRandom.nextInt(vertex_index.size())); - - endpoints = new Pair(newVertex, attach_point); - - // if parallel edges are not allowed, skip attach_point if - // already exists; note that because of the way edges are added, we only need to check - // the list of candidate edges for duplicates. 
- if (!(mGraph instanceof MultiGraph)) - { - if (added_pairs.contains(endpoints)) - continue; - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED && - added_pairs.contains(new Pair(attach_point, newVertex))) - continue; - } - - double degree = mGraph.inDegree(attach_point); - - // subtract 1 from numVertices because we don't want to count newVertex - // (which has already been added to the graph, but not to vertex_index) - double attach_prob = (degree + 1) / (mGraph.getEdgeCount() + mGraph.getVertexCount() - 1); - if (attach_prob >= mRandom.nextDouble()) - created_edge = true; - } - while (!created_edge); - - added_pairs.add(endpoints); - - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { - added_pairs.add(new Pair(attach_point, newVertex)); - } - } - - public void evolveGraph(int numTimeSteps) { - - for (int i = 0; i < numTimeSteps; i++) { - evolveGraph(); - mElapsedTimeSteps++; - } - } - - private void evolveGraph() { - Collection preexistingNodes = mGraph.getVertices(); - V newVertex = vertexFactory.create(); - - mGraph.addVertex(newVertex); - - // generate and store the new edges; don't add them to the graph - // yet because we don't want to bias the degree calculations - // (all new edges in a timestep should be added in parallel) - Set> added_pairs = new HashSet>(mNumEdgesToAttachPerStep*3); - - for (int i = 0; i < mNumEdgesToAttachPerStep; i++) - createRandomEdge(preexistingNodes, newVertex, added_pairs); - - for (Pair pair : added_pairs) - { - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - if (mGraph.getDefaultEdgeType() != EdgeType.UNDIRECTED || - !mGraph.isNeighbor(v1, v2)) - mGraph.addEdge(edgeFactory.create(), pair); - } - // now that we're done attaching edges to this new vertex, - // add it to the index - vertex_index.add(newVertex); - index_vertex.put(newVertex, new Integer(vertex_index.size() - 1)); - } - - public int numIterations() { - return mElapsedTimeSteps; - } - - public Graph create() { - return mGraph; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java deleted file mode 100644 index e3bf04b68d..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java +++ /dev/null @@ -1,128 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.GraphGenerator; -import edu.uci.ics.jung.graph.Graph; - -/** - * Graph generator that generates undirected graphs with power-law degree distributions. 
- * @author Scott White - * @see "A Steady State Model for Graph Power Law by David Eppstein and Joseph Wang" - */ -public class EppsteinPowerLawGenerator implements GraphGenerator { - private int mNumVertices; - private int mNumEdges; - private int mNumIterations; - private double mMaxDegree; - private Random mRandom; - private Factory> graphFactory; - private Factory vertexFactory; - private Factory edgeFactory; - - /** - * Creates an instance with the specified factories and specifications. - * @param graphFactory the factory to use to generate the graph - * @param vertexFactory the factory to use to create vertices - * @param edgeFactory the factory to use to create edges - * @param numVertices the number of vertices for the generated graph - * @param numEdges the number of edges the generated graph will have, should be Theta(numVertices) - * @param r the number of iterations to use; the larger the value the better the graph's degree - * distribution will approximate a power-law - */ - public EppsteinPowerLawGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int numVertices, int numEdges, int r) { - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mNumVertices = numVertices; - mNumEdges = numEdges; - mNumIterations = r; - mRandom = new Random(); - } - - protected Graph initializeGraph() { - Graph graph = null; - graph = graphFactory.create(); - for(int i=0; i vertices = new ArrayList(graph.getVertices()); - while (graph.getEdgeCount() < mNumEdges) { - V u = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - if (!graph.isSuccessor(v,u)) { - graph.addEdge(edgeFactory.create(), u, v); - } - } - - double maxDegree = 0; - for (V v : graph.getVertices()) { - maxDegree = Math.max(graph.degree(v),maxDegree); - } - mMaxDegree = maxDegree; //(maxDegree+1)*(maxDegree)/2; - - return graph; - } - - /** - * Generates a graph whose degree distribution approximates a power-law. - * @return the generated graph - */ - public Graph create() { - Graph graph = initializeGraph(); - - List vertices = new ArrayList(graph.getVertices()); - for (int rIdx = 0; rIdx < mNumIterations; rIdx++) { - - V v = null; - int degree = 0; - do { - v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - degree = graph.degree(v); - - } while (degree == 0); - - List edges = new ArrayList(graph.getIncidentEdges(v)); - E randomExistingEdge = edges.get((int) (mRandom.nextDouble()*degree)); - - // FIXME: look at email thread on a more efficient RNG for arbitrary distributions - - V x = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V y = null; - do { - y = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - - } while (mRandom.nextDouble() > ((graph.degree(y)+1)/mMaxDegree)); - - if (!graph.isSuccessor(y,x) && x != y) { - graph.removeEdge(randomExistingEdge); - graph.addEdge(edgeFactory.create(), x, y); - } - } - - return graph; - } - - /** - * Sets the seed for the random number generator. - * @param seed input to the random number generator. 
- */ - public void setSeed(long seed) { - mRandom.setSeed(seed); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java deleted file mode 100644 index 3a33730802..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java +++ /dev/null @@ -1,100 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.GraphGenerator; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Generates a random graph using the Erdos-Renyi binomial model - * (each pair of vertices is connected with probability p). - * - * @author William Giordano, Scott White, Joshua O'Madadhain - */ -public class ErdosRenyiGenerator implements GraphGenerator { - private int mNumVertices; - private double mEdgeConnectionProbability; - private Random mRandom; - Factory> graphFactory; - Factory vertexFactory; - Factory edgeFactory; - - /** - * - * @param numVertices number of vertices graph should have - * @param p Connection's probability between 2 vertices - */ - public ErdosRenyiGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int numVertices,double p) - { - if (numVertices <= 0) { - throw new IllegalArgumentException("A positive # of vertices must be specified."); - } - mNumVertices = numVertices; - if (p < 0 || p > 1) { - throw new IllegalArgumentException("p must be between 0 and 1."); - } - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mEdgeConnectionProbability = p; - mRandom = new Random(); - } - - /** - * Returns a graph in which each pair of vertices is connected by - * an undirected edge with the probability specified by the constructor. - */ - public Graph create() { - UndirectedGraph g = graphFactory.create(); - for(int i=0; i list = new ArrayList(g.getVertices()); - - for (int i = 0; i < mNumVertices-1; i++) { - V v_i = list.get(i); - for (int j = i+1; j < mNumVertices; j++) { - V v_j = list.get(j); - if (mRandom.nextDouble() < mEdgeConnectionProbability) { - g.addEdge(edgeFactory.create(), v_i, v_j); - } - } - } - return g; - } - - /** - * Sets the seed of the internal random number generator to {@code seed}. - * Enables consistent behavior. 
- */ - public void setSeed(long seed) { - mRandom.setSeed(seed); - } -} - - - - - - - - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java deleted file mode 100644 index de01b69b5a..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java +++ /dev/null @@ -1,184 +0,0 @@ - -package edu.uci.ics.jung.algorithms.generators.random; - -/* -* Copyright (c) 2009, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ - -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.Lattice2DGenerator; -import edu.uci.ics.jung.algorithms.util.WeightedChoice; -import edu.uci.ics.jung.graph.Graph; - -/** - * Graph generator that produces a random graph with small world properties. - * The underlying model is an mxn (optionally toroidal) lattice. Each node u - * has four local connections, one to each of its neighbors, and - * in addition 1+ long range connections to some node v where v is chosen randomly according to - * probability proportional to d^-alpha where d is the lattice distance between u and v and alpha - * is the clustering exponent. - * - * @see "Navigation in a small world J. Kleinberg, Nature 406(2000), 845." - * @author Joshua O'Madadhain - */ -public class KleinbergSmallWorldGenerator extends Lattice2DGenerator { - private double clustering_exponent; - private Random random; - private int num_connections = 1; - - /** - * Creates - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param latticeSize - * @param clusteringExponent - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int latticeSize, double clusteringExponent) - { - this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, clusteringExponent); - } - - /** - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param row_count - * @param col_count - * @param clusteringExponent - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, double clusteringExponent) - { - super(graph_factory, vertex_factory, edge_factory, row_count, col_count, true); - clustering_exponent = clusteringExponent; - initialize(); - } - - /** - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param row_count - * @param col_count - * @param clusteringExponent - * @param isToroidal - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, double clusteringExponent, - boolean isToroidal) - { - super(graph_factory, vertex_factory, edge_factory, row_count, col_count, isToroidal); - clustering_exponent = clusteringExponent; - initialize(); - } - - private void initialize() - { - this.random = new Random(); - } - - /** - * Sets the {@code Random} instance used by this instance. Useful for - * unit testing. 
- */ - public void setRandom(Random random) - { - this.random = random; - } - - /** - * Sets the seed of the internal random number generator. May be used to provide repeatable - * experiments. - */ - public void setRandomSeed(long seed) - { - random.setSeed(seed); - } - - /** - * Sets the number of new 'small-world' connections (outgoing edges) to be added to each vertex. - */ - public void setConnectionCount(int num_connections) - { - if (num_connections <= 0) - { - throw new IllegalArgumentException("Number of new connections per vertex must be >= 1"); - } - this.num_connections = num_connections; - } - - /** - * Returns the number of new 'small-world' connections to be made to each vertex. - */ - public int getConnectionCount() - { - return this.num_connections; - } - - /** - * Generates a random small world network according to the parameters given - * @return a random small world graph - */ - @Override - public Graph create() - { - Graph graph = super.create(); - - // TODO: For toroidal graphs, we can make this more clever by pre-creating the WeightedChoice object - // and using the output as an offset to the current vertex location. - WeightedChoice weighted_choice; - - // Add long range connections - for (int i = 0; i < graph.getVertexCount(); i++) - { - V source = getVertex(i); - int row = getRow(i); - int col = getCol(i); - int row_offset = row < row_count/2 ? -row_count : row_count; - int col_offset = col < col_count/2 ? -col_count : col_count; - - Map vertex_weights = new HashMap(); - for (int j = 0; j < row_count; j++) - { - for (int k = 0; k < col_count; k++) - { - if (j == row && k == col) - continue; - int v_dist = Math.abs(j - row); - int h_dist = Math.abs(k - col); - if (is_toroidal) - { - v_dist = Math.min(v_dist, Math.abs(j - row+row_offset)); - h_dist = Math.min(h_dist, Math.abs(k - col+col_offset)); - } - int distance = v_dist + h_dist; - if (distance < 2) - continue; - else - vertex_weights.put(getVertex(j,k), (float)Math.pow(distance, -clustering_exponent)); - } - } - - for (int j = 0; j < this.num_connections; j++) { - weighted_choice = new WeightedChoice(vertex_weights, random); - V target = weighted_choice.nextItem(); - graph.addEdge(edge_factory.create(), source, target); - } - } - - return graph; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java deleted file mode 100644 index a39a6404bd..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Jul 2, 2003 - * - */ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * - * Generates a mixed-mode random graph based on the output of BarabasiAlbertGenerator. - * Primarily intended for providing a heterogeneous sample graph for visualization testing, etc. 
- * - */ -public class MixedRandomGraphGenerator { - - /** - * Equivalent to generateMixedRandomGraph(edge_weight, num_vertices, true). - */ - public static Graph generateMixedRandomGraph( - Factory> graphFactory, - Factory vertexFactory, - Factory edgeFactory, - Map edge_weight, - int num_vertices, Set seedVertices) - { - return generateMixedRandomGraph(graphFactory, vertexFactory, edgeFactory, - edge_weight, num_vertices, true, seedVertices); - } - - /** - * Returns a random mixed-mode graph. Starts with a randomly generated - * Barabasi-Albert (preferential attachment) generator - * (4 initial vertices, 3 edges added at each step, and num_vertices - 4 evolution steps). - * Then takes the resultant graph, replaces random undirected edges with directed - * edges, and assigns random weights to each edge. - */ - public static Graph generateMixedRandomGraph( - Factory> graphFactory, - Factory vertexFactory, - Factory edgeFactory, - Map edge_weights, - int num_vertices, boolean parallel, Set seedVertices) - { - int seed = (int)(Math.random() * 10000); - BarabasiAlbertGenerator bag = - new BarabasiAlbertGenerator(graphFactory, vertexFactory, edgeFactory, - 4, 3, //false, parallel, - seed, seedVertices); - bag.evolveGraph(num_vertices - 4); - Graph ug = bag.create(); - - // create a SparseMultigraph version of g - Graph g = graphFactory.create(); - //new SparseMultigraph(); - for(V v : ug.getVertices()) { - g.addVertex(v); - } - - // randomly replace some of the edges by directed edges to - // get a mixed-mode graph, add random weights - - for(E e : ug.getEdges()) { - V v1 = ug.getEndpoints(e).getFirst(); - V v2 = ug.getEndpoints(e).getSecond(); - - E me = edgeFactory.create(); - g.addEdge(me, v1, v2, Math.random() < .5 ? EdgeType.DIRECTED : EdgeType.UNDIRECTED); - edge_weights.put(me, Math.random()); - } - - return g; - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/package.html deleted file mode 100644 index 9f85614a82..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/generators/random/package.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - -Methods for generating random graphs with various properties. These include: -
      -
• BarabasiAlbertGenerator: scale-free graphs using the preferential attachment heuristic.
• EppsteinPowerLawGenerator: graphs whose degree distribution approximates a power law.
• ErdosRenyiGenerator: graphs for which edges are created with a specified probability.
• MixedRandomGraphGenerator: takes the output of BarabasiAlbertGenerator and perturbs it to generate a mixed-mode analog with both directed and undirected edges.
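For orientation, a minimal construction sketch follows. It assumes the commons-collections15 Factory interface, an UndirectedSparseGraph backing store, and placeholder Integer/String vertex and edge types, none of which are prescribed by this package; the same wiring pattern applies to the other generators listed above.

    import org.apache.commons.collections15.Factory;
    import edu.uci.ics.jung.algorithms.generators.random.ErdosRenyiGenerator;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedGraph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    // Placeholder factories: Integer vertices and String edge labels.
    Factory<UndirectedGraph<Integer, String>> graphFactory =
            new Factory<UndirectedGraph<Integer, String>>() {
                public UndirectedGraph<Integer, String> create() {
                    return new UndirectedSparseGraph<Integer, String>();
                }
            };
    Factory<Integer> vertexFactory = new Factory<Integer>() {
        private int count;
        public Integer create() { return count++; }
    };
    Factory<String> edgeFactory = new Factory<String>() {
        private int count;
        public String create() { return "e" + count++; }
    };

    // 100 vertices, each pair connected independently with probability 0.05.
    ErdosRenyiGenerator<Integer, String> generator =
            new ErdosRenyiGenerator<Integer, String>(graphFactory, vertexFactory, edgeFactory, 100, 0.05);
    Graph<Integer, String> g = generator.create();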
    • - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java deleted file mode 100644 index 6ea8bc84a7..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java +++ /dev/null @@ -1,388 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.text.DecimalFormat; -import java.text.Format; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.Graph; - -/** - * Abstract class for algorithms that rank nodes or edges by some "importance" metric. Provides a common set of - * services such as: - *
        - *
• storing rank scores
• getters and setters for rank scores
• computing default edge weights
• normalizing default or user-provided edge transition weights
• normalizing rank scores
• automatic cleanup of decorations
• creation of Ranking list
• print rankings in sorted order by rank
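Taken together, these services support one small, uniform workflow. The sketch below is hypothetical: it assumes a concrete subclass (here the BetweennessCentrality ranker deleted later in this diff) and an existing Graph g, and it uses only the accessors listed above.

    BetweennessCentrality<Integer, String> ranker = new BetweennessCentrality<Integer, String>(g);
    ranker.setRemoveRankScoresOnFinalize(false);  // keep per-vertex scores queryable after ranking
    ranker.evaluate();                            // inherited from IterativeProcess
    for (Ranking<?> ranking : ranker.getRankings()) {  // descending order by rank score
        System.out.println(ranking.getRanked() + "\t" + ranking.rankScore);
    }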
      - *

      - * By default, all rank scores are removed from the vertices (or edges) being ranked. - * @author Scott White - */ -public abstract class AbstractRanker extends IterativeProcess { - private Graph mGraph; - private List> mRankings; - private boolean mRemoveRankScoresOnFinalize; - private boolean mRankNodes; - private boolean mRankEdges; - private boolean mNormalizeRankings; - protected Map> vertexRankScores = - LazyMap.decorate( - new HashMap>(), - new Factory>() { - public Map create() { - return new HashMap(); - }}); - protected Map> edgeRankScores = - LazyMap.decorate( - new HashMap>(), - new Factory>() { - public Map create() { - return new HashMap(); - }}); - private Map edgeWeights = new HashMap(); - - protected void initialize(Graph graph, boolean isNodeRanker, - boolean isEdgeRanker) { - if (!isNodeRanker && !isEdgeRanker) - throw new IllegalArgumentException("Must rank edges, vertices, or both"); - mGraph = graph; - mRemoveRankScoresOnFinalize = true; - mNormalizeRankings = true; - mRankNodes = isNodeRanker; - mRankEdges = isEdgeRanker; - } - - /** - * @return all rankScores - */ - public Map> getVertexRankScores() { - return vertexRankScores; - } - - public Map> getEdgeRankScores() { - return edgeRankScores; - } - - /** - * @return the rankScores - */ - public Map getVertexRankScores(Object key) { - return vertexRankScores.get(key); - } - - public Map getEdgeRankScores(Object key) { - return edgeRankScores.get(key); - } - - protected Collection getVertices() { - return mGraph.getVertices(); - } - - protected int getVertexCount() { - return mGraph.getVertexCount(); - } - - protected Graph getGraph() { - return mGraph; - } - - @Override - public void reset() { - } - - /** - * Returns true if this ranker ranks nodes, and - * false otherwise. - */ - public boolean isRankingNodes() { - return mRankNodes; - } - - /** - * Returns true if this ranker ranks edges, and - * false otherwise. - */ - public boolean isRankingEdges() { - return mRankEdges; - } - - /** - * Instructs the ranker whether or not it should remove the rank scores from the nodes (or edges) once the ranks - * have been computed. - * @param removeRankScoresOnFinalize true if the rank scores are to be removed, false otherwise - */ - public void setRemoveRankScoresOnFinalize(boolean removeRankScoresOnFinalize) { - this.mRemoveRankScoresOnFinalize = removeRankScoresOnFinalize; - } - - protected void onFinalize(Object e) {} - - /** - * The user datum key used to store the rank score. 
- * @return the key - */ - abstract public Object getRankScoreKey(); - - - @Override - protected void finalizeIterations() { - List> sortedRankings = new ArrayList>(); - - int id = 1; - if (mRankNodes) { - for (V currentVertex : getVertices()) { - Ranking ranking = new Ranking(id,getVertexRankScore(currentVertex),currentVertex); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.vertexRankScores.get(getRankScoreKey()).remove(currentVertex); - } - id++; - onFinalize(currentVertex); - } - } - if (mRankEdges) { - for (E currentEdge : mGraph.getEdges()) { - - Ranking ranking = new Ranking(id,getEdgeRankScore(currentEdge),currentEdge); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.edgeRankScores.get(getRankScoreKey()).remove(currentEdge); - } - id++; - onFinalize(currentEdge); - } - } - - mRankings = sortedRankings; - Collections.sort(mRankings); - } - - /** - * Retrieves the list of ranking instances in descending sorted order by rank score - * If the algorithm is ranking edges, the instances will be of type EdgeRanking, otherwise - * if the algorithm is ranking nodes the instances will be of type NodeRanking - * @return the list of rankings - */ - public List> getRankings() { - return mRankings; - } - - /** - * Return a list of the top k rank scores. - * @param topKRankings the value of k to use - * @return list of rank scores - */ - public List getRankScores(int topKRankings) { - List scores = new ArrayList(); - int count=1; - for (Ranking currentRanking : getRankings()) { - if (count > topKRankings) { - return scores; - } - scores.add(currentRanking.rankScore); - count++; - } - - return scores; - } - - /** - * Given an edge or node, returns the corresponding rank score. This is a default - * implementation of getRankScore which assumes the decorations are of type MutableDouble. - * This method only returns legal values if setRemoveRankScoresOnFinalize(false) was called - * prior to evaluate(). 
- * @return the rank score value - */ - public double getVertexRankScore(V v) { - Number rankScore = vertexRankScores.get(getRankScoreKey()).get(v); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } - } - - public double getVertexRankScore(V v, Object key) { - return vertexRankScores.get(key).get(v).doubleValue(); - } - - public double getEdgeRankScore(E e) { - Number rankScore = edgeRankScores.get(getRankScoreKey()).get(e); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } - } - - public double getEdgeRankScore(E e, Object key) { - return edgeRankScores.get(key).get(e).doubleValue(); - } - - protected void setVertexRankScore(V v, double rankValue, Object key) { - vertexRankScores.get(key).put(v, rankValue); - } - - protected void setEdgeRankScore(E e, double rankValue, Object key) { - edgeRankScores.get(key).put(e, rankValue); - } - - protected void setVertexRankScore(V v, double rankValue) { - setVertexRankScore(v,rankValue, getRankScoreKey()); - } - - protected void setEdgeRankScore(E e, double rankValue) { - setEdgeRankScore(e, rankValue, getRankScoreKey()); - } - - protected void removeVertexRankScore(V v, Object key) { - vertexRankScores.get(key).remove(v); - } - - protected void removeEdgeRankScore(E e, Object key) { - edgeRankScores.get(key).remove(e); - } - - protected void removeVertexRankScore(V v) { - vertexRankScores.get(getRankScoreKey()).remove(v); - } - - protected void removeEdgeRankScore(E e) { - edgeRankScores.get(getRankScoreKey()).remove(e); - } - - protected double getEdgeWeight(E e) { - return edgeWeights.get(e).doubleValue(); - } - - protected void setEdgeWeight(E e, double weight) { - edgeWeights.put(e, weight); - } - - public void setEdgeWeights(Map edgeWeights) { - this.edgeWeights = edgeWeights; - } - - /** - * @return the edgeWeights - */ - public Map getEdgeWeights() { - return edgeWeights; - } - - protected void assignDefaultEdgeTransitionWeights() { - - for (V currentVertex : getVertices()) { - - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); - - double numOutEdges = outgoingEdges.size(); - for (E currentEdge : outgoingEdges) { - setEdgeWeight(currentEdge,1.0/numOutEdges); - } - } - } - - protected void normalizeEdgeTransitionWeights() { - - for (V currentVertex : getVertices()) { - - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); - - double totalEdgeWeight = 0; - for (E currentEdge : outgoingEdges) { - totalEdgeWeight += getEdgeWeight(currentEdge); - } - - for (E currentEdge : outgoingEdges) { - setEdgeWeight(currentEdge,getEdgeWeight(currentEdge)/totalEdgeWeight); - } - } - } - - protected void normalizeRankings() { - if (!mNormalizeRankings) { - return; - } - double totalWeight = 0; - - for (V currentVertex : getVertices()) { - totalWeight += getVertexRankScore(currentVertex); - } - - for (V currentVertex : getVertices()) { - setVertexRankScore(currentVertex,getVertexRankScore(currentVertex)/totalWeight); - } - } - - /** - * Print the rankings to standard out in descending order of rank score - * @param verbose if true, include information about the actual rank order as well as - * the original position of the vertex before it was ranked - * @param printScore if true, include the actual value of the rank score - */ - public void printRankings(boolean verbose,boolean 
printScore) { - double total = 0; - Format formatter = new DecimalFormat("#0.#######"); - int rank = 1; - - for (Ranking currentRanking : getRankings()) { - double rankScore = currentRanking.rankScore; - if (verbose) { - System.out.print("Rank " + rank + ": "); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.print("\tVertex Id: " + currentRanking.originalPos); - System.out.print(" (" + currentRanking.getRanked() + ")"); - System.out.println(); - } else { - System.out.print(rank + "\t"); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.println("\t" + currentRanking.originalPos); - - } - total += rankScore; - rank++; - } - - if (verbose) { - System.out.println("Total: " + formatter.format(total)); - } - } - - /** - * Allows the user to specify whether or not s/he wants the rankings to be normalized. - * In some cases, this will have no effect since the algorithm doesn't allow normalization - * as an option - * @param normalizeRankings - */ - public void setNormalizeRankings(boolean normalizeRankings) { - mNormalizeRankings = normalizeRankings; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java deleted file mode 100644 index 25906f206a..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java +++ /dev/null @@ -1,190 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Stack; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Computes betweenness centrality for each vertex and edge in the graph. The result is that each vertex - * and edge has a UserData element of type MutableDouble whose key is 'centrality.BetweennessCentrality'. - * Note: Many social network researchers like to normalize the betweenness values by dividing the values by - * (n-1)(n-2)/2. The values given here are unnormalized.

      - * - * A simple example of usage is: - *

      - * BetweennessCentrality ranker = new BetweennessCentrality(someGraph);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
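When the raw centrality values are wanted rather than the printed report, the accessors inherited from AbstractRanker can be used directly. The sketch below is an illustration under assumptions (a graph g and a vertex v of interest are placeholders); the final line applies the optional (n-1)(n-2)/2 normalization mentioned above.

    BetweennessCentrality<Integer, String> ranker = new BetweennessCentrality<Integer, String>(g);
    ranker.setRemoveRankScoresOnFinalize(false);  // required before evaluate() to read scores back
    ranker.evaluate();
    double raw = ranker.getVertexRankScore(v);
    int n = g.getVertexCount();
    double normalized = raw / ((n - 1) * (n - 2) / 2.0);  // optional; the values above are unnormalized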
      - * - * Running time is: O(n^2 + nm). - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." - * @author Scott White - * @author Tom Nelson converted to jung2 - */ - -public class BetweennessCentrality extends AbstractRanker { - - public static final String CENTRALITY = "centrality.BetweennessCentrality"; - - /** - * Constructor which initializes the algorithm - * @param g the graph whose nodes are to be analyzed - */ - public BetweennessCentrality(Graph g) { - initialize(g, true, true); - } - - public BetweennessCentrality(Graph g, boolean rankNodes) { - initialize(g, rankNodes, true); - } - - public BetweennessCentrality(Graph g, boolean rankNodes, boolean rankEdges) - { - initialize(g, rankNodes, rankEdges); - } - - protected void computeBetweenness(Graph graph) { - - Map decorator = new HashMap(); - Map bcVertexDecorator = - vertexRankScores.get(getRankScoreKey()); - bcVertexDecorator.clear(); - Map bcEdgeDecorator = - edgeRankScores.get(getRankScoreKey()); - bcEdgeDecorator.clear(); - - Collection vertices = graph.getVertices(); - - for (V s : vertices) { - - initializeData(graph,decorator); - - decorator.get(s).numSPs = 1; - decorator.get(s).distance = 0; - - Stack stack = new Stack(); - Buffer queue = new UnboundedFifoBuffer(); - queue.add(s); - - while (!queue.isEmpty()) { - V v = queue.remove(); - stack.push(v); - - for(V w : getGraph().getSuccessors(v)) { - - if (decorator.get(w).distance < 0) { - queue.add(w); - decorator.get(w).distance = decorator.get(v).distance + 1; - } - - if (decorator.get(w).distance == decorator.get(v).distance + 1) { - decorator.get(w).numSPs += decorator.get(v).numSPs; - decorator.get(w).predecessors.add(v); - } - } - } - - while (!stack.isEmpty()) { - V w = stack.pop(); - - for (V v : decorator.get(w).predecessors) { - - double partialDependency = (decorator.get(v).numSPs / decorator.get(w).numSPs); - partialDependency *= (1.0 + decorator.get(w).dependency); - decorator.get(v).dependency += partialDependency; - E currentEdge = getGraph().findEdge(v, w); - double edgeValue = bcEdgeDecorator.get(currentEdge).doubleValue(); - edgeValue += partialDependency; - bcEdgeDecorator.put(currentEdge, edgeValue); - } - if (w != s) { - double bcValue = bcVertexDecorator.get(w).doubleValue(); - bcValue += decorator.get(w).dependency; - bcVertexDecorator.put(w, bcValue); - } - } - } - - if(graph instanceof UndirectedGraph) { - for (V v : vertices) { - double bcValue = bcVertexDecorator.get(v).doubleValue(); - bcValue /= 2.0; - bcVertexDecorator.put(v, bcValue); - } - for (E e : graph.getEdges()) { - double bcValue = bcEdgeDecorator.get(e).doubleValue(); - bcValue /= 2.0; - bcEdgeDecorator.put(e, bcValue); - } - } - - for (V vertex : vertices) { - decorator.remove(vertex); - } - } - - private void initializeData(Graph g, Map decorator) { - for (V vertex : g.getVertices()) { - - Map bcVertexDecorator = vertexRankScores.get(getRankScoreKey()); - if(bcVertexDecorator.containsKey(vertex) == false) { - bcVertexDecorator.put(vertex, 0.0); - } - decorator.put(vertex, new BetweennessData()); - } - for (E e : g.getEdges()) { - - Map bcEdgeDecorator = edgeRankScores.get(getRankScoreKey()); - if(bcEdgeDecorator.containsKey(e) == false) { - bcEdgeDecorator.put(e, 0.0); - } - } - } - - /** - * the user datum key used to store the rank scores - * @return the key - */ - @Override - public String getRankScoreKey() { - return CENTRALITY; - } - - @Override - public void step() { - 
computeBetweenness(getGraph()); - } - - class BetweennessData { - double distance; - double numSPs; - List predecessors; - double dependency; - - BetweennessData() { - distance = -1; - numSPs = 0; - predecessors = new ArrayList(); - dependency = 0; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java deleted file mode 100644 index 9ee4030ce3..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java +++ /dev/null @@ -1,135 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import edu.uci.ics.jung.graph.DirectedGraph; - - -/** - * Algorithm variant of PageRankWithPriors that computes the importance of a node based upon taking fixed-length random - * walks out from the root set and then computing the stationary probability of being at each node. Specifically, it computes - * the relative probability that the markov chain will spend at any particular node, given that it start in the root - * set and ends after k steps. - *

      - * A simple example of usage is: - *

      - * KStepMarkov ranker = new KStepMarkov(someGraph,rootSet,6,null);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
      - *
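To make the snippet above concrete, the root set and the null edge-weight map can be supplied as follows. This is a sketch under assumptions: a DirectedGraph g with Integer vertices and String edges, and vertex 0 chosen as an arbitrary root; passing null triggers the default, normalized transition weights assigned by the constructor.

    import java.util.HashSet;
    import java.util.Set;

    Set<Integer> rootSet = new HashSet<Integer>();
    rootSet.add(0);  // arbitrary placeholder root vertex

    // k = 6 sits in the 4-8 range recommended in the constructor documentation below;
    // null -> default edge transition weights.
    KStepMarkov<Integer, String> ranker = new KStepMarkov<Integer, String>(g, rootSet, 6, null);
    ranker.evaluate();
    ranker.printRankings(true, true);  // verbose output including the rank scores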

      - * - * @author Scott White - * @author Tom Nelson - adapter to jung2 - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class KStepMarkov extends RelativeAuthorityRanker { - public final static String RANK_SCORE = "jung.algorithms.importance.KStepMarkovExperimental.RankScore"; - private final static String CURRENT_RANK = "jung.algorithms.importance.KStepMarkovExperimental.CurrentRank"; - private int mNumSteps; - HashMap mPreviousRankingsMap; - - /** - * Construct the algorihm instance and initializes the algorithm. - * @param graph the graph to be analyzed - * @param priors the set of root nodes - * @param k positive integer parameter which controls the relative tradeoff between a distribution "biased" towards - * R and the steady-state distribution which is independent of where the Markov-process started. Generally values - * between 4-8 are reasonable - * @param edgeWeights the weight for each edge - */ - public KStepMarkov(DirectedGraph graph, Set priors, int k, Map edgeWeights) { - super.initialize(graph,true,false); - mNumSteps = k; - setPriors(priors); - initializeRankings(); - if (edgeWeights == null) { - assignDefaultEdgeTransitionWeights(); - } else { - setEdgeWeights(edgeWeights); - } - normalizeEdgeTransitionWeights(); - } - - /** - * The user datum key used to store the rank scores. - * @return the key - */ - @Override - public String getRankScoreKey() { - return RANK_SCORE; - } - - protected void incrementRankScore(V v, double rankValue) { - double value = getVertexRankScore(v, RANK_SCORE); - value += rankValue; - setVertexRankScore(v, value, RANK_SCORE); - } - - protected double getCurrentRankScore(V v) { - return getVertexRankScore(v, CURRENT_RANK); - } - - protected void setCurrentRankScore(V v, double rankValue) { - setVertexRankScore(v, rankValue, CURRENT_RANK); - } - - protected void initializeRankings() { - mPreviousRankingsMap = new HashMap(); - for (V v : getVertices()) { - Set priors = getPriors(); - double numPriors = priors.size(); - - if (getPriors().contains(v)) { - setVertexRankScore(v, 1.0/ numPriors); - setCurrentRankScore(v, 1.0/ numPriors); - mPreviousRankingsMap.put(v,1.0/numPriors); - } else { - setVertexRankScore(v, 0); - setCurrentRankScore(v, 0); - mPreviousRankingsMap.put(v, 0); - } - } - } - @Override - public void step() { - - for (int i=0;i incomingEdges = getGraph().getInEdges(v); - - double currentPageRankSum = 0; - for (E e : incomingEdges) { - double currentWeight = getEdgeWeight(e); - currentPageRankSum += - mPreviousRankingsMap.get(getGraph().getOpposite(v,e)).doubleValue()*currentWeight; - } - setCurrentRankScore(v,currentPageRankSum); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java deleted file mode 100644 index b96e559341..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java +++ /dev/null @@ -1,77 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - - -/** - * Abstract data container for ranking objects. 
Stores common data relevant to both node and edge rankings, namely, - * the original position of the instance in the list and the actual ranking score. - * @author Scott White - */ -public class Ranking implements Comparable { - /** - * The original (0-indexed) position of the instance being ranked - */ - public int originalPos; - /** - * The actual rank score (normally between 0 and 1) - */ - public double rankScore; - - /** - * what is being ranked - */ - private V ranked; - - /** - * Constructor which allows values to be set on construction - * @param originalPos The original (0-indexed) position of the instance being ranked - * @param rankScore The actual rank score (normally between 0 and 1) - */ - public Ranking(int originalPos, double rankScore, V ranked) { - this.originalPos = originalPos; - this.rankScore = rankScore; - this.ranked = ranked; - } - - /** - * Compares two ranking based on the rank score. - * @param o The other ranking - * @return -1 if the other ranking is higher, 0 if they are equal, and 1 if this ranking is higher - */ - public int compareTo(Object o) { - - Ranking otherRanking = (Ranking) o; - return Double.compare(otherRanking.rankScore,rankScore); - } - - /** - * Returns the rank score as a string. - * @return the stringified rank score - */ - @Override - public String toString() { - return String.valueOf(rankScore); - } - - /** - * @return the ranked - */ - public V getRanked() { - return ranked; - } - - /** - * @param ranked the ranked to set - */ - public void setRanked(V ranked) { - this.ranked = ranked; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java deleted file mode 100644 index b40ba8d4a6..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - - -/** - * This class provides basic infrastructure for relative authority algorithms that compute the importance of nodes - * relative to one or more root nodes. The services provided are: - *

        - *
• The set of root nodes (priors) is stored and maintained
• Getters and setters for the prior rank score are provided
      - * - * @author Scott White - */ -public abstract class RelativeAuthorityRanker extends AbstractRanker { - private Set mPriors; - /** - * The default key used for the user datum key corresponding to prior rank scores. - */ - - protected Map priorRankScoreMap = new HashMap(); - /** - * Cleans up all of the prior rank scores on finalize. - */ - @Override - protected void finalizeIterations() { - super.finalizeIterations(); - priorRankScoreMap.clear(); - } - - /** - * Retrieves the value of the prior rank score. - * @param v the root node (prior) - * @return the prior rank score - */ - protected double getPriorRankScore(V v) { - return priorRankScoreMap.get(v).doubleValue(); - - } - - /** - * Allows the user to specify a value to set for the prior rank score - * @param v the root node (prior) - * @param value the score to set to - */ - public void setPriorRankScore(V v, double value) { - this.priorRankScoreMap.put(v, value); - } - - /** - * Retrieves the set of priors. - * @return the set of root nodes (priors) - */ - protected Set getPriors() { return mPriors; } - - /** - * Specifies which vertices are root nodes (priors). - * @param priors the root nodes - */ - protected void setPriors(Set priors) { mPriors = priors; } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java deleted file mode 100644 index bd715ce7ed..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java +++ /dev/null @@ -1,194 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.DirectedGraph; - - - -/** - * This algorithm measures the importance of nodes based upon both the number and length of disjoint paths that lead - * to a given node from each of the nodes in the root set. Specifically the formula for measuring the importance of a - * node is given by: I(t|R) = sum_i=1_|P(r,t)|_{alpha^|p_i|} where alpha is the path decay coefficient, p_i is path i - * and P(r,t) is a set of maximum-sized node-disjoint paths from r to t. - *

      - * This algorithm uses heuristic breadth-first search to try and find the maximum-sized set of node-disjoint paths - * between two nodes. As such, it is not guaranteed to give exact answers. - *

      - * A simple example of usage is: - *

- * WeightedNIPaths ranker = new WeightedNIPaths(someGraph, vertexFactory, edgeFactory, 2.0, 6, rootSet);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
      - * - * @author Scott White - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class WeightedNIPaths extends AbstractRanker { - public final static String WEIGHTED_NIPATHS_KEY = "jung.algorithms.importance.WEIGHTED_NIPATHS_KEY"; - private double mAlpha; - private int mMaxDepth; - private Set mPriors; - private Map pathIndices = new HashMap(); - private Map roots = new HashMap(); - private Map> pathsSeenMap = new HashMap>(); - private Factory vertexFactory; - private Factory edgeFactory; - - /** - * Constructs and initializes the algorithm. - * @param graph the graph whose nodes are being measured for their importance - * @param alpha the path decay coefficient (>= 1); 2 is recommended - * @param maxDepth the maximal depth to search out from the root set - * @param priors the root set (starting vertices) - */ - public WeightedNIPaths(DirectedGraph graph, Factory vertexFactory, - Factory edgeFactory, double alpha, int maxDepth, Set priors) { - super.initialize(graph, true,false); - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mAlpha = alpha; - mMaxDepth = maxDepth; - mPriors = priors; - for (V v : graph.getVertices()) { - super.setVertexRankScore(v, 0.0); - } - } - - protected void incrementRankScore(V v, double rankValue) { - setVertexRankScore(v, getVertexRankScore(v) + rankValue); - } - - protected void computeWeightedPathsFromSource(V root, int depth) { - - int pathIdx = 1; - - for (E e : getGraph().getOutEdges(root)) { - this.pathIndices.put(e, pathIdx); - this.roots.put(e, root); - newVertexEncountered(pathIdx, getGraph().getEndpoints(e).getSecond(), root); - pathIdx++; - } - - List edges = new ArrayList(); - - V virtualNode = vertexFactory.create(); - getGraph().addVertex(virtualNode); - E virtualSinkEdge = edgeFactory.create(); - - getGraph().addEdge(virtualSinkEdge, virtualNode, root); - edges.add(virtualSinkEdge); - - int currentDepth = 0; - while (currentDepth <= depth) { - - double currentWeight = Math.pow(mAlpha, -1.0 * currentDepth); - for (E currentEdge : edges) { - incrementRankScore(getGraph().getEndpoints(currentEdge).getSecond(),// - currentWeight); - } - - if ((currentDepth == depth) || (edges.size() == 0)) break; - - List newEdges = new ArrayList(); - - for (E currentSourceEdge : edges) { //Iterator sourceEdgeIt = edges.iterator(); sourceEdgeIt.hasNext();) { - Number sourcePathIndex = this.pathIndices.get(currentSourceEdge); - - // from the currentSourceEdge, get its opposite end - // then iterate over the out edges of that opposite end - V newDestVertex = getGraph().getEndpoints(currentSourceEdge).getSecond(); - Collection outs = getGraph().getOutEdges(newDestVertex); - for (E currentDestEdge : outs) { - V destEdgeRoot = this.roots.get(currentDestEdge); - V destEdgeDest = getGraph().getEndpoints(currentDestEdge).getSecond(); - - if (currentSourceEdge == virtualSinkEdge) { - newEdges.add(currentDestEdge); - continue; - } - if (destEdgeRoot == root) { - continue; - } - if (destEdgeDest == getGraph().getEndpoints(currentSourceEdge).getFirst()) {//currentSourceEdge.getSource()) { - continue; - } - Set pathsSeen = this.pathsSeenMap.get(destEdgeDest); - - if (pathsSeen == null) { - newVertexEncountered(sourcePathIndex.intValue(), destEdgeDest, root); - } else if (roots.get(destEdgeDest) != root) { - roots.put(destEdgeDest,root); - pathsSeen.clear(); - pathsSeen.add(sourcePathIndex); - } else if (!pathsSeen.contains(sourcePathIndex)) { - pathsSeen.add(sourcePathIndex); - } 
else { - continue; - } - - this.pathIndices.put(currentDestEdge, sourcePathIndex); - this.roots.put(currentDestEdge, root); - newEdges.add(currentDestEdge); - } - } - - edges = newEdges; - currentDepth++; - } - - getGraph().removeVertex(virtualNode); - } - - private void newVertexEncountered(int sourcePathIndex, V dest, V root) { - Set pathsSeen = new HashSet(); - pathsSeen.add(sourcePathIndex); - this.pathsSeenMap.put(dest, pathsSeen); - roots.put(dest, root); - } - - @Override - public void step() { - for (V v : mPriors) { - computeWeightedPathsFromSource(v, mMaxDepth); - } - - normalizeRankings(); -// return 0; - } - - /** - * Given a node, returns the corresponding rank score. This implementation of getRankScore assumes - * the decoration representing the rank score is of type MutableDouble. - * @return the rank score for this node - */ - @Override - public String getRankScoreKey() { - return WEIGHTED_NIPATHS_KEY; - } - - @Override - protected void onFinalize(Object udc) { - pathIndices.remove(udc); - roots.remove(udc); - pathsSeenMap.remove(udc); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java deleted file mode 100644 index b59dcfaa4c..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 7, 2003 - * - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ChainedTransformer; -import org.apache.commons.collections15.functors.CloneTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Abstract class for implementations of {@code Layout}. It handles some of the - * basic functions: storing coordinates, maintaining the dimensions, initializing - * the locations, maintaining locked vertices. - * - * @author Danyel Fisher, Scott White - * @author Tom Nelson - converted to jung2 - * @param the vertex type - * @param the edge type - */ -abstract public class AbstractLayout implements Layout { - - /** - * a set of vertices that should not move in relation to the - * other vertices - */ - private Set dontmove = new HashSet(); - - protected Dimension size; - protected Graph graph; - protected boolean initialized; - - protected Map locations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public Point2D transform(V arg0) { - return new Point2D.Double(); - }}); - - - /** - * Creates an instance which does not initialize the vertex locations. - * - * @param graph the graph for which the layout algorithm is to be created. 
- */ - protected AbstractLayout(Graph graph) { - if (graph == null) - { - throw new IllegalArgumentException("Graph must be non-null"); - } - this.graph = graph; - } - - @SuppressWarnings("unchecked") - protected AbstractLayout(Graph graph, Transformer initializer) { - this.graph = graph; - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - initialized = true; - } - - protected AbstractLayout(Graph graph, Dimension size) { - this.graph = graph; - this.size = size; - } - - @SuppressWarnings("unchecked") - protected AbstractLayout(Graph graph, Transformer initializer, Dimension size) { - this.graph = graph; - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - this.size = size; - } - - public void setGraph(Graph graph) { - this.graph = graph; - if(size != null && graph != null) { - initialize(); - } - } - - /** - * When a visualization is resized, it presumably wants to fix the - * locations of the vertices and possibly to reinitialize its data. The - * current method calls initializeLocations followed by initialize_local. - */ - public void setSize(Dimension size) { - - if(size != null && graph != null) { - - Dimension oldSize = this.size; - this.size = size; - initialize(); - - if(oldSize != null) { - adjustLocations(oldSize, size); - } - } - } - - private void adjustLocations(Dimension oldSize, Dimension size) { - - int xOffset = (size.width - oldSize.width) / 2; - int yOffset = (size.height - oldSize.height) / 2; - - // now, move each vertex to be at the new screen center - while(true) { - try { - for(V v : getGraph().getVertices()) { - offsetVertex(v, xOffset, yOffset); - } - break; - } catch(ConcurrentModificationException cme) { - } - } - } - - public boolean isLocked(V v) { - return dontmove.contains(v); - } - - @SuppressWarnings("unchecked") - public void setInitializer(Transformer initializer) { - if(this.equals(initializer)) { - throw new IllegalArgumentException("Layout cannot be initialized with itself"); - } - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - initialized = true; - } - - /** - * Returns the current size of the visualization space, accoring to the - * last call to resize(). - * - * @return the current size of the screen - */ - public Dimension getSize() { - return size; - } - - /** - * Returns the Coordinates object that stores the vertex' x and y location. - * - * @param v - * A Vertex that is a part of the Graph being visualized. - * @return A Coordinates object with x and y locations. - */ - private Point2D getCoordinates(V v) { - return locations.get(v); - } - - public Point2D transform(V v) { - return getCoordinates(v); - } - - /** - * Returns the x coordinate of the vertex from the Coordinates object. - * in most cases you will be better off calling transform(v). - */ - public double getX(V v) { - assert getCoordinates(v) != null : "Cannot getX for an unmapped vertex "+v; - return getCoordinates(v).getX(); - } - - /** - * Returns the y coordinate of the vertex from the Coordinates object. - * In most cases you will be better off calling transform(v). 
- */ - public double getY(V v) { - assert getCoordinates(v) != null : "Cannot getY for an unmapped vertex "+v; - return getCoordinates(v).getY(); - } - - /** - * @param v - * @param xOffset - * @param yOffset - */ - protected void offsetVertex(V v, double xOffset, double yOffset) { - Point2D c = getCoordinates(v); - c.setLocation(c.getX()+xOffset, c.getY()+yOffset); - setLocation(v, c); - } - - /** - * Accessor for the graph that represets all vertices. - * - * @return the graph that contains all vertices. - */ - public Graph getGraph() { - return graph; - } - - /** - * Forcibly moves a vertex to the (x,y) location by setting its x and y - * locations to the inputted location. Does not add the vertex to the - * "dontmove" list, and (in the default implementation) does not make any - * adjustments to the rest of the graph. - */ - public void setLocation(V picked, double x, double y) { - Point2D coord = getCoordinates(picked); - coord.setLocation(x, y); - } - - public void setLocation(V picked, Point2D p) { - Point2D coord = getCoordinates(picked); - coord.setLocation(p); - } - - /** - * Locks {@code v} in place if {@code state} is {@code true}, otherwise unlocks it. - */ - public void lock(V v, boolean state) { - if(state == true) - dontmove.add(v); - else - dontmove.remove(v); - } - - /** - * Locks all vertices in place if {@code lock} is {@code true}, otherwise unlocks all vertices. - */ - public void lock(boolean lock) { - for(V v : graph.getVertices()) { - lock(v, lock); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java deleted file mode 100644 index 38058377a1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * A {@code Layout} implementation that combines - * multiple other layouts so that they may be manipulated - * as one layout. The relaxer thread will step each layout - * in sequence. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param the vertex type - * @param the edge type - */ -public class AggregateLayout implements Layout, IterativeContext { - - protected Layout delegate; - protected Map,Point2D> layouts = new HashMap,Point2D>(); - - /** - * Creates an instance backed by the specified {@code delegate}. 
- * @param delegate - */ - public AggregateLayout(Layout delegate) { - this.delegate = delegate; - } - - /** - * @return the delegate - */ - public Layout getDelegate() { - return delegate; - } - - /** - * @param delegate the delegate to set - */ - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } - - /** - * adds the passed layout as a sublayout, also specifying - * the center of where this sublayout should appear - * @param layout - * @param center - */ - public void put(Layout layout, Point2D center) { - layouts.put(layout,center); - } - - /** - * Returns the center of the passed layout. - * @param layout - * @return the center of the passed layout - */ - public Point2D get(Layout layout) { - return layouts.get(layout); - } - - /** - * Removes {@code layout} from this instance. - */ - public void remove(Layout layout) { - layouts.remove(layout); - } - - /** - * Removes all layouts from this instance. - */ - public void removeAll() { - layouts.clear(); - } - - /** - * Returns the graph for which this layout is defined. - * @return the graph for which this layout is defined - * @see edu.uci.ics.jung.algorithms.layout.Layout#getGraph() - */ - public Graph getGraph() { - return delegate.getGraph(); - } - - /** - * Returns the size of the underlying layout. - * @return the size of the underlying layout - * @see edu.uci.ics.jung.algorithms.layout.Layout#getSize() - */ - public Dimension getSize() { - return delegate.getSize(); - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#initialize() - */ - public void initialize() { - delegate.initialize(); - for(Layout layout : layouts.keySet()) { - layout.initialize(); - } - } - - /** - * Override to test if the passed vertex is locked in - * any of the layouts. - * @param v - * @return true if v is locked in any of the layouts, and false otherwise - * @see edu.uci.ics.jung.algorithms.layout.Layout#isLocked(java.lang.Object) - */ - public boolean isLocked(V v) { - boolean locked = false; - for(Layout layout : layouts.keySet()) { - locked |= layout.isLocked(v); - } - locked |= delegate.isLocked(v); - return locked; - } - - /** - * override to lock or unlock this vertex in any layout with - * a subgraph containing it - * @param v - * @param state - * @see edu.uci.ics.jung.algorithms.layout.Layout#lock(java.lang.Object, boolean) - */ - public void lock(V v, boolean state) { - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - layout.lock(v, state); - } - } - delegate.lock(v, state); - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#reset() - */ - public void reset() { - for(Layout layout : layouts.keySet()) { - layout.reset(); - } - delegate.reset(); - } - - /** - * @param graph - * @see edu.uci.ics.jung.algorithms.layout.Layout#setGraph(edu.uci.ics.jung.graph.Graph) - */ - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } - - /** - * @param initializer - * @see edu.uci.ics.jung.algorithms.layout.Layout#setInitializer(org.apache.commons.collections15.Transformer) - */ - public void setInitializer(Transformer initializer) { - delegate.setInitializer(initializer); - } - - /** - * @param v - * @param location - * @see edu.uci.ics.jung.algorithms.layout.Layout#setLocation(java.lang.Object, java.awt.geom.Point2D) - */ - public void setLocation(V v, Point2D location) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - Point2D center = layouts.get(layout); - // 
transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - - AffineTransform at = - AffineTransform.getTranslateInstance(-center.getX()+d.width/2,-center.getY()+d.height/2); - Point2D localLocation = at.transform(location, null); - layout.setLocation(v, localLocation); - wasInSublayout = true; - } - } - if(wasInSublayout == false && getGraph().getVertices().contains(v)) { - delegate.setLocation(v, location); - } - } - - /** - * @param d - * @see edu.uci.ics.jung.algorithms.layout.Layout#setSize(java.awt.Dimension) - */ - public void setSize(Dimension d) { - delegate.setSize(d); - } - - /** - * Returns a map from each {@code Layout} instance to its center point. - */ - public Map,Point2D> getLayouts() { - return layouts; - } - - /** - * Returns the location of the vertex. The location is specified first - * by the sublayouts, and then by the base layout if no sublayouts operate - * on this vertex. - * @return the location of the vertex - * @see org.apache.commons.collections15.Transformer#transform(java.lang.Object) - */ - public Point2D transform(V v) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - wasInSublayout = true; - Point2D center = layouts.get(layout); - // transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - AffineTransform at = - AffineTransform.getTranslateInstance(center.getX()-d.width/2, - center.getY()-d.height/2); - return at.transform(layout.transform(v),null); - } - } - if(wasInSublayout == false) { - return delegate.transform(v); - } - return null; - - } - - /** - * Check all sublayouts.keySet() and the delegate layout, returning - * done == true iff all are done. - */ - public boolean done() { - boolean done = true; - for(Layout layout : layouts.keySet()) { - if(layout instanceof IterativeContext) { - done &= ((IterativeContext)layout).done(); - } - } - if(delegate instanceof IterativeContext) { - done &= ((IterativeContext)delegate).done(); - } - return done; - } - - /** - * call step on any sublayout that is also an IterativeContext - * and is not done - */ - public void step() { - for(Layout layout : layouts.keySet()) { - if(layout instanceof IterativeContext) { - IterativeContext context = (IterativeContext)layout; - if(context.done() == false) { - context.step(); - } - } - } - if(delegate instanceof IterativeContext) { - IterativeContext context = (IterativeContext)delegate; - if(context.done() == false) { - context.step(); - } - } - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java deleted file mode 100644 index 1d9f384ae7..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
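A sketch of how the AggregateLayout above is typically combined with a delegate layout and a sublayout; the SparseMultigraph implementation, the CircleLayout delegate and the concrete sizes and centre point are assumptions for illustration, only put(), setSize() and transform() come from the class itself.

import java.awt.Dimension;
import java.awt.geom.Point2D;
import edu.uci.ics.jung.algorithms.layout.AggregateLayout;
import edu.uci.ics.jung.algorithms.layout.CircleLayout;
import edu.uci.ics.jung.algorithms.layout.Layout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class AggregateLayoutSketch {
    public static void main(String[] args) {
        Graph<String, Integer> graph = new SparseMultigraph<String, Integer>();
        graph.addVertex("a"); graph.addVertex("b"); graph.addVertex("c");
        graph.addEdge(1, "a", "b");

        // The delegate lays out every vertex that no sublayout claims.
        AggregateLayout<String, Integer> layout =
                new AggregateLayout<String, Integer>(new CircleLayout<String, Integer>(graph));
        layout.setSize(new Dimension(600, 600));

        // A sublayout positions its own subgraph, offset to the given centre.
        Graph<String, Integer> subgraph = new SparseMultigraph<String, Integer>();
        subgraph.addVertex("b"); subgraph.addVertex("c");
        subgraph.addEdge(2, "b", "c");
        Layout<String, Integer> subLayout = new CircleLayout<String, Integer>(subgraph);
        subLayout.setSize(new Dimension(150, 150));
        layout.put(subLayout, new Point2D.Double(450, 150));

        Point2D p = layout.transform("b"); // resolved through the sublayout first
        System.out.println(p);
    }
}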
- * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * A {@code Layout} implementation that assigns positions to {@code Tree} or - * {@code Forest} vertices using associations with nested circles ("balloons"). - * A balloon is nested inside another balloon if the first balloon's subtree - * is a subtree of the second balloon's subtree. - * - * @author Tom Nelson - * - */ -public class BalloonLayout extends TreeLayout { - - protected Map polarLocations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public PolarPoint transform(V arg0) { - return new PolarPoint(); - }}); - - protected Map radii = new HashMap(); - - /** - * Creates an instance based on the input forest. - */ - public BalloonLayout(Forest g) - { - super(g); - } - - protected void setRootPolars() - { - List roots = TreeUtils.getRoots(graph); - if(roots.size() == 1) { - // its a Tree - V root = roots.get(0); - setRootPolar(root); - setPolars(new ArrayList(graph.getChildren(root)), - getCenter(), getSize().width/2); - } else if (roots.size() > 1) { - // its a Forest - setPolars(roots, getCenter(), getSize().width/2); - } - } - - protected void setRootPolar(V root) { - PolarPoint pp = new PolarPoint(0,0); - Point2D p = getCenter(); - polarLocations.put(root, pp); - locations.put(root, p); - } - - - protected void setPolars(List kids, Point2D parentLocation, double parentRadius) { - - int childCount = kids.size(); - if(childCount == 0) return; - // handle the 1-child case with 0 limit on angle. - double angle = Math.max(0, Math.PI / 2 * (1 - 2.0/childCount)); - double childRadius = parentRadius*Math.cos(angle) / (1 + Math.cos(angle)); - double radius = parentRadius - childRadius; - - double rand = Math.random(); - - for(int i=0; i< childCount; i++) { - V child = kids.get(i); - double theta = i* 2*Math.PI/childCount + rand; - radii.put(child, childRadius); - - PolarPoint pp = new PolarPoint(theta, radius); - polarLocations.put(child, pp); - - Point2D p = PolarPoint.polarToCartesian(pp); - p.setLocation(p.getX()+parentLocation.getX(), p.getY()+parentLocation.getY()); - locations.put(child, p); - setPolars(new ArrayList(graph.getChildren(child)), p, childRadius); - } - } - - @Override - public void setSize(Dimension size) { - this.size = size; - setRootPolars(); - } - - /** - * Returns the coordinates of {@code v}'s parent, or the - * center of this layout's area if it's a root. 
- */ - public Point2D getCenter(V v) { - V parent = graph.getParent(v); - if(parent == null) { - return getCenter(); - } - return locations.get(parent); - } - - @Override - public void setLocation(V v, Point2D location) { - Point2D c = getCenter(v); - Point2D pv = new Point2D.Double(location.getX()-c.getX(),location.getY()-c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - polarLocations.get(v).setLocation(newLocation); - - Point2D center = getCenter(v); - pv.setLocation(pv.getX()+center.getX(), pv.getY()+center.getY()); - locations.put(v, pv); - } - - @Override - public Point2D transform(V v) { - return locations.get(v); - } - - /** - * @return the radii - */ - public Map getRadii() { - return radii; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java deleted file mode 100644 index 8cafb77827..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Dec 4, 2003 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * A {@code Layout} implementation that positions vertices equally spaced on a regular circle. - * - * @author Masanori Harada - */ -public class CircleLayout extends AbstractLayout { - - private double radius; - private List vertex_ordered_list; - - Map circleVertexDataMap = - LazyMap.decorate(new HashMap(), - new Factory() { - public CircleVertexData create() { - return new CircleVertexData(); - }}); - - /** - * Creates an instance for the specified graph. - */ - public CircleLayout(Graph g) { - super(g); - } - - /** - * Returns the radius of the circle. - */ - public double getRadius() { - return radius; - } - - /** - * Sets the radius of the circle. Must be called before - * {@code initialize()} is called. - */ - public void setRadius(double radius) { - this.radius = radius; - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * specified by {@code comparator}. - */ - public void setVertexOrder(Comparator comparator) - { - if (vertex_ordered_list == null) - vertex_ordered_list = new ArrayList(getGraph().getVertices()); - Collections.sort(vertex_ordered_list, comparator); - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * of {@code vertex_list}. 
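A minimal sketch of driving the CircleLayout above outside a visualization component; the SparseMultigraph implementation and the sample vertices are assumptions, while setRadius(), setVertexOrder(), setSize() and transform() are the methods shown in this hunk.

import java.awt.Dimension;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import edu.uci.ics.jung.algorithms.layout.CircleLayout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class CircleLayoutSketch {
    public static void main(String[] args) {
        Graph<String, Integer> graph = new SparseMultigraph<String, Integer>();
        graph.addVertex("a"); graph.addVertex("b"); graph.addVertex("c");
        graph.addEdge(1, "a", "b");

        CircleLayout<String, Integer> layout = new CircleLayout<String, Integer>(graph);
        layout.setRadius(200); // must be set before initialize(); otherwise 0.45 * min(width, height) is used
        layout.setVertexOrder(new ArrayList<String>(graph.getVertices()));
        layout.setSize(new Dimension(500, 500)); // AbstractLayout.setSize() triggers initialize()

        for (String v : graph.getVertices()) {
            Point2D p = layout.transform(v); // evenly spaced positions on the circle
            System.out.println(v + " -> " + p);
        }
    }
}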
- */ - public void setVertexOrder(List vertex_list) - { - if (!vertex_list.containsAll(getGraph().getVertices())) - throw new IllegalArgumentException("Supplied list must include " + - "all vertices of the graph"); - this.vertex_ordered_list = vertex_list; - } - - public void reset() { - initialize(); - } - - public void initialize() - { - Dimension d = getSize(); - - if (d != null) - { - if (vertex_ordered_list == null) - setVertexOrder(new ArrayList(getGraph().getVertices())); - - double height = d.getHeight(); - double width = d.getWidth(); - - if (radius <= 0) { - radius = 0.45 * (height < width ? height : width); - } - - int i = 0; - for (V v : vertex_ordered_list) - { - Point2D coord = transform(v); - - double angle = (2 * Math.PI * i) / vertex_ordered_list.size(); - - coord.setLocation(Math.cos(angle) * radius + width / 2, - Math.sin(angle) * radius + height / 2); - - CircleVertexData data = getCircleData(v); - data.setAngle(angle); - i++; - } - } - } - - protected CircleVertexData getCircleData(V v) { - return circleVertexDataMap.get(v); - } - - protected static class CircleVertexData { - private double angle; - - protected double getAngle() { - return angle; - } - - protected void setAngle(double angle) { - this.angle = angle; - } - - @Override - public String toString() { - return "CircleVertexData: angle=" + angle; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java deleted file mode 100644 index 97d3ee6b55..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Dec 4, 2003 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of {@code Layout} suitable for tree-like directed - * acyclic graphs. Parts of it will probably not terminate if the graph is - * cyclic! The layout will result in directed edges pointing generally upwards. - * Any vertices with no successors are considered to be level 0, and tend - * towards the top of the layout. Any vertex has a level one greater than the - * maximum level of all its successors. - * - * - * @author John Yesberg - */ -public class DAGLayout extends SpringLayout { - - /** - * Each vertex has a minimumLevel. Any vertex with no successors has - * minimumLevel of zero. The minimumLevel of any vertex must be strictly - * greater than the minimumLevel of its parents. (Vertex A is a parent of - * Vertex B iff there is an edge from B to A.) Typically, a vertex will - * have a minimumLevel which is one greater than the minimumLevel of its - * parent's. However, if the vertex has two parents, its minimumLevel will - * be one greater than the maximum of the parents'. We need to calculate - * the minimumLevel for each vertex. When we layout the graph, vertices - * cannot be drawn any higher than the minimumLevel. The graphHeight of a - * graph is the greatest minimumLevel that is used. 
We will modify the - * SpringLayout calculations so that nodes cannot move above their assigned - * minimumLevel. - */ - private Map minLevels = new HashMap(); - // Simpler than the "pair" technique. - static int graphHeight; - static int numRoots; - final double SPACEFACTOR = 1.3; - // How much space do we allow for additional floating at the bottom. - final double LEVELATTRACTIONRATE = 0.8; - - /** - * A bunch of parameters to help work out when to stop quivering. - * - * If the MeanSquareVel(ocity) ever gets below the MSV_THRESHOLD, then we - * will start a final cool-down phase of COOL_DOWN_INCREMENT increments. If - * the MeanSquareVel ever exceeds the threshold, we will exit the cool down - * phase, and continue looking for another opportunity. - */ - final double MSV_THRESHOLD = 10.0; - double meanSquareVel; - boolean stoppingIncrements = false; - int incrementsLeft; - final int COOL_DOWN_INCREMENTS = 200; - - /** - * Creates an instance for the specified graph. - */ - public DAGLayout(Graph g) { - super(g); - } - - /** - * setRoot calculates the level of each vertex in the graph. Level 0 is - * allocated to any vertex with no successors. Level n+1 is allocated to - * any vertex whose successors' maximum level is n. - */ - public void setRoot(Graph g) { - numRoots = 0; - for(V v : g.getVertices()) { - Collection successors = getGraph().getSuccessors(v); - if (successors.size() == 0) { - setRoot(v); - numRoots++; - } - } - } - - /** - * Set vertex v to be level 0. - */ - public void setRoot(V v) { - minLevels.put(v, new Integer(0)); - // set all the levels. - propagateMinimumLevel(v); - } - - /** - * A recursive method for allocating the level for each vertex. Ensures - * that all predecessors of v have a level which is at least one greater - * than the level of v. - * - * @param v - */ - public void propagateMinimumLevel(V v) { - int level = minLevels.get(v).intValue(); - for(V child : getGraph().getPredecessors(v)) { - int oldLevel, newLevel; - Number o = minLevels.get(child); - if (o != null) - oldLevel = o.intValue(); - else - oldLevel = 0; - newLevel = Math.max(oldLevel, level + 1); - minLevels.put(child, new Integer(newLevel)); - - if (newLevel > graphHeight) - graphHeight = newLevel; - propagateMinimumLevel(child); - } - } - - /** - * Sets random locations for a vertex within the dimensions of the space. - * This overrides the method in AbstractLayout - * - * @param coord - * @param d - */ - private void initializeLocation( - V v, - Point2D coord, - Dimension d) { - - int level = minLevels.get(v).intValue(); - int minY = (int) (level * d.getHeight() / (graphHeight * SPACEFACTOR)); - double x = Math.random() * d.getWidth(); - double y = Math.random() * (d.getHeight() - minY) + minY; - coord.setLocation(x,y); - } - - @Override - public void setSize(Dimension size) { - super.setSize(size); - for(V v : getGraph().getVertices()) { - initializeLocation(v,transform(v),getSize()); - } - } - - /** - * Had to override this one as well, to ensure that setRoot() is called. - */ - @Override - public void initialize() { - super.initialize(); - setRoot(getGraph()); - } - - /** - * Override the moveNodes() method from SpringLayout. The only change we - * need to make is to make sure that nodes don't float higher than the minY - * coordinate, as calculated by their minimumLevel. 
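A rough usage sketch for the DAGLayout above; the DirectedSparseGraph implementation, the tiny sample DAG and the bounded stepping loop are illustrative assumptions (in practice the layout is usually stepped by a relaxer, as with the other iterative layouts in this package).

import java.awt.Dimension;
import edu.uci.ics.jung.algorithms.layout.DAGLayout;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseGraph;

public class DAGLayoutSketch {
    public static void main(String[] args) {
        // Edges point from child to parent: vertices with no successors become level-0 roots.
        DirectedGraph<String, Integer> dag = new DirectedSparseGraph<String, Integer>();
        dag.addVertex("root"); dag.addVertex("a"); dag.addVertex("b");
        dag.addEdge(1, "a", "root");
        dag.addEdge(2, "b", "a");

        DAGLayout<String, Integer> layout = new DAGLayout<String, Integer>(dag);
        // setSize() runs initialize(), which calls setRoot() and then seeds per-level positions.
        layout.setSize(new Dimension(400, 400));

        // Step until the cool-down logic reports done(), with a hard upper bound as a safety net.
        for (int i = 0; i < 2000 && !layout.done(); i++) {
            layout.step();
        }
    }
}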
- */ - @Override - protected void moveNodes() { - // Dimension d = currentSize; - double oldMSV = meanSquareVel; - meanSquareVel = 0; - - synchronized (getSize()) { - - for(V v : getGraph().getVertices()) { - if (isLocked(v)) - continue; - SpringLayout.SpringVertexData vd = springVertexData.get(v); - Point2D xyd = transform(v); - - int width = getSize().width; - int height = getSize().height; - - // (JY addition: three lines are new) - int level = - minLevels.get(v).intValue(); - int minY = (int) (level * height / (graphHeight * SPACEFACTOR)); - int maxY = - level == 0 - ? (int) (height / (graphHeight * SPACEFACTOR * 2)) - : height; - - // JY added 2* - double the sideways repulsion. - vd.dx += 2 * vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // JY Addition: Attract the vertex towards it's minimumLevel - // height. - double delta = xyd.getY() - minY; - vd.dy -= delta * LEVELATTRACTIONRATE; - if (level == 0) - vd.dy -= delta * LEVELATTRACTIONRATE; - // twice as much at the top. - - // JY addition: - meanSquareVel += (vd.dx * vd.dx + vd.dy * vd.dy); - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)) , xyd.getY()+Math.max(-5, Math.min(5, vd.dy)) ); - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - - // (JY addition: These two lines replaced 0 with minY) - if (xyd.getY() < minY) { - xyd.setLocation(xyd.getX(), minY); - // (JY addition: replace height with maxY) - } else if (xyd.getY() > maxY) { - xyd.setLocation(xyd.getX(), maxY); - } - - // (JY addition: if there's only one root, anchor it in the - // middle-top of the screen) - if (numRoots == 1 && level == 0) { - xyd.setLocation(width/2, xyd.getY()); - } - } - } - //System.out.println("MeanSquareAccel="+meanSquareVel); - if (!stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) < MSV_THRESHOLD) { - stoppingIncrements = true; - incrementsLeft = COOL_DOWN_INCREMENTS; - } else if ( - stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) <= MSV_THRESHOLD) { - incrementsLeft--; - if (incrementsLeft <= 0) - incrementsLeft = 0; - } - } - - /** - * Override incrementsAreDone so that we can eventually stop. - */ - @Override - public boolean done() { - if (stoppingIncrements && incrementsLeft == 0) - return true; - else - return false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. - */ - @Override - public void setLocation(V picked, double x, double y) { - Point2D coord = transform(picked); - coord.setLocation(x,y); - stoppingIncrements = false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. - */ - @Override - public void setLocation(V picked, Point2D p) { - Point2D coord = transform(picked); - coord.setLocation(p); - stoppingIncrements = false; - } - - /** - * Overridden relaxEdges. This one reduces the effect of edges between - * greatly different levels. - * - */ - @Override - protected void relaxEdges() { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - // JY addition. 
- int level1 = - minLevels.get(v1).intValue(); - int level2 = - minLevels.get(v2).intValue(); - - // desiredLen *= Math.pow( 1.1, (v1.degree() + v2.degree()) ); -// double desiredLen = getLength(e); - double desiredLen = lengthFunction.transform(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - // force factor: optimal length minus actual length, - // is made smaller as the current actual length gets larger. - // why? - - // System.out.println("Desired : " + getLength( e )); - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch / 100.0, - (getGraph().degree(v1) + getGraph().degree(v2) -2)); - - // JY addition. If this is an edge which stretches a long way, - // don't be so concerned about it. - if (level1 != level2) - f = f / Math.pow(Math.abs(level2 - level1), 1.5); - - // f= Math.min( 0, f ); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. - double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.get(v1); - v2D = springVertexData.get(v2); - -// SpringEdgeData sed = getSpringEdgeData(e); -// sed.f = f; - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java deleted file mode 100644 index c8a2a24a8a..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -/** - * Implements the Fruchterman-Reingold force-directed algorithm for node layout. - * - *

Behavior is determined by the following settable parameters:
 - *   • attraction multiplier: how much edges try to keep their vertices together
 - *   • repulsion multiplier: how much vertices try to push each other apart
 - *   • maximum iterations: how many iterations this algorithm will use before stopping
      - * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. - * - * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" - * @author Scott White, Yan-Biao Boey, Danyel Fisher - */ -public class FRLayout extends AbstractLayout implements IterativeContext { - - private double forceConstant; - - private double temperature; - - private int currentIteration; - - private int mMaxIterations = 700; - - private Map frVertexData = - LazyMap.decorate(new HashMap(), new Factory() { - public FRVertexData create() { - return new FRVertexData(); - }}); - - private double attraction_multiplier = 0.75; - - private double attraction_constant; - - private double repulsion_multiplier = 0.75; - - private double repulsion_constant; - - private double max_dimension; - - /** - * Creates an instance for the specified graph. - */ - public FRLayout(Graph g) { - super(g); - } - - /** - * Creates an instance of size {@code d} for the specified graph. - */ - public FRLayout(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - initialize(); - max_dimension = Math.max(d.height, d.width); - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) { - setInitializer(new RandomLocationTransformer(size)); - } - super.setSize(size); - max_dimension = Math.max(size.height, size.width); - } - - /** - * Sets the attraction multiplier. - */ - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; - } - - /** - * Sets the repulsion multiplier. - */ - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } - - public void reset() { - doInit(); - } - - public void initialize() { - doInit(); - } - - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } - - private double EPSILON = 0.000001D; - - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. 
- */ - public synchronized void step() { - currentIteration++; - - /** - * Calculate repulsion - */ - while(true) { - - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - - calcAttraction(e); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} - } - cool(); - } - - protected synchronized void calcPositions(V v) { - FRVertexData fvd = getFRData(v); - if(fvd == null) return; - Point2D xyd = transform(v); - double deltaLength = Math.max(EPSILON, fvd.norm()); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - if (Double.isNaN(newXDisp)) { - throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); } - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - xyd.setLocation(xyd.getX()+newXDisp, xyd.getY()+newYDisp); - - double borderWidth = getSize().getWidth() / 50.0; - double newXPos = xyd.getX(); - if (newXPos < borderWidth) { - newXPos = borderWidth + Math.random() * borderWidth * 2.0; - } else if (newXPos > (getSize().getWidth() - borderWidth)) { - newXPos = getSize().getWidth() - borderWidth - Math.random() - * borderWidth * 2.0; - } - - double newYPos = xyd.getY(); - if (newYPos < borderWidth) { - newYPos = borderWidth + Math.random() * borderWidth * 2.0; - } else if (newYPos > (getSize().getHeight() - borderWidth)) { - newYPos = getSize().getHeight() - borderWidth - - Math.random() * borderWidth * 2.0; - } - - xyd.setLocation(newXPos, newYPos); - } - - protected void calcAttraction(E e) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; - } - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) - + (yDelta * yDelta))); - - double force = (deltaLength * deltaLength) / attraction_constant; - - if (Double.isNaN(force)) { throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [force]"); } - - double dx = (xDelta / deltaLength) * force; - double dy = (yDelta / deltaLength) * force; - if(v1_locked == false) { - FRVertexData fvd1 = getFRData(v1); - fvd1.offset(-dx, -dy); - } - if(v2_locked == false) { - FRVertexData fvd2 = getFRData(v2); - fvd2.offset(dx, dy); - } - } - - protected void calcRepulsion(V v1) { - FRVertexData fvd1 = getFRData(v1); - if(fvd1 == null) - return; - fvd1.setLocation(0, 0); - - try { - for(V v2 : getGraph().getVertices()) { - -// if (isLocked(v2)) continue; - if (v1 != v2) { - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, Math - .sqrt((xDelta * xDelta) + (yDelta * yDelta))); - - double force = (repulsion_constant * repulsion_constant) / deltaLength; - - if 
(Double.isNaN(force)) { throw new RuntimeException( - "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); } - - fvd1.offset((xDelta / deltaLength) * force, - (yDelta / deltaLength) * force); - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); - } - } - - private void cool() { - temperature *= (1.0 - currentIteration / (double) mMaxIterations); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - mMaxIterations = maxIterations; - } - - protected FRVertexData getFRData(V v) { - return frVertexData.get(v); - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count, - * MAX_ITERATIONS. - */ - public boolean done() { - if (currentIteration > mMaxIterations || temperature < 1.0/max_dimension) - { - return true; - } - return false; - } - - protected static class FRVertexData extends Point2D.Double - { - protected void offset(double x, double y) - { - this.x += x; - this.y += y; - } - - protected double norm() - { - return Math.sqrt(x*x + y*y); - } - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java deleted file mode 100644 index 0f5b05ea85..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Implements the Fruchterman-Reingold force-directed algorithm for node layout. - * This is an experimental attempt at optimizing {@code FRLayout}; if it is successful - * it will be folded back into {@code FRLayout} (and this class will disappear). - * - *
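A minimal driver for the FRLayout class above (FRLayout2 below exposes the same settable parameters); the SparseMultigraph implementation, the sample graph and the explicit stepping loop are assumptions for illustration.

import java.awt.Dimension;
import java.awt.geom.Point2D;
import edu.uci.ics.jung.algorithms.layout.FRLayout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class FRLayoutSketch {
    public static void main(String[] args) {
        Graph<String, Integer> graph = new SparseMultigraph<String, Integer>();
        graph.addVertex("a"); graph.addVertex("b"); graph.addVertex("c");
        graph.addEdge(1, "a", "b");
        graph.addEdge(2, "b", "c");

        // The (graph, size) constructor seeds random initial positions and calls initialize();
        // attraction and repulsion multipliers stay at their 0.75 defaults here.
        FRLayout<String, Integer> layout = new FRLayout<String, Integer>(graph, new Dimension(600, 600));
        layout.setMaxIterations(300);

        while (!layout.done()) { // done() once the iteration cap or the temperature floor is reached
            layout.step();
        }
        Point2D p = layout.transform("a");
        System.out.println(p);
    }
}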

Behavior is determined by the following settable parameters:
 - *   • attraction multiplier: how much edges try to keep their vertices together
 - *   • repulsion multiplier: how much vertices try to push each other apart
 - *   • maximum iterations: how many iterations this algorithm will use before stopping
      - * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. - - * - * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf - * - * @author Tom Nelson - * @author Scott White, Yan-Biao Boey, Danyel Fisher - */ -public class FRLayout2 extends AbstractLayout implements IterativeContext { - - private double forceConstant; - - private double temperature; - - private int currentIteration; - - private int maxIterations = 700; - - private Map frVertexData = - LazyMap.decorate(new HashMap(), new Factory() { - public Point2D create() { - return new Point2D.Double(); - }}); - - private double attraction_multiplier = 0.75; - - private double attraction_constant; - - private double repulsion_multiplier = 0.75; - - private double repulsion_constant; - - private double max_dimension; - - private Rectangle2D innerBounds = new Rectangle2D.Double(); - - private boolean checked = false; - - /** - * Creates an instance for the specified graph. - */ - public FRLayout2(Graph g) { - super(g); - } - - /** - * Creates an instance of size {@code d} for the specified graph. - */ - public FRLayout2(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - max_dimension = Math.max(d.height, d.width); - initialize(); - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - double t = size.width/50.0; - innerBounds.setFrameFromDiagonal(t,t,size.width-t,size.height-t); - max_dimension = Math.max(size.height, size.width); - } - - /** - * Sets the attraction multiplier. - */ - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; - } - - /** - * Sets the repulsion multiplier. - */ - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } - - public void reset() { - doInit(); - } - - public void initialize() { - doInit(); - } - - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } - - private double EPSILON = 0.000001D; - - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. 
- */ - public synchronized void step() { - currentIteration++; - - /** - * Calculate repulsion - */ - while(true) { - - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - calcAttraction(e); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} - } - cool(); - } - - protected synchronized void calcPositions(V v) { - Point2D fvd = this.frVertexData.get(v); - if(fvd == null) return; - Point2D xyd = transform(v); - double deltaLength = Math.max(EPSILON, - Math.sqrt(fvd.getX()*fvd.getX()+fvd.getY()*fvd.getY())); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - assert Double.isNaN(newXDisp) == false : "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"; - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - double newX = xyd.getX()+Math.max(-5, Math.min(5,newXDisp)); - double newY = xyd.getY()+Math.max(-5, Math.min(5,newYDisp)); - - newX = Math.max(innerBounds.getMinX(), Math.min(newX, innerBounds.getMaxX())); - newY = Math.max(innerBounds.getMinY(), Math.min(newY, innerBounds.getMaxY())); - - xyd.setLocation(newX, newY); - - } - - protected void calcAttraction(E e) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; - } - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distance(p2)); - - double force = deltaLength / attraction_constant; - - assert Double.isNaN(force) == false : "Unexpected mathematical result in FRLayout:calcPositions [force]"; - - double dx = xDelta * force; - double dy = yDelta * force; - Point2D fvd1 = frVertexData.get(v1); - Point2D fvd2 = frVertexData.get(v2); - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()-2*dx, fvd1.getY()-2*dy); - } else { - fvd1.setLocation(fvd1.getX()-dx, fvd1.getY()-dy); - } - if(v1_locked) { - // double the offset for v2, as v1 will not be moving in - // the opposite direction - fvd2.setLocation(fvd2.getX()+2*dx, fvd2.getY()+2*dy); - } else { - fvd2.setLocation(fvd2.getX()+dx, fvd2.getY()+dy); - } - } - - protected void calcRepulsion(V v1) { - Point2D fvd1 = frVertexData.get(v1); - if(fvd1 == null) return; - fvd1.setLocation(0, 0); - boolean v1_locked = isLocked(v1); - - try { - for(V v2 : getGraph().getVertices()) { - - boolean v2_locked = isLocked(v2); - if (v1_locked && v2_locked) continue; - if (v1 != v2) { - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distanceSq(p2)); - - double force = (repulsion_constant * repulsion_constant);// / deltaLength; - - double forceOverDeltaLength = force / deltaLength; - - assert Double.isNaN(force) == false : 
"Unexpected mathematical result in FRLayout:calcPositions [repulsion]"; - - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()+2 * xDelta * forceOverDeltaLength, - fvd1.getY()+ 2 * yDelta * forceOverDeltaLength); - } else { - fvd1.setLocation(fvd1.getX()+xDelta * forceOverDeltaLength, - fvd1.getY()+yDelta * forceOverDeltaLength); - } - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); - } - } - - private void cool() { - temperature *= (1.0 - currentIteration / (double) maxIterations); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count, - * MAX_ITERATIONS. - */ - public boolean done() { - if (currentIteration > maxIterations || temperature < 1.0/max_dimension) { - if (!checked) - { -// System.out.println("current iteration: " + currentIteration); -// System.out.println("temperature: " + temperature); - checked = true; - } - return true; - } - return false; - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java deleted file mode 100644 index 4cf1c51c92..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 12, 2005 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Shape; -import java.util.Collection; - -/** - * Interface for coordinate-based selection of graph components. - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public interface GraphElementAccessor -{ - /** - * Returns a vertex which is associated with the - * location (x,y). This is typically determined - * with respect to the vertex's location as specified - * by a Layout. - */ - V getVertex(Layout layout, double x, double y); - - /** - * Returns the vertices contained within {@code rectangle} relative - * to {@code layout}. - */ - Collection getVertices(Layout layout, Shape rectangle); - - /** - * Returns an edge which is associated with the - * location (x,y). This is typically determined - * with respect to the edge's location as specified - * by a {@code Layout}. - */ - E getEdge(Layout layout, double x, double y); - -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java deleted file mode 100644 index bea8edaa46..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java +++ /dev/null @@ -1,231 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. 
-* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Implements a self-organizing map layout algorithm, based on Meyer's - * self-organizing graph methods. - * - * @author Yan Biao Boey - */ -public class ISOMLayout extends AbstractLayout implements IterativeContext { - - Map isomVertexData = - LazyMap.decorate(new HashMap(), - new Factory() { - public ISOMVertexData create() { - return new ISOMVertexData(); - }}); - - private int maxEpoch; - private int epoch; - - private int radiusConstantTime; - private int radius; - private int minRadius; - - private double adaption; - private double initialAdaption; - private double minAdaption; - - protected GraphElementAccessor elementAccessor = - new RadiusGraphElementAccessor(); - - private double coolingFactor; - - private List queue = new ArrayList(); - private String status = null; - - /** - * Returns the current number of epochs and execution status, as a string. - */ - public String getStatus() { - return status; - } - - /** - * Creates an ISOMLayout instance for the specified graph g. - * @param g - */ - public ISOMLayout(Graph g) { - super(g); - } - - public void initialize() { - - setInitializer(new RandomLocationTransformer(getSize())); - maxEpoch = 2000; - epoch = 1; - - radiusConstantTime = 100; - radius = 5; - minRadius = 1; - - initialAdaption = 90.0D / 100.0D; - adaption = initialAdaption; - minAdaption = 0; - - //factor = 0; //Will be set later on - coolingFactor = 2; - - //temperature = 0.03; - //initialJumpRadius = 100; - //jumpRadius = initialJumpRadius; - - //delay = 100; - } - - - /** - * Advances the current positions of the graph elements. 
- */ - public void step() { - status = "epoch: " + epoch + "; "; - if (epoch < maxEpoch) { - adjust(); - updateParameters(); - status += " status: running"; - - } else { - status += "adaption: " + adaption + "; "; - status += "status: done"; -// done = true; - } - } - - private synchronized void adjust() { - //Generate random position in graph space - Point2D tempXYD = new Point2D.Double(); - - // creates a new XY data location - tempXYD.setLocation(10 + Math.random() * getSize().getWidth(), - 10 + Math.random() * getSize().getHeight()); - - //Get closest vertex to random position - V winner = elementAccessor.getVertex(this, tempXYD.getX(), tempXYD.getY()); - - while(true) { - try { - for(V v : getGraph().getVertices()) { - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = false; - } - break; - } catch(ConcurrentModificationException cme) {} - } - adjustVertex(winner, tempXYD); - } - - private synchronized void updateParameters() { - epoch++; - double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch)); - adaption = Math.max(minAdaption, factor * initialAdaption); - //jumpRadius = (int) factor * jumpRadius; - //temperature = factor * temperature; - if ((radius > minRadius) && (epoch % radiusConstantTime == 0)) { - radius--; - } - } - - private synchronized void adjustVertex(V v, Point2D tempXYD) { - queue.clear(); - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = true; - queue.add(v); - V current; - - while (!queue.isEmpty()) { - current = queue.remove(0); - ISOMVertexData currData = getISOMVertexData(current); - Point2D currXYData = transform(current); - - double dx = tempXYD.getX() - currXYData.getX(); - double dy = tempXYD.getY() - currXYData.getY(); - double factor = adaption / Math.pow(2, currData.distance); - - currXYData.setLocation(currXYData.getX()+(factor*dx), currXYData.getY()+(factor*dy)); - - if (currData.distance < radius) { - Collection s = getGraph().getNeighbors(current); - while(true) { - try { - for(V child : s) { - ISOMVertexData childData = getISOMVertexData(child); - if (childData != null && !childData.visited) { - childData.visited = true; - childData.distance = currData.distance + 1; - queue.add(child); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - } - } - } - - protected ISOMVertexData getISOMVertexData(V v) { - return isomVertexData.get(v); - } - - /** - * This one is an incremental visualization. - * @return true is the layout algorithm is incremental, false otherwise - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true if the vertex positions are no longer being - * updated. Currently ISOMLayout stops updating vertex - * positions after a certain number of iterations have taken place. - * @return true if the vertex position updates have stopped, - * false otherwise - */ - public boolean done() { - return epoch >= maxEpoch; - } - - protected static class ISOMVertexData { - int distance; - boolean visited; - - protected ISOMVertexData() { - distance = 0; - visited = false; - } - } - - /** - * Resets the layout iteration count to 0, which allows the layout algorithm to - * continue updating vertex positions. 
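A sketch of running the ISOMLayout above to completion; the SparseMultigraph implementation and the sample data are assumptions, while setSize(), step(), done() and getStatus() are the methods shown here.

import java.awt.Dimension;
import edu.uci.ics.jung.algorithms.layout.ISOMLayout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class ISOMLayoutSketch {
    public static void main(String[] args) {
        Graph<String, Integer> graph = new SparseMultigraph<String, Integer>();
        graph.addVertex("a"); graph.addVertex("b"); graph.addVertex("c");
        graph.addEdge(1, "a", "b");
        graph.addEdge(2, "b", "c");

        ISOMLayout<String, Integer> layout = new ISOMLayout<String, Integer>(graph);
        layout.setSize(new Dimension(600, 600)); // setSize() calls initialize(), which seeds random locations

        while (!layout.done()) { // done() once epoch reaches maxEpoch (2000)
            layout.step();
        }
        System.out.println(layout.getStatus());
    }
}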
- */ - public void reset() { - epoch = 0; - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java deleted file mode 100644 index a1b9f40293..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java +++ /dev/null @@ -1,433 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; -/* - * This source is under the same license with JUNG. - * http://jung.sourceforge.net/license.txt for a description. - */ - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.algorithms.shortestpath.DistanceStatistics; -import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * Implements the Kamada-Kawai algorithm for node layout. - * Does not respect filter calls, and sometimes crashes when the view changes to it. - * - * @see "Tomihisa Kamada and Satoru Kawai: An algorithm for drawing general indirect graphs. Information Processing Letters 31(1):7-15, 1989" - * @see "Tomihisa Kamada: On visualization of abstract objects and relations. Ph.D. dissertation, Dept. of Information Science, Univ. of Tokyo, Dec. 1988." - * - * @author Masanori Harada - */ -public class KKLayout extends AbstractLayout implements IterativeContext { - - private double EPSILON = 0.1d; - - private int currentIteration; - private int maxIterations = 2000; - private String status = "KKLayout"; - - private double L; // the ideal length of an edge - private double K = 1; // arbitrary const number - private double[][] dm; // distance matrix - - private boolean adjustForGravity = true; - private boolean exchangeVertices = true; - - private V[] vertices; - private Point2D[] xydata; - - /** - * Retrieves graph distances between vertices of the visible graph - */ - protected Distance distance; - - /** - * The diameter of the visible graph. In other words, the maximum over all pairs - * of vertices of the length of the shortest path between a and bf the visible graph. - */ - protected double diameter; - - /** - * A multiplicative factor which partly specifies the "preferred" length of an edge (L). - */ - private double length_factor = 0.9; - - /** - * A multiplicative factor which specifies the fraction of the graph's diameter to be - * used as the inter-vertex distance between disconnected vertices. - */ - private double disconnected_multiplier = 0.5; - - /** - * Creates an instance for the specified graph. - */ - public KKLayout(Graph g) - { - this(g, new UnweightedShortestPath(g)); - } - - /** - * Creates an instance for the specified graph and distance metric. - */ - public KKLayout(Graph g, Distance distance){ - super(g); - this.distance = distance; - } - - /** - * Sets a multiplicative factor which - * partly specifies the "preferred" length of an edge (L). 
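A comparable sketch for the KKLayout above, which by default measures graph distances with UnweightedShortestPath (see its constructors); the SparseMultigraph implementation, the sample graph and the iteration cap are illustrative assumptions.

import java.awt.Dimension;
import edu.uci.ics.jung.algorithms.layout.KKLayout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class KKLayoutSketch {
    public static void main(String[] args) {
        Graph<String, Integer> graph = new SparseMultigraph<String, Integer>();
        graph.addVertex("a"); graph.addVertex("b"); graph.addVertex("c");
        graph.addEdge(1, "a", "b");
        graph.addEdge(2, "b", "c");

        KKLayout<String, Integer> layout = new KKLayout<String, Integer>(graph);
        layout.setLengthFactor(0.9);      // multiplier on the "preferred" edge length L
        layout.setMaxIterations(200);
        layout.setSize(new Dimension(600, 600)); // seeds random locations and builds the distance matrix

        while (!layout.done()) { // done() once currentIteration exceeds maxIterations
            layout.step();
        }
        System.out.println(layout.getStatus());
    }
}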
- */ - public void setLengthFactor(double length_factor){ - this.length_factor = length_factor; - } - - /** - * Sets a multiplicative factor that specifies the fraction of the graph's diameter to be - * used as the inter-vertex distance between disconnected vertices. - */ - public void setDisconnectedDistanceMultiplier(double disconnected_multiplier){ - this.disconnected_multiplier = disconnected_multiplier; - } - - /** - * Returns a string with information about the current status of the algorithm. - */ - public String getStatus() { - return status + this.getSize(); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count. - */ - public boolean done() { - if (currentIteration > maxIterations) { - return true; - } - return false; - } - - @SuppressWarnings("unchecked") - public void initialize() { - currentIteration = 0; - - if(graph != null && size != null) { - - double height = size.getHeight(); - double width = size.getWidth(); - - int n = graph.getVertexCount(); - dm = new double[n][n]; - vertices = (V[])graph.getVertices().toArray(); - xydata = new Point2D[n]; - - // assign IDs to all visible vertices - while(true) { - try { - int index = 0; - for(V v : graph.getVertices()) { - Point2D xyd = transform(v); - vertices[index] = v; - xydata[index] = xyd; - index++; - } - break; - } catch(ConcurrentModificationException cme) {} - } - - diameter = DistanceStatistics.diameter(graph, distance, true); - - double L0 = Math.min(height, width); - L = (L0 / diameter) * length_factor; // length_factor used to be hardcoded to 0.9 - //L = 0.75 * Math.sqrt(height * width / n); - - for (int i = 0; i < n - 1; i++) { - for (int j = i + 1; j < n; j++) { - Number d_ij = distance.getDistance(vertices[i], vertices[j]); - Number d_ji = distance.getDistance(vertices[j], vertices[i]); - double dist = diameter * disconnected_multiplier; - if (d_ij != null) - dist = Math.min(d_ij.doubleValue(), dist); - if (d_ji != null) - dist = Math.min(d_ji.doubleValue(), dist); - dm[i][j] = dm[j][i] = dist; - } - } - } - } - - public void step() { - try { - currentIteration++; - double energy = calcEnergy(); - status = "Kamada-Kawai V=" + getGraph().getVertexCount() - + "(" + getGraph().getVertexCount() + ")" - + " IT: " + currentIteration - + " E=" + energy - ; - - int n = getGraph().getVertexCount(); - if (n == 0) - return; - - double maxDeltaM = 0; - int pm = -1; // the node having max deltaM - for (int i = 0; i < n; i++) { - if (isLocked(vertices[i])) - continue; - double deltam = calcDeltaM(i); - - if (maxDeltaM < deltam) { - maxDeltaM = deltam; - pm = i; - } - } - if (pm == -1) - return; - - for (int i = 0; i < 100; i++) { - double[] dxy = calcDeltaXY(pm); - xydata[pm].setLocation(xydata[pm].getX()+dxy[0], xydata[pm].getY()+dxy[1]); - - double deltam = calcDeltaM(pm); - if (deltam < EPSILON) - break; - } - - if (adjustForGravity) - adjustForGravity(); - - if (exchangeVertices && maxDeltaM < EPSILON) { - energy = calcEnergy(); - for (int i = 0; i < n - 1; i++) { - if (isLocked(vertices[i])) - continue; - for (int j = i + 1; j < n; j++) { - if (isLocked(vertices[j])) - continue; - double xenergy = calcEnergyIfExchanged(i, j); - if (energy > xenergy) { - double sx = xydata[i].getX(); - double sy = xydata[i].getY(); - 
xydata[i].setLocation(xydata[j]); - xydata[j].setLocation(sx, sy); - return; - } - } - } - } - } - finally { -// fireStateChanged(); - } - } - - /** - * Shift all vertices so that the center of gravity is located at - * the center of the screen. - */ - public void adjustForGravity() { - Dimension d = getSize(); - double height = d.getHeight(); - double width = d.getWidth(); - double gx = 0; - double gy = 0; - for (int i = 0; i < xydata.length; i++) { - gx += xydata[i].getX(); - gy += xydata[i].getY(); - } - gx /= xydata.length; - gy /= xydata.length; - double diffx = width / 2 - gx; - double diffy = height / 2 - gy; - for (int i = 0; i < xydata.length; i++) { - xydata[i].setLocation(xydata[i].getX()+diffx, xydata[i].getY()+diffy); - } - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.layout.AbstractLayout#setSize(java.awt.Dimension) - */ - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - /** - * Enable or disable gravity point adjusting. - */ - public void setAdjustForGravity(boolean on) { - adjustForGravity = on; - } - - /** - * Returns true if gravity point adjusting is enabled. - */ - public boolean getAdjustForGravity() { - return adjustForGravity; - } - - /** - * Enable or disable the local minimum escape technique by - * exchanging vertices. - */ - public void setExchangeVertices(boolean on) { - exchangeVertices = on; - } - - /** - * Returns true if the local minimum escape technique by - * exchanging vertices is enabled. - */ - public boolean getExchangeVertices() { - return exchangeVertices; - } - - /** - * Determines a step to new position of the vertex m. - */ - private double[] calcDeltaXY(int m) { - double dE_dxm = 0; - double dE_dym = 0; - double d2E_d2xm = 0; - double d2E_dxmdym = 0; - double d2E_dymdxm = 0; - double d2E_d2ym = 0; - - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - double ddd = d * d * d; - - dE_dxm += k_mi * (1 - l_mi / d) * dx; - dE_dym += k_mi * (1 - l_mi / d) * dy; - d2E_d2xm += k_mi * (1 - l_mi * dy * dy / ddd); - d2E_dxmdym += k_mi * l_mi * dx * dy / ddd; - d2E_d2ym += k_mi * (1 - l_mi * dx * dx / ddd); - } - } - // d2E_dymdxm equals to d2E_dxmdym. - d2E_dymdxm = d2E_dxmdym; - - double denomi = d2E_d2xm * d2E_d2ym - d2E_dxmdym * d2E_dymdxm; - double deltaX = (d2E_dxmdym * dE_dym - d2E_d2ym * dE_dxm) / denomi; - double deltaY = (d2E_dymdxm * dE_dxm - d2E_d2xm * dE_dym) / denomi; - return new double[]{deltaX, deltaY}; - } - - /** - * Calculates the gradient of energy function at the vertex m. - */ - private double calcDeltaM(int m) { - double dEdxm = 0; - double dEdym = 0; - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - double common = k_mi * (1 - l_mi / d); - dEdxm += common * dx; - dEdym += common * dy; - } - } - return Math.sqrt(dEdxm * dEdxm + dEdym * dEdym); - } - - /** - * Calculates the energy function E. 
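Aside (restating the arithmetic above, not adding behavior): calcDeltaXY() performs one Newton step on the energy restricted to vertex m, solving the 2x2 system H * (deltaX, deltaY) = -(dE_dxm, dE_dym) by Cramer's rule, with denomi holding the determinant of the Hessian H. Likewise, the per-pair term k_ij/2 * (dx*dx + dy*dy + l_ij*l_ij - 2*l_ij*d) used in the energy calculation below is the expansion of k_ij/2 * (d - l_ij)^2, since d^2 = dx^2 + dy^2: each vertex pair is penalized like a spring for deviating from its ideal length l_ij = L * d_ij.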
- */ - private double calcEnergy() { - double energy = 0; - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[i].getX() - xydata[j].getX(); - double dy = xydata[i].getY() - xydata[j].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - /** - * Calculates the energy function E as if positions of the - * specified vertices are exchanged. - */ - private double calcEnergyIfExchanged(int p, int q) { - if (p >= q) - throw new RuntimeException("p should be < q"); - double energy = 0; // < 0 - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - int ii = i; - int jj = j; - if (i == p) ii = q; - if (j == q) jj = p; - - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[ii].getX() - xydata[jj].getX(); - double dy = xydata[ii].getY() - xydata[jj].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - public void reset() { - currentIteration = 0; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java deleted file mode 100644 index 5162ac5972..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java +++ /dev/null @@ -1,93 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A generalized interface is a mechanism for returning (x,y) coordinates - * from vertices. In general, most of these methods are used to both control and - * get information from the layout algorithm. - *

      - * @author danyelf - * @author tom nelson - */ -public interface Layout extends Transformer { - - /** - * Initializes fields in the node that may not have - * been set during the constructor. Must be called before - * the iterations begin. - */ - void initialize(); - - /** - * provides initial locations for all vertices. - * @param initializer - */ - void setInitializer(Transformer initializer); - - /** - * setter for graph - * @param graph - */ - void setGraph(Graph graph); - - /** - * Returns the full graph (the one that was passed in at - * construction time) that this Layout refers to. - * - */ - Graph getGraph(); - - /** - * - * - */ - void reset(); - - /** - * @param d - */ - void setSize(Dimension d); - - /** - * Returns the current size of the visualization's space. - */ - Dimension getSize(); - - - /** - * Sets a flag which fixes this vertex in place. - * - * @param v vertex - */ - void lock(V v, boolean state); - - /** - * Returns true if the position of vertex v - * is locked. - */ - boolean isLocked(V v); - - /** - * set the location of a vertex - * @param v - * @param location - */ - void setLocation(V v, Point2D location); - - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java deleted file mode 100644 index b1f25958cb..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * a pure decorator for the Layout interface. Intended to be overridden - * to provide specific behavior decoration - * - * @author Tom Nelson - * - */ -public abstract class LayoutDecorator implements Layout, IterativeContext { - - protected Layout delegate; - - /** - * Creates an instance backed by the specified delegate layout. - */ - public LayoutDecorator(Layout delegate) { - this.delegate = delegate; - } - - /** - * Returns the backing (delegate) layout. - */ - public Layout getDelegate() { - return delegate; - } - - /** - * Sets the backing (delegate) layout. 
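Aside (illustrative, not part of the deleted sources): because LayoutDecorator forwards every Layout and IterativeContext call to its delegate, a concrete subclass only needs to override the calls it wants to intercept. A minimal sketch of such a subclass, with a hypothetical name:

    import edu.uci.ics.jung.algorithms.layout.Layout;
    import edu.uci.ics.jung.algorithms.layout.LayoutDecorator;

    // Hypothetical subclass, shown only to illustrate the decorator pattern;
    // it intercepts step() and forwards everything else to the delegate.
    public class CountingLayout<V, E> extends LayoutDecorator<V, E> {
        private int steps;

        public CountingLayout(Layout<V, E> delegate) {
            super(delegate);
        }

        @Override
        public void step() {
            steps++;        // count the call ...
            super.step();   // ... then forward to the backing layout
        }

        public int getSteps() {
            return steps;
        }
    }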
- */ - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeContext#done() - */ - public void step() { - if(delegate instanceof IterativeContext) { - ((IterativeContext)delegate).step(); - } - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#initialize() - */ - public void initialize() { - delegate.initialize(); - } - - /** - * @param initializer - * @see edu.uci.ics.jung.algorithms.layout.Layout#setInitializer(org.apache.commons.collections15.Transformer) - */ - public void setInitializer(Transformer initializer) { - delegate.setInitializer(initializer); - } - - /** - * @param v - * @param location - * @see edu.uci.ics.jung.algorithms.layout.Layout#setLocation(java.lang.Object, java.awt.geom.Point2D) - */ - public void setLocation(V v, Point2D location) { - delegate.setLocation(v, location); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#getSize() - */ - public Dimension getSize() { - return delegate.getSize(); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#getGraph() - */ - public Graph getGraph() { - return delegate.getGraph(); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#transform(Object) - */ - public Point2D transform(V v) { - return delegate.transform(v); - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeContext#done() - */ - public boolean done() { - if(delegate instanceof IterativeContext) { - return ((IterativeContext)delegate).done(); - } - return true; - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#lock(Object, boolean) - */ - public void lock(V v, boolean state) { - delegate.lock(v, state); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#isLocked(Object) - */ - public boolean isLocked(V v) { - return delegate.isLocked(v); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#setSize(Dimension) - */ - public void setSize(Dimension d) { - delegate.setSize(d); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#reset() - */ - public void reset() { - delegate.reset(); - } - - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java deleted file mode 100644 index aa3dc7b411..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java +++ /dev/null @@ -1,103 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.geom.Point2D; - -/** - * Represents a point in polar coordinates: distance and angle from the origin. - * Includes conversions between polar and Cartesian - * coordinates (Point2D). - * - * @author Tom Nelson - tomnelson@dev.java.net - */ -public class PolarPoint -{ - double theta; - double radius; - - /** - * Creates a new instance with radius and angle each 0. - */ - public PolarPoint() { - this(0,0); - } - - /** - * Creates a new instance with radius {@code radius} and angle {@code theta}. 
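Aside (a quick numeric check of the polar/Cartesian conversions defined below, illustrative only): cartesianToPolar(1, 1) returns theta = atan2(1, 1) = PI/4 (about 0.785) and radius = sqrt(1*1 + 1*1) = sqrt(2) (about 1.414); passing that result to polarToCartesian gives back approximately (1, 1), so the two conversions round-trip as expected.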
- */ - public PolarPoint(double theta, double radius) { - this.theta = theta; - this.radius = radius; - } - - /** - * Returns the angle for this point. - */ - public double getTheta() { return theta; } - - /** - * Returns the radius for this point. - */ - public double getRadius() { return radius; } - - /** - * Sets the angle for this point to {@code theta}. - */ - public void setTheta(double theta) { this.theta = theta; } - - /** - * Sets the radius for this point to {@code theta}. - */ - public void setRadius(double radius) { this.radius = radius; } - - /** - * Returns the result of converting polar to Cartesian coordinates. - */ - public static Point2D polarToCartesian(PolarPoint polar) { - return polarToCartesian(polar.getTheta(), polar.getRadius()); - } - - /** - * Returns the result of converting (theta, radius) to Cartesian coordinates. - */ - public static Point2D polarToCartesian(double theta, double radius) { - return new Point2D.Double(radius*Math.cos(theta), radius*Math.sin(theta)); - } - - /** - * Returns the result of converting point to polar coordinates. - */ - public static PolarPoint cartesianToPolar(Point2D point) { - return cartesianToPolar(point.getX(), point.getY()); - } - - /** - * Returns the result of converting (x, y) to polar coordinates. - */ - public static PolarPoint cartesianToPolar(double x, double y) { - double theta = Math.atan2(y,x); - double radius = Math.sqrt(x*x+y*y); - return new PolarPoint(theta, radius); - } - - @Override - public String toString() { - return "PolarPoint[" + radius + "," + theta +"]"; - } - - /** - * Sets the angle and radius of this point to those of {@code p}. - */ - public void setLocation(PolarPoint p) { - this.theta = p.getTheta(); - this.radius = p.getRadius(); - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java deleted file mode 100644 index 457bd961f1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Forest; - -/** - * A radial layout for Tree or Forest graphs. - * - * @author Tom Nelson - * - */ -public class RadialTreeLayout extends TreeLayout { - - protected Map polarLocations; - - /** - * Creates an instance for the specified graph with default X and Y distances. - */ - public RadialTreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph and X distance with - * default Y distance. - */ - public RadialTreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph, X distance, and Y distance. 
- */ - public RadialTreeLayout(Forest g, int distx, int disty) { - super(g, distx, disty); - } - - @Override - protected void buildTree() { - super.buildTree(); - this.polarLocations = new HashMap(); - setRadialLocations(); - } - - @Override - public void setSize(Dimension size) { - this.size = size; - buildTree(); - } - - @Override - protected void setCurrentPositionFor(V vertex) { - locations.get(vertex).setLocation(m_currentPoint); - } - - @Override - public void setLocation(V v, Point2D location) - { - Point2D c = getCenter(); - Point2D pv = new Point2D.Double(location.getX() - c.getX(), - location.getY() - c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - PolarPoint currentLocation = polarLocations.get(v); - if (currentLocation == null) - polarLocations.put(v, newLocation); - else - currentLocation.setLocation(newLocation); - } - - /** - * Returns the map from vertices to their locations in polar coordinates. - */ - public Map getPolarLocations() { - return polarLocations; - } - - @Override - public Point2D transform(V v) { - PolarPoint pp = polarLocations.get(v); - double centerX = getSize().getWidth()/2; - double centerY = getSize().getHeight()/2; - Point2D cartesian = PolarPoint.polarToCartesian(pp); - cartesian.setLocation(cartesian.getX()+centerX,cartesian.getY()+centerY); - return cartesian; - } - - private Point2D getMaxXY() { - double maxx = 0; - double maxy = 0; - for(Point2D p : locations.values()) { - maxx = Math.max(maxx, p.getX()); - maxy = Math.max(maxy, p.getY()); - } - return new Point2D.Double(maxx,maxy); - } - - private void setRadialLocations() { - Point2D max = getMaxXY(); - double maxx = max.getX(); - double maxy = max.getY(); - maxx = Math.max(maxx, size.width); - double theta = 2*Math.PI/maxx; - - double deltaRadius = size.width/2/maxy; - for(Map.Entry entry : locations.entrySet()) { - V v = entry.getKey(); - Point2D p = entry.getValue(); - PolarPoint polarPoint = new PolarPoint(p.getX()*theta, (p.getY() - this.distY)*deltaRadius); - polarLocations.put(v, polarPoint); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java deleted file mode 100644 index 5f12c3ca4e..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 12, 2005 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -import edu.uci.ics.jung.graph.Graph; - - -/** - * Simple implementation of PickSupport that returns the vertex or edge - * that is closest to the specified location. This implementation - * provides the same picking options that were available in - * previous versions of AbstractLayout. - * - *

      No element will be returned that is farther away than the specified - * maximum distance. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public class RadiusGraphElementAccessor implements GraphElementAccessor { - - protected double maxDistance; - - /** - * Creates an instance with an effectively infinite default maximum distance. - */ - public RadiusGraphElementAccessor() { - this(Math.sqrt(Double.MAX_VALUE - 1000)); - } - - /** - * Creates an instance with the specified default maximum distance. - */ - public RadiusGraphElementAccessor(double maxDistance) { - this.maxDistance = maxDistance; - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provde a more efficient implementation. - */ - public V getVertex(Layout layout, double x, double y) { - return getVertex(layout, x, y, this.maxDistance); - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provde a more efficient implementation. - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - */ - public V getVertex(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - V closest = null; - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { - - Point2D p = layout.transform(v); - double dx = p.getX() - x; - double dy = p.getY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - public Collection getVertices(Layout layout, Shape rectangle) { - Set pickedVertices = new HashSet(); - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { - - Point2D p = layout.transform(v); - if(rectangle.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected. - * Calls the longer form of the call. - */ - public E getEdge(Layout layout, double x, double y) { - return getEdge(layout, x, y, this.maxDistance); - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected, - * within a distance of maxDistance, Iterates through all - * visible edges and checks their distance from the click. Override this - * method to provide a more efficient implementation. - * - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - * @return Edge closest to the click. - */ - public E getEdge(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - E closest = null; - while(true) { - try { - for(E e : layout.getGraph().getEdges()) { - - // Could replace all this set stuff with getFrom_internal() etc. 
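Aside (explaining the arithmetic that follows, not part of the deleted file): b is the position of the click's perpendicular projection onto the segment (v1, v2), normalized so that b = 0 at v1 and b = 1 at v2. When b falls outside [0, 1] the nearest point on the segment is the corresponding endpoint; otherwise it is the foot of the perpendicular at (x3, y3), and distance2 is the squared distance from the click to that nearest point.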
- Graph graph = layout.getGraph(); - Collection vertices = graph.getIncidentVertices(e); - Iterator vertexIterator = vertices.iterator(); - V v1 = vertexIterator.next(); - V v2 = vertexIterator.next(); - // Get coords - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - double x1 = p1.getX(); - double y1 = p1.getY(); - double x2 = p2.getX(); - double y2 = p2.getY(); - // Calculate location on line closest to (x,y) - // First, check that v1 and v2 are not coincident. - if (x1 == x2 && y1 == y2) - continue; - double b = - ((y - y1) * (y2 - y1) + (x - x1) * (x2 - x1)) - / ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)); - // - double distance2; // square of the distance - if (b <= 0) - distance2 = (x - x1) * (x - x1) + (y - y1) * (y - y1); - else if (b >= 1) - distance2 = (x - x2) * (x - x2) + (y - y2) * (y - y2); - else { - double x3 = x1 + b * (x2 - x1); - double y3 = y1 + b * (y2 - y1); - distance2 = (x - x3) * (x - x3) + (y - y3) * (y - y3); - } - - if (distance2 < minDistance) { - minDistance = distance2; - closest = e; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java deleted file mode 100644 index d21c2a158e..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.Dimension; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -/** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class SpringLayout extends AbstractLayout implements IterativeContext { - - protected double stretch = 0.70; - protected Transformer lengthFunction; - protected int repulsion_range_sq = 100 * 100; - protected double force_multiplier = 1.0 / 3.0; - - protected Map springVertexData = - LazyMap.decorate(new HashMap(), - new Factory() { - public SpringVertexData create() { - return new SpringVertexData(); - }}); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. 
- */ - @SuppressWarnings("unchecked") - public SpringLayout(Graph g) { - this(g, new ConstantTransformer(30)); - } - - /** - * Constructor for a SpringLayout for a raw graph with associated component. - * - * @param g the {@code Graph} to lay out - * @param length_function provides a length for each edge - */ - public SpringLayout(Graph g, Transformer length_function) - { - super(g); - this.lengthFunction = length_function; - } - - /** - * Returns the current value for the stretch parameter. - * @see #setStretch(double) - */ - public double getStretch() { - return stretch; - } - - /** - * Sets the dimensions of the available space for layout to {@code size}. - */ - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - /** - *

Sets the stretch parameter for this instance. This value - * specifies how much the degrees of an edge's incident vertices - * should influence how easily the endpoints of that edge - * can move (that is, that edge's tendency to change its length). - * - * The default value is 0.70. Positive values less than 1 cause - * high-degree vertices to move less than low-degree vertices, and - * values > 1 cause high-degree vertices to move more than - * low-degree vertices. Negative values will have unpredictable - * and inconsistent results.
      - * @param stretch - */ - public void setStretch(double stretch) { - this.stretch = stretch; - } - - /** - * Returns the current value for the node repulsion range. - * @see #setRepulsionRange(int) - */ - public int getRepulsionRange() { - return (int)(Math.sqrt(repulsion_range_sq)); - } - - /** - * Sets the node repulsion range (in drawing area units) for this instance. - * Outside this range, nodes do not repel each other. The default value - * is 100. Negative values are treated as their positive equivalents. - * @param range - */ - public void setRepulsionRange(int range) { - this.repulsion_range_sq = range * range; - } - - /** - * Returns the current value for the edge length force multiplier. - * @see #setForceMultiplier(double) - */ - public double getForceMultiplier() { - return force_multiplier; - } - - /** - * Sets the force multiplier for this instance. This value is used to - * specify how strongly an edge "wants" to be its default length - * (higher values indicate a greater attraction for the default length), - * which affects how much its endpoints move at each timestep. - * The default value is 1/3. A value of 0 turns off any attempt by the - * layout to cause edges to conform to the default length. Negative - * values cause long edges to get longer and short edges to get shorter; use - * at your own risk. - */ - public void setForceMultiplier(double force) { - this.force_multiplier = force; - } - - public void initialize() { - } - - /** - * Relaxation step. Moves all nodes a smidge. - */ - public void step() { - try { - for(V v : getGraph().getVertices()) { - SpringVertexData svd = springVertexData.get(v); - if (svd == null) { - continue; - } - svd.dx /= 4; - svd.dy /= 4; - svd.edgedx = svd.edgedy = 0; - svd.repulsiondx = svd.repulsiondy = 0; - } - } catch(ConcurrentModificationException cme) { - step(); - } - - relaxEdges(); - calculateRepulsion(); - moveNodes(); - } - - protected void relaxEdges() { - try { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - double desiredLen = lengthFunction.transform(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch, (getGraph().degree(v1) + getGraph().degree(v2) - 2)); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. 
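Aside (worked arithmetic for the force f computed above, not part of the deleted file): the net edge force is f = force_multiplier * (desiredLen - len) / len, scaled by stretch^(degree(v1) + degree(v2) - 2). With the defaults (force_multiplier = 1/3, stretch = 0.70), an edge between two degree-1 vertices keeps the full factor 1 (0.7^0), while an edge whose endpoints have a combined degree of 6 is damped by 0.7^4, roughly 0.24, so edges attached to hubs adjust their length about four times more slowly.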
- double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.get(v1); - v2D = springVertexData.get(v2); - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } catch(ConcurrentModificationException cme) { - relaxEdges(); - } - } - - protected void calculateRepulsion() { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - - SpringVertexData svd = springVertexData.get(v); - if(svd == null) continue; - double dx = 0, dy = 0; - - for (V v2 : getGraph().getVertices()) { - if (v == v2) continue; - Point2D p = transform(v); - Point2D p2 = transform(v2); - if(p == null || p2 == null) continue; - double vx = p.getX() - p2.getX(); - double vy = p.getY() - p2.getY(); - double distanceSq = p.distanceSq(p2); - if (distanceSq == 0) { - dx += Math.random(); - dy += Math.random(); - } else if (distanceSq < repulsion_range_sq) { - double factor = 1; - dx += factor * vx / distanceSq; - dy += factor * vy / distanceSq; - } - } - double dlen = dx * dx + dy * dy; - if (dlen > 0) { - dlen = Math.sqrt(dlen) / 2; - svd.repulsiondx += dx / dlen; - svd.repulsiondy += dy / dlen; - } - } - } catch(ConcurrentModificationException cme) { - calculateRepulsion(); - } - } - - protected void moveNodes() - { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.get(v); - if(vd == null) continue; - Point2D xyd = transform(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(), 0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } - } - } - - protected static class SpringVertexData { - protected double edgedx; - protected double edgedy; - protected double repulsiondx; - protected double repulsiondy; - - /** movement speed, x */ - protected double dx; - - /** movement speed, y */ - protected double dy; - } - - - /** - * Used for changing the size of the layout in response to a component's size. - */ - public class SpringDimensionChecker extends ComponentAdapter { - @Override - public void componentResized(ComponentEvent e) { - setSize(e.getComponent().getSize()); - } - } - - /** - * This one is an incremental visualization - */ - public boolean isIncremental() { - return true; - } - - /** - * For now, we pretend it never finishes. - */ - public boolean done() { - return false; - } - - /** - * No effect. - */ - public void reset() { - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java deleted file mode 100644 index e62a30c198..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. - * - * - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class SpringLayout2 extends SpringLayout -{ - protected int currentIteration; - protected int averageCounter; - protected int loopCountMax = 4; - protected boolean done; - - protected Point2D averageDelta = new Point2D.Double(); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. - */ - @SuppressWarnings("unchecked") - public SpringLayout2(Graph g) { - super(g); - } - - /** - * Constructor for a SpringLayout for a raw graph with associated component. - * - * @param g the {@code Graph} to lay out - * @param length_function provides a length for each edge - */ - public SpringLayout2(Graph g, Transformer length_function) - { - super(g, length_function); - } - - /** - * Relaxation step. Moves all nodes a smidge. - */ - @Override - public void step() { - super.step(); - currentIteration++; - testAverageDeltas(); - } - - private void testAverageDeltas() { - double dx = this.averageDelta.getX(); - double dy = this.averageDelta.getY(); - if(Math.abs(dx) < .001 && Math.abs(dy) < .001) { - done = true; - System.err.println("done, dx="+dx+", dy="+dy); - } - if(currentIteration > loopCountMax) { - this.averageDelta.setLocation(0,0); - averageCounter = 0; - currentIteration = 0; - } - } - - @Override - protected void moveNodes() { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.get(v); - if(vd == null) continue; - Point2D xyd = transform(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - -// int currentCount = currentIteration % this.loopCountMax; -// System.err.println(averageCounter+" --- vd.dx="+vd.dx+", vd.dy="+vd.dy); -// System.err.println("averageDelta was "+averageDelta); - - averageDelta.setLocation( - ((averageDelta.getX() * averageCounter) + vd.dx) / (averageCounter+1), - ((averageDelta.getY() * averageCounter) + vd.dy) / (averageCounter+1) - ); -// System.err.println("averageDelta now "+averageDelta); -// System.err.println(); - averageCounter++; - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY());// setX(0); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); //setX(width); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(),0);//setY(0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); //setY(height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } - } - } - - 
@Override - public boolean done() { - return done; - } - -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java deleted file mode 100644 index 31b32554c2..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Created on Jul 21, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * StaticLayout places the vertices in the locations specified by its Transformer - * initializer. Vertex locations can be placed in a Map and then supplied to - * this layout as follows: - * - Transformer vertexLocations = - TransformerUtils.mapTransformer(map); - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class StaticLayout extends AbstractLayout { - - /** - * Creates an instance for the specified graph, locations, and size. - */ - public StaticLayout(Graph graph, Transformer initializer, Dimension size) { - super(graph, initializer, size); - } - - /** - * Creates an instance for the specified graph and locations, with default size. - */ - public StaticLayout(Graph graph, Transformer initializer) { - super(graph, initializer); - } - - /** - * Creates an instance for the specified graph and default size; vertex locations - * are randomly assigned. - */ - public StaticLayout(Graph graph) { - super(graph); - } - - /** - * Creates an instance for the specified graph and size. - */ - public StaticLayout(Graph graph, Dimension size) { - super(graph, size); - } - - public void initialize() {} - - public void reset() {} - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java deleted file mode 100644 index 4bebd3a9b1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.Point; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * @author Karlheinz Toni - * @author Tom Nelson - converted to jung2 - * - */ - -public class TreeLayout implements Layout { - - protected Dimension size = new Dimension(600,600); - protected Forest graph; - protected Map basePositions = new HashMap(); - - protected Map locations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public Point2D transform(V arg0) { - return new Point2D.Double(); - }}); - - protected transient Set alreadyDone = new HashSet(); - - /** - * The default horizontal vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTX = 50; - - /** - * The default vertical vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTY = 50; - - /** - * The horizontal vertex spacing. Defaults to {@code DEFAULT_XDIST}. - */ - protected int distX = 50; - - /** - * The vertical vertex spacing. Defaults to {@code DEFAULT_YDIST}. - */ - protected int distY = 50; - - protected transient Point m_currentPoint = new Point(); - - /** - * Creates an instance for the specified graph with default X and Y distances. - */ - public TreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph and X distance with - * default Y distance. - */ - public TreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph, X distance, and Y distance. 
- */ - public TreeLayout(Forest g, int distx, int disty) { - if (g == null) - throw new IllegalArgumentException("Graph must be non-null"); - if (distx < 1 || disty < 1) - throw new IllegalArgumentException("X and Y distances must each be positive"); - this.graph = g; - this.distX = distx; - this.distY = disty; - buildTree(); - } - - protected void buildTree() { - this.m_currentPoint = new Point(0, 20); - Collection roots = TreeUtils.getRoots(graph); - if (roots.size() > 0 && graph != null) { - calculateDimensionX(roots); - for(V v : roots) { - calculateDimensionX(v); - m_currentPoint.x += this.basePositions.get(v)/2 + this.distX; - buildTree(v, this.m_currentPoint.x); - } - } - int width = 0; - for(V v : roots) { - width += basePositions.get(v); - } - } - - protected void buildTree(V v, int x) { - - if (!alreadyDone.contains(v)) { - alreadyDone.add(v); - - //go one level further down - this.m_currentPoint.y += this.distY; - this.m_currentPoint.x = x; - - this.setCurrentPositionFor(v); - - int sizeXofCurrent = basePositions.get(v); - - int lastX = x - sizeXofCurrent / 2; - - int sizeXofChild; - int startXofChild; - - for (V element : graph.getSuccessors(v)) { - sizeXofChild = this.basePositions.get(element); - startXofChild = lastX + sizeXofChild / 2; - buildTree(element, startXofChild); - lastX = lastX + sizeXofChild + distX; - } - this.m_currentPoint.y -= this.distY; - } - } - - private int calculateDimensionX(V v) { - - int size = 0; - int childrenNum = graph.getSuccessors(v).size(); - - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); - - return size; - } - - private int calculateDimensionX(Collection roots) { - - int size = 0; - for(V v : roots) { - int childrenNum = graph.getSuccessors(v).size(); - - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); - } - - return size; - } - - /** - * This method is not supported by this class. The size of the layout - * is determined by the topology of the tree, and by the horizontal - * and vertical spacing (optionally set by the constructor). - */ - public void setSize(Dimension size) { - throw new UnsupportedOperationException("Size of TreeLayout is set" + - " by vertex spacing in constructor"); - } - - protected void setCurrentPositionFor(V vertex) { - int x = m_currentPoint.x; - int y = m_currentPoint.y; - if(x < 0) size.width -= x; - - if(x > size.width-distX) - size.width = x + distX; - - if(y < 0) size.height -= y; - if(y > size.height-distY) - size.height = y + distY; - locations.get(vertex).setLocation(m_currentPoint); - - } - - public Graph getGraph() { - return graph; - } - - public Dimension getSize() { - return size; - } - - public void initialize() { - - } - - public boolean isLocked(V v) { - return false; - } - - public void lock(V v, boolean state) { - } - - public void reset() { - } - - public void setGraph(Graph graph) { - if(graph instanceof Forest) { - this.graph = (Forest)graph; - buildTree(); - } else { - throw new IllegalArgumentException("graph must be a Forest"); - } - } - - public void setInitializer(Transformer initializer) { - } - - /** - * Returns the center of this layout's area. 
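Aside (illustrative usage, not part of the deleted sources): the TreeLayout above computes its own size from the tree topology and the X/Y spacing, which is why setSize() is unsupported. A minimal sketch, assuming jung2's DelegateForest as the Forest implementation and arbitrary vertex/edge labels:

    import java.awt.geom.Point2D;
    import edu.uci.ics.jung.algorithms.layout.TreeLayout;
    import edu.uci.ics.jung.graph.DelegateForest;
    import edu.uci.ics.jung.graph.Forest;

    // Hypothetical usage sketch.
    public class TreeLayoutSketch {
        public static void main(String[] args) {
            Forest<String, Integer> tree = new DelegateForest<String, Integer>();
            tree.addVertex("root");
            tree.addVertex("left");
            tree.addVertex("right");
            tree.addEdge(1, "root", "left");    // edge points from parent to child
            tree.addEdge(2, "root", "right");
            TreeLayout<String, Integer> layout = new TreeLayout<String, Integer>(tree, 80, 60);
            Point2D rootPos = layout.transform("root");   // coordinates assigned by buildTree()
            System.out.println(rootPos);
        }
    }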
- */ - public Point2D getCenter() { - return new Point2D.Double(size.getWidth()/2,size.getHeight()/2); - } - - public void setLocation(V v, Point2D location) { - locations.get(v).setLocation(location); - } - - public Point2D transform(V v) { - return locations.get(v); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/package.html deleted file mode 100644 index a5ed0d05a2..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/package.html +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - -Algorithms for assigning 2D coordinates (typically used for graph visualizations) -to vertices. -Current layout algorithms include: -
        -
      • Layout, AbstractLayout: interface and abstract class defining the Layout contract and handling -some common implementation details -
      • AggregateLayout: allows multiple layouts to be combined and manipulated as one layout -
      • BalloonLayout: places vertices on nested circles (trees/forests only) -
      • CircleLayout: places vertices on a circle -
      • DAGLayout: places vertices in a hierarchy (directed acyclic graphs only) -
      • FRLayout: Fruchterman-Reingold algorithm (force-directed) -
      • ISOMLayout: self-organizing map layout -
      • KKLayout: Kamada-Kawai algorithm (tries to maintain specified distances) -
      • RadialTreeLayout: places vertices on concentric circles (trees only) -
      • SpringLayout: simple force-directed layout -
      • StaticLayout: places vertices at user-specified locations -
      • TreeLayout: simple tree/forest layout -
      - -Rendering and other aspects of visualization are handled in the visualization package. - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java deleted file mode 100644 index 34428b18fb..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Created on Jul 19, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout.util; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.Date; -import java.util.Random; - -import org.apache.commons.collections15.Transformer; - -/** - * Transforms the input type into a random location within - * the bounds of the Dimension property. - * This is used as the backing Transformer for the LazyMap - * for many Layouts, - * and provides a random location for unmapped vertices - * the first time they are accessed. - * - * @author Tom Nelson - * - * @param - */ -public class RandomLocationTransformer implements Transformer { - - Dimension d; - Random random; - - /** - * Creates an instance with the specified size which uses the current time - * as the random seed. - */ - public RandomLocationTransformer(Dimension d) { - this(d, new Date().getTime()); - } - - /** - * Creates an instance with the specified dimension and random seed. - * @param d - * @param seed - */ - public RandomLocationTransformer(final Dimension d, long seed) { - this.d = d; - this.random = new Random(seed); - } - - public Point2D transform(V v) { - return new Point2D.Double(random.nextDouble() * d.width, random.nextDouble() * d.height); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java deleted file mode 100644 index a31113f190..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java +++ /dev/null @@ -1,43 +0,0 @@ -package edu.uci.ics.jung.algorithms.layout.util; - -/** - * Interface for operating the relax iterations on a layout. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public interface Relaxer { - - /** - * Execute a loop of steps in a new Thread, - * firing an event after each step. - */ - void relax(); - - /** - * Execute a loop of steps in the calling - * thread, firing no events. - */ - void prerelax(); - - /** - * Make the relaxer thread wait. - */ - void pause(); - - /** - * Make the relaxer thread resume. - * - */ - void resume(); - - /** - * Set flags to stop the relaxer thread. - */ - void stop(); - - /** - * Sets the sleep time. 
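Aside (illustrative, not part of the deleted sources): a typical way to wire a Relaxer to an iterative layout, using the VisRunner implementation that follows; the helper class and method names are hypothetical.

    import edu.uci.ics.jung.algorithms.layout.util.Relaxer;
    import edu.uci.ics.jung.algorithms.layout.util.VisRunner;
    import edu.uci.ics.jung.algorithms.util.IterativeContext;

    // Hypothetical helper; any IterativeContext (e.g. a SpringLayout) can be passed in.
    public final class RelaxerSketch {
        public static Relaxer start(IterativeContext layout) {
            Relaxer relaxer = new VisRunner(layout);
            relaxer.setSleepTime(50L);   // pause between iteration loops, in milliseconds
            relaxer.prerelax();          // short synchronous burst (up to ~500 ms)
            relaxer.relax();             // then keep stepping on a low-priority background thread
            return relaxer;              // caller invokes stop() to end the thread
        }
    }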
- */ - void setSleepTime(long i); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java deleted file mode 100644 index 14f6dfc86c..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.algorithms.layout.util; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; - -/** - * - * Implementation of a relaxer thread for layouts. - * Extracted from the {@code VisualizationModel} in previous - * versions of JUNG. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class VisRunner implements Relaxer, Runnable { - - protected boolean running; - protected IterativeContext process; - protected boolean stop; - protected boolean manualSuspend; - protected Thread thread; - - /** - * how long the relaxer thread pauses between iteration loops. - */ - protected long sleepTime = 100L; - - - /** - * Creates an instance for the specified process. - */ - public VisRunner(IterativeContext process) { - this.process = process; - } - - /** - * @return the relaxerThreadSleepTime - */ - public long getSleepTime() { - return sleepTime; - } - - /** - * @param sleepTime the sleep time to set for this thread - */ - public void setSleepTime(long sleepTime) { - this.sleepTime = sleepTime; - } - - public void prerelax() { - manualSuspend = true; - long timeNow = System.currentTimeMillis(); - while (System.currentTimeMillis() - timeNow < 500 && !process.done()) { - process.step(); - } - manualSuspend = false; - } - - public void pause() { - manualSuspend = true; - } - - public void relax() { - // in case its running - stop(); - stop = false; - thread = new Thread(this); - thread.setPriority(Thread.MIN_PRIORITY); - thread.start(); - } - - /** - * Used for synchronization. - */ - public Object pauseObject = new String("PAUSE OBJECT"); - - public void resume() { - manualSuspend = false; - if(running == false) { - prerelax(); - relax(); - } else { - synchronized(pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public synchronized void stop() { - if(thread != null) { - manualSuspend = false; - stop = true; - // interrupt the relaxer, in case it is paused or sleeping - // this should ensure that visRunnerIsRunning gets set to false - try { thread.interrupt(); } - catch(Exception ex) { - // the applet security manager may have prevented this. 
- // just sleep for a second to let the thread stop on its own - try { Thread.sleep(1000); } - catch(InterruptedException ie) {} // ignore - } - synchronized (pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public void run() { - running = true; - try { - while (!process.done() && !stop) { - synchronized (pauseObject) { - while (manualSuspend && !stop) { - try { - pauseObject.wait(); - } catch (InterruptedException e) { - // ignore - } - } - } - process.step(); - - if (stop) - return; - - try { - Thread.sleep(sleepTime); - } catch (InterruptedException ie) { - // ignore - } - } - - } finally { - running = false; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/package.html deleted file mode 100644 index 356f7d5f32..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/layout/util/package.html +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - -Utility classes for updating layout positions. - - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java deleted file mode 100644 index 1124bdf0b0..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.matrix; - -import java.util.Map; - - -/** - * An interface for specifying the behavior of graph/matrix operations - * for a particular element type. - *

- * <p>Graph/matrix multiplication requires the definition of two operations:</p>
- * <ol>
- * <li>Calculating an aggregate property of paths of length 2 between two
- * vertices v1 and v2 (analogous to element multiplication in matrix
- * arithmetic); this is handled by computePathData().</li>
- * <li>Aggregating the properties of all such paths, and assigning the result to
- * a new edge in the output graph (analogous to element addition in matrix
- * arithmetic); this is handled by mergePaths().</li>
- * </ol>
- * <p>Together, computePathData() and mergePaths() specify how the equivalent of
- * the vector inner (dot) product is to function.</p>
      - * For instance, to implement the equivalent of standard matrix multiplication - * on two graphs, computePathData() should return the products of the - * weights of a two-edge path, and mergePaths() should add - * the output of computePathData() to an existing edge (or possibly create such - * an edge if none exists). - * - * @author Joshua O'Madadhain - */ -public interface MatrixElementOperations -{ - /** - * If either e or pathData is null, the effect of mergePaths() is - * implementation-dependent. - * - * @param e (possibly) existing edge in the output graph which - * represents a path in the input graph(s) - * - * @param pathData data (which represents another path with the same source - * and destination as e in the input graphs) which is to be merged into e - */ - public void mergePaths(E e, Object pathData); - - /** - * If either e1 or e2 is null, the Object reference returned should be null. - * - * @param e1 first edge from 2-edge path in input graph(s) - * @param e2 second edge from 2-edge path in input graph(s) - * @return aggregation of data from the edges of the 2-edge path - * (from source of e1 to destination of e2) comprised of (e1, e2) - */ - public Number computePathData(E e1, E e2); - - /** - * Returns a map from edges to values. - */ - public Map getEdgeData(); -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java deleted file mode 100644 index ada1406c48..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java +++ /dev/null @@ -1,68 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.matrix; - - -import java.util.HashMap; -import java.util.Map; - -/** - * Implements the basic matrix operations on double-precision values. Assumes - * that the edges have a MutableDouble value. - * - * @author Joshua O'Madadhain - */ -public class RealMatrixElementOperations implements MatrixElementOperations -{ - private Map edgeData = new HashMap(); - - /** - * Creates an instance using the specified edge values. 
- */ - public RealMatrixElementOperations(Map edgeData) - { - this.edgeData = edgeData; - } - - /** - * @see MatrixElementOperations#mergePaths(Object, Object) - */ - public void mergePaths(E e, Object pathData) - { - - Number pd = (Number)pathData; - Number ed = edgeData.get(e); - if (ed == null) { - edgeData.put(e, pd); - - } else { - edgeData.put(e, ed.doubleValue()+pd.doubleValue()); - - } - - } - - /** - * @see MatrixElementOperations#computePathData(Object, Object) - */ - public Number computePathData(E e1, E e2) - { - double d1 = edgeData.get(e1).doubleValue(); - double d2 = edgeData.get(e2).doubleValue(); - return d1*d2; - } - - /** - * @return the edgeData - */ - public Map getEdgeData() { - return edgeData; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/package.html deleted file mode 100644 index 6025a412dc..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/matrix/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Mechanisms for dealing with graphs as matrices. These include conversion to and -from Colt matrices, and some matrix algorithms. - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java deleted file mode 100644 index 1dfcf12309..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jun 7, 2008 - * - */ -package edu.uci.ics.jung.algorithms.metrics; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A class consisting of static methods for calculating graph metrics. - */ -public class Metrics -{ - /** - * Returns a Map of vertices to their clustering coefficients. - * The clustering coefficient cc(v) of a vertex v is defined as follows: - *

- * <ul>
- * <li>degree(v) == {0,1}: 0</li>
- * <li>degree(v) == n, n >= 2: given S, the set of neighbors
- * of v: cc(v) = (the sum over all w in S of the number of
- * other elements of S that are neighbors of w) / (|S| * (|S| - 1)).
- * Less formally, the fraction of v's neighbors that are also
- * neighbors of each other.</li>
- * </ul>
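// Illustrative sketch (not part of the deleted file): a minimal example of
// calling clusteringCoefficients(), assuming the JUNG 2 UndirectedSparseGraph
// implementation and the generic signatures (Graph<V, E>, Map<V, Double>)
// that the rendered diff has stripped.
import java.util.Map;

import edu.uci.ics.jung.algorithms.metrics.Metrics;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;

public class ClusteringExample {
    public static void main(String[] args) {
        // A triangle {1, 2, 3} plus a pendant vertex 4 attached to 3.
        Graph<Integer, String> g = new UndirectedSparseGraph<Integer, String>();
        g.addEdge("a", 1, 2);
        g.addEdge("b", 2, 3);
        g.addEdge("c", 1, 3);
        g.addEdge("d", 3, 4);

        // Vertex 3 has neighbors {1, 2, 4}; only the pair (1, 2) is connected,
        // so cc(3) = 1/3. cc(1) = cc(2) = 1.0, and cc(4) = 0 (degree 1).
        Map<Integer, Double> cc = Metrics.clusteringCoefficients(g);
        System.out.println(cc);
    }
}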

        Note: This algorithm treats its argument as an undirected graph; - * edge direction is ignored. - * @param graph the graph whose clustering coefficients are to be calculated - * @see "The structure and function of complex networks, M.E.J. Newman, aps.arxiv.org/abs/cond-mat/0303516" - */ - public static Map clusteringCoefficients(Graph graph) - { - Map coefficients = new HashMap(); - - for (V v : graph.getVertices()) - { - int n = graph.getNeighborCount(v); - if (n < 2) - coefficients.put(v, new Double(0)); - else - { - // how many of v's neighbors are connected to each other? - ArrayList neighbors = new ArrayList(graph.getNeighbors(v)); - double edge_count = 0; - for (int i = 0; i < n; i++) - { - V w = neighbors.get(i); - for (int j = i+1; j < n; j++ ) - { - V x = neighbors.get(j); - edge_count += graph.isNeighbor(w, x) ? 1 : 0; - } - } - double possible_edges = (n * (n - 1))/2.0; - coefficients.put(v, new Double(edge_count / possible_edges)); - } - } - - return coefficients; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java deleted file mode 100644 index aec84b9b8c..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Created on Sep 19, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.metrics; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Calculates some of the measures from Burt's text "Structural Holes: - * The Social Structure of Competition". - * - *

- * <p>Notes:
- * <ul>
- * <li>Each of these measures assumes that each edge has an associated
- * non-null weight whose value is accessed through the specified
- * Transformer instance.</li>
- * <li>Nonexistent edges are treated as edges with weight 0 for purposes
- * of edge weight calculations.</li>
- * </ul>
- *
- * <p>Based on code donated by Jasper Voskuilen and
- * Diederik van Liere of the Department of Information and Decision Sciences
- * at Erasmus University.</p>
        - * - * @author Joshua O'Madadhain - * @author Jasper Voskuilen - * @see "Ronald Burt, Structural Holes: The Social Structure of Competition" - * @author Tom Nelson - converted to jung2 - */ -public class StructuralHoles { - - protected Transformer edge_weight; - protected Graph g; - - /** - * Creates a StructuralHoles instance based on the - * edge weights specified by nev. - */ - public StructuralHoles(Graph graph, Transformer nev) - { - this.g = graph; - this.edge_weight = nev; - } - - /** - * Burt's measure of the effective size of a vertex's network. Essentially, the - * number of neighbors minus the average degree of those in v's neighbor set, - * not counting ties to v. Formally: - *
-     * <pre>
-     * effectiveSize(v) = v.degree() - (sum_{u in N(v)} sum_{w in N(u), w != u,v} p(v,w) * m(u,w))
-     * </pre>
- * where
- * <ul>
- * <li>N(a) = a.getNeighbors()</li>
- * <li>p(v,w) = normalized mutual edge weight of v and w</li>
- * <li>m(u,w) = maximum-scaled mutual edge weight of u and w</li>
- * </ul>
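// Illustrative sketch (not part of the deleted file): driving effectiveSize()
// and the related measures with a constant edge-weight Transformer. The graph
// classes and the exact generic signature of the constructor (stripped by the
// rendered diff) are assumptions.
import org.apache.commons.collections15.Transformer;

import edu.uci.ics.jung.algorithms.metrics.StructuralHoles;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;

public class StructuralHolesExample {
    public static void main(String[] args) {
        Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
        g.addEdge(1, "ego", "a");
        g.addEdge(2, "ego", "b");
        g.addEdge(3, "a", "b"); // ego's two contacts are also tied to each other

        // Every edge gets weight 1.
        Transformer<Integer, Number> weights = new Transformer<Integer, Number>() {
            public Number transform(Integer edge) {
                return 1;
            }
        };

        StructuralHoles<String, Integer> holes = new StructuralHoles<String, Integer>(g, weights);
        System.out.println("effective size: " + holes.effectiveSize("ego"));
        System.out.println("efficiency:     " + holes.efficiency("ego"));
        System.out.println("constraint:     " + holes.constraint("ego"));
    }
}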
        - * @see #normalizedMutualEdgeWeight(Object, Object) - * @see #maxScaledMutualEdgeWeight(Object, Object) - */ - public double effectiveSize(V v) - { - double result = g.degree(v); - for(V u : g.getNeighbors(v)) { - - for(V w : g.getNeighbors(u)) { - - if (w != v && w != u) - result -= normalizedMutualEdgeWeight(v,w) * - maxScaledMutualEdgeWeight(u,w); - } - } - return result; - } - - /** - * Returns the effective size of v divided by the number of - * alters in v's network. (In other words, - * effectiveSize(v) / v.degree().) - * If v.degree() == 0, returns 0. - */ - public double efficiency(V v) { - double degree = g.degree(v); - - if (degree == 0) - return 0; - else - return effectiveSize(v) / degree; - } - - /** - * Burt's constraint measure (equation 2.4, page 55 of Burt, 1992). Essentially a - * measure of the extent to which v is invested in people who are invested in - * other of v's alters (neighbors). The "constraint" is characterized - * by a lack of primary holes around each neighbor. Formally: - *
-     * <pre>
-     * constraint(v) = sum_{w in MP(v), w != v} localConstraint(v,w)
-     * </pre>
        - * where MP(v) is the subset of v's neighbors that are both predecessors and successors of v. - * @see #localConstraint(Object, Object) - */ - public double constraint(V v) { - double result = 0; - for(V w : g.getSuccessors(v)) { - - if (v != w && g.isPredecessor(v,w)) - { - result += localConstraint(v, w); - } - } - - return result; - } - - - /** - * Calculates the hierarchy value for a given vertex. Returns NaN when - * v's degree is 0, and 1 when v's degree is 1. - * Formally: - *
-     * <pre>
-     * hierarchy(v) = (sum_{w in N(v), w != v} s(v,w) * log(s(v,w))) / (v.degree() * Math.log(v.degree()))
-     * </pre>
- * where
- * <ul>
- * <li>N(v) = v.getNeighbors()</li>
- * <li>s(v,w) = localConstraint(v,w) / (aggregateConstraint(v) / v.degree())</li>
- * </ul>
        - * @see #localConstraint(Object, Object) - * @see #aggregateConstraint(Object) - */ - public double hierarchy(V v) - { - double v_degree = g.degree(v); - - if (v_degree == 0) - return Double.NaN; - if (v_degree == 1) - return 1; - - double v_constraint = aggregateConstraint(v); - - double numerator = 0; - for (V w : g.getNeighbors(v)) { - - if (v != w) - { - double sl_constraint = localConstraint(v, w) / (v_constraint / v_degree); - numerator += sl_constraint * Math.log(sl_constraint); - } - } - - return numerator / (v_degree * Math.log(v_degree)); - } - - /** - * Returns the local constraint on v from a lack of primary holes - * around its neighbor v2. - * Based on Burt's equation 2.4. Formally: - *
-     * <pre>
-     * localConstraint(v1, v2) = ( p(v1,v2) + ( sum_{w in N(v1)} p(v1,w) * p(w,v2) ) )^2
-     * </pre>
- * where
- * <ul>
- * <li>N(v) = v.getNeighbors()</li>
- * <li>p(v,w) = normalized mutual edge weight of v and w</li>
- * </ul>
        - * @see #normalizedMutualEdgeWeight(Object, Object) - */ - public double localConstraint(V v1, V v2) - { - double nmew_vw = normalizedMutualEdgeWeight(v1, v2); - double inner_result = 0; - for (V w : g.getNeighbors(v1)) { - - inner_result += normalizedMutualEdgeWeight(v1,w) * - normalizedMutualEdgeWeight(w,v2); - } - return (nmew_vw + inner_result) * (nmew_vw + inner_result); - } - - /** - * The aggregate constraint on v. Based on Burt's equation 2.7. - * Formally: - *
-     * <pre>
-     * aggregateConstraint(v) = sum_{w in N(v)} localConstraint(v,w) * O(w)
-     * </pre>
- * where
- * <ul>
- * <li>N(v) = v.getNeighbors()</li>
- * <li>O(w) = organizationalMeasure(w)</li>
- * </ul>
        - */ - public double aggregateConstraint(V v) - { - double result = 0; - for (V w : g.getNeighbors(v)) { - - result += localConstraint(v, w) * organizationalMeasure(g, w); - } - return result; - } - - /** - * A measure of the organization of individuals within the subgraph - * centered on v. Burt's text suggests that this is - * in some sense a measure of how "replaceable" v is by - * some other element of this subgraph. Should be a number in the - * closed interval [0,1]. - * - *

- * <p>This implementation returns 1. Users may wish to override this
- * method in order to define their own behavior.</p>
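// Illustrative sketch (not part of the deleted file): one hypothetical way to
// override organizationalMeasure(), as suggested above. The subclass name and
// the constant 0.5 are invented for illustration; the generic signatures are
// assumptions, since the rendered diff has stripped them.
import org.apache.commons.collections15.Transformer;

import edu.uci.ics.jung.algorithms.metrics.StructuralHoles;
import edu.uci.ics.jung.graph.Graph;

public class DiscountedStructuralHoles<V, E> extends StructuralHoles<V, E> {

    public DiscountedStructuralHoles(Graph<V, E> graph,
                                     Transformer<E, ? extends Number> edgeWeights) {
        super(graph, edgeWeights);
    }

    // Treat every neighborhood as only half-organized instead of fully
    // organized; the value must stay within [0, 1] per the contract above.
    @Override
    protected double organizationalMeasure(Graph<V, E> g, V v) {
        return 0.5;
    }
}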

        - */ - protected double organizationalMeasure(Graph g, V v) { - return 1.0; - } - - - /** - * Returns the proportion of v1's network time and energy invested - * in the relationship with v2. Formally: - *
-     * <pre>
-     * normalizedMutualEdgeWeight(a,b) = mutual_weight(a,b) / (sum_c mutual_weight(a,c))
-     * </pre>
        - * Returns 0 if either numerator or denominator = 0, or if v1 == v2. - * @see #mutualWeight(Object, Object) - */ - protected double normalizedMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V v : g.getNeighbors(v1)) { - denominator += mutualWeight(v1, v); - } - if (denominator == 0) - return 0; - - return numerator / denominator; - } - - /** - * Returns the weight of the edge from v1 to v2 - * plus the weight of the edge from v2 to v1; - * if either edge does not exist, it is treated as an edge with weight 0. - * Undirected edges are treated as two antiparallel directed edges (that - * is, if there is one undirected edge with weight w connecting - * v1 to v2, the value returned is 2w). - * Ignores parallel edges; if there are any such, one is chosen at random. - * Throws NullPointerException if either edge is - * present but not assigned a weight by the constructor-specified - * NumberEdgeValue. - */ - protected double mutualWeight(V v1, V v2) - { - E e12 = g.findEdge(v1,v2); - E e21 = g.findEdge(v2,v1); - double w12 = (e12 != null ? edge_weight.transform(e12).doubleValue() : 0); - double w21 = (e21 != null ? edge_weight.transform(e21).doubleValue() : 0); - - return w12 + w21; - } - - /** - * The marginal strength of v1's relation with contact vertex2. - * Formally: - *
-     * <pre>
-     * maxScaledMutualEdgeWeight(a,b) = mutual_weight(a,b) / (max_c mutual_weight(a,c))
-     * </pre>
        - * Returns 0 if either numerator or denominator is 0, or if v1 == v2. - * @see #mutualWeight(Object, Object) - */ - protected double maxScaledMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V w : g.getNeighbors(v1)) { - - if (v2 != w) - denominator = Math.max(numerator, mutualWeight(v1, w)); - } - - if (denominator == 0) - return 0; - - return numerator / denominator; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java deleted file mode 100644 index 634eb3bcdf..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.metrics; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.collections15.CollectionUtils; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; - - -/** - * TriadicCensus is a standard social network tool that counts, for each of the - * different possible configurations of three vertices, the number of times - * that that configuration occurs in the given graph. - * This may then be compared to the set of expected counts for this particular - * graph or to an expected sample. This is often used in p* modeling. - *

- * <p>To use this class,
- * <pre>
- * long[] triad_counts = TriadicCensus.getCounts(dg);
- * </pre>
- * where dg is a DirectedGraph.
- * The ith element of the array (for i in [1,16]) is the number of
- * occurrences of the corresponding triad type.
- * (The 0th element is not meaningful; this array is effectively 1-based.)
- * To get the name of the ith triad (e.g. "003"),
- * look at the constant array TriadicCensus.TRIAD_NAMES[i].

- * <p>Triads are named as
- * (number of pairs that are mutually tied)
- * (number of pairs that are one-way tied)
- * (number of non-tied pairs)
- * in the triple. Since there are only three pairs, there is a finite
- * set of these possible triads.

- * <p>In fact, there are exactly 16, conventionally sorted by the number of
- * realized edges in the triad:
- * <table>
- * <tr><th>Number</th> <th>Configuration</th> <th>Notes</th></tr>
- * <tr><td>1</td>  <td>003</td>  <td>The empty triad</td></tr>
- * <tr><td>2</td>  <td>012</td>  <td></td></tr>
- * <tr><td>3</td>  <td>102</td>  <td></td></tr>
- * <tr><td>4</td>  <td>021D</td> <td>"Down": the directed edges point away</td></tr>
- * <tr><td>5</td>  <td>021U</td> <td>"Up": the directed edges meet</td></tr>
- * <tr><td>6</td>  <td>021C</td> <td>"Circle": one in, one out</td></tr>
- * <tr><td>7</td>  <td>111D</td> <td>"Down": 021D but one edge is mutual</td></tr>
- * <tr><td>8</td>  <td>111U</td> <td>"Up": 021U but one edge is mutual</td></tr>
- * <tr><td>9</td>  <td>030T</td> <td>"Transitive": two point to the same vertex</td></tr>
- * <tr><td>10</td> <td>030C</td> <td>"Circle": A->B->C->A</td></tr>
- * <tr><td>11</td> <td>201</td>  <td></td></tr>
- * <tr><td>12</td> <td>120D</td> <td>"Down": 021D but the third edge is mutual</td></tr>
- * <tr><td>13</td> <td>120U</td> <td>"Up": 021U but the third edge is mutual</td></tr>
- * <tr><td>14</td> <td>120C</td> <td>"Circle": 021C but the third edge is mutual</td></tr>
- * <tr><td>15</td> <td>210</td>  <td></td></tr>
- * <tr><td>16</td> <td>300</td>  <td>The complete triad</td></tr>
- * </table>
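// Illustrative sketch (not part of the deleted file): running the census on a
// directed 3-cycle, which is the "030C" configuration (row 10 of the table
// above). DirectedSparseGraph comes from the JUNG 2 graph implementation and
// is an assumption here.
import edu.uci.ics.jung.algorithms.metrics.TriadicCensus;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseGraph;

public class TriadExample {
    public static void main(String[] args) {
        DirectedGraph<String, Integer> dg = new DirectedSparseGraph<String, Integer>();
        dg.addEdge(1, "A", "B");
        dg.addEdge(2, "B", "C");
        dg.addEdge(3, "C", "A");

        long[] counts = TriadicCensus.getCounts(dg);

        // The only triad is A->B->C->A, so counts[10] ("030C") should be 1.
        for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) {
            if (counts[i] > 0) {
                System.out.println(TriadicCensus.TRIAD_NAMES[i] + ": " + counts[i]);
            }
        }
    }
}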

- * <p>This implementation takes O(m) time, where m is the number of edges in the graph.
        - * It is based on - * - * A subquadratic triad census algorithm for large sparse networks - * with small maximum degree - * Vladimir Batagelj and Andrej Mrvar, University of Ljubljana - * Published in Social Networks. - * @author Danyel Fisher - * @author Tom Nelson - converted to jung2 - * - */ -public class TriadicCensus { - - // NOTE THAT THIS RETURNS STANDARD 1-16 COUNT! - - // and their types - public static final String[] TRIAD_NAMES = { "N/A", "003", "012", "102", "021D", - "021U", "021C", "111D", "111U", "030T", "030C", "201", "120D", - "120U", "120C", "210", "300" }; - - public static final int MAX_TRIADS = TRIAD_NAMES.length; - - /** - * Returns an array whose ith element (for i in [1,16]) is the number of - * occurrences of the corresponding triad type in g. - * (The 0th element is not meaningful; this array is effectively 1-based.) - * - * @param g - */ - public static long[] getCounts(DirectedGraph g) { - long[] count = new long[MAX_TRIADS]; - - List id = new ArrayList(g.getVertices()); - - // apply algorithm to each edge, one at at time - for (int i_v = 0; i_v < g.getVertexCount(); i_v++) { - V v = id.get(i_v); - for(V u : g.getNeighbors(v)) { - int triType = -1; - if (id.indexOf(u) <= i_v) - continue; - Set neighbors = new HashSet(CollectionUtils.union(g.getNeighbors(u), g.getNeighbors(v))); - neighbors.remove(u); - neighbors.remove(v); - if (g.isSuccessor(v,u) && g.isSuccessor(u,v)) { - triType = 3; - } else { - triType = 2; - } - count[triType] += g.getVertexCount() - neighbors.size() - 2; - for (V w : neighbors) { - if (shouldCount(g, id, u, v, w)) { - count [ triType ( triCode(g, u, v, w) ) ] ++; - } - } - } - } - int sum = 0; - for (int i = 2; i <= 16; i++) { - sum += count[i]; - } - int n = g.getVertexCount(); - count[1] = n * (n-1) * (n-2) / 6 - sum; - return count; - } - - /** - * This is the core of the technique in the paper. Returns an int from 0 to - * 65 based on: WU -> 32 UW -> 16 WV -> 8 VW -> 4 UV -> 2 VU -> 1 - * - */ - public static int triCode(Graph g, V u, V v, V w) { - int i = 0; - i += link(g, v, u ) ? 1 : 0; - i += link(g, u, v ) ? 2 : 0; - i += link(g, v, w ) ? 4 : 0; - i += link(g, w, v ) ? 8 : 0; - i += link(g, u, w ) ? 16 : 0; - i += link(g, w, u ) ? 32 : 0; - return i; - } - - protected static boolean link(Graph g, V a, V b) { - return g.isPredecessor(b, a); - } - - - /** - * Simply returns the triCode. - * @param triCode - * @return the string code associated with the numeric type - */ - public static int triType( int triCode ) { - return codeToType[ triCode ]; - } - - /** - * For debugging purposes, this is copied straight out of the paper which - * means that they refer to triad types 1-16. 
- */ - protected static final int[] codeToType = { 1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, - 7, 11, 2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, - 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, - 14, 15, 8, 14, 13, 15, 11, 15, 15, 16 }; - - /** - * Make sure we have a canonical ordering: Returns true if u < w, or v < w < - * u and v doesn't link to w - * - * @param id - * @param u - * @param v - * @param w - * @return true if u < w, or if v < w < u and v doesn't link to w; false otherwise - */ - protected static boolean shouldCount(Graph g, List id, V u, V v, V w) { - int i_u = id.indexOf(u); - int i_w = id.indexOf(w); - if (i_u < i_w) - return true; - int i_v = id.indexOf(v); - if ((i_v < i_w) && (i_w < i_u) && (!g.isNeighbor(w,v))) - return true; - return false; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/package.html deleted file mode 100644 index ce5144b956..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/metrics/package.html +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - -Specialized measures for graph properties. These currently include: - -

-<ul>
-<li>StructuralHoles: calculates some of Burt's 'structural holes'
-measures (e.g. efficiency, hierarchy, constraint).</li>
-<li>TriadicCensus: returns counts for each triad type found in a
-graph.</li>
-</ul>
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/package.html deleted file mode 100644 index f9d2e2509e..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/package.html +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - -

        Algorithms for graphs and networks.

        - -

        These algorithms are divided into categories as follows: -

          -
-<ul>
-<li>blockmodel: dividing graph elements (typically vertices) into
-equivalence classes, generally by topological properties
-(e.g. structural equivalence)</li>
-<li>cluster: identifying coherent (not necessarily disjoint) groups of elements
-(e.g. weakly connected components, edge betweenness clustering)</li>
-<li>filters: removing parts of a graph according to specified criteria</li>
-<li>flows: calculating properties relating to network flows
-(e.g. max flow/min cut)</li>
-<li>generators: creating graphs with certain properties</li>
-<li>importance (deprecated): assigning values to vertices/edges
-based on topological properties</li>
-<li>layout: arrangement of graph elements, generally for visualization</li>
-<li>metrics: calculating structural properties (triad census, structural holes)</li>
-<li>scoring: assigning values (denoting significance, influence,
-centrality, etc.) to vertices/edges based on topological properties,
-e.g. PageRank, HITS, betweenness centrality (replaces "importance", above)</li>
-<li>shortestpath: calculation of shortest paths between vertices</li>
-<li>util: low-level utility classes used in a variety of algorithms</li>
-</ul>
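// Illustrative sketch (not part of the deleted files shown here): one concrete
// use of the "scoring" category listed above, running PageRank over a small
// directed graph. The graph construction classes and the 0.15 jump probability
// are assumptions made for the example.
import edu.uci.ics.jung.algorithms.scoring.PageRank;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseGraph;

public class PageRankExample {
    public static void main(String[] args) {
        DirectedGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
        g.addEdge(1, "A", "B");
        g.addEdge(2, "B", "C");
        g.addEdge(3, "C", "A");
        g.addEdge(4, "A", "C");

        PageRank<String, Integer> ranker = new PageRank<String, Integer>(g, 0.15);
        ranker.evaluate();

        for (String v : g.getVertices()) {
            System.out.println(v + " -> " + ranker.getVertexScore(v));
        }
    }
}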
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java deleted file mode 100644 index 70d677b518..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.DelegateToEdgeTransformer; -import edu.uci.ics.jung.algorithms.scoring.util.VEPair; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * An abstract class for algorithms that assign scores to vertices based on iterative methods. - * Generally, any (concrete) subclass will function by creating an instance, and then either calling - * evaluate (if the user wants to iterate until the algorithms is 'done') or - * repeatedly call step (if the user wants to observe the values at each step). - */ -public abstract class AbstractIterativeScorer implements IterativeContext, VertexScorer -{ - /** - * Maximum number of iterations to use before terminating. Defaults to 100. - */ - protected int max_iterations; - - /** - * Minimum change from one step to the next; if all changes are <= tolerance, - * no further updates will occur. - * Defaults to 0.001. - */ - protected double tolerance; - - /** - * The graph on which the calculations are to be made. - */ - protected Hypergraph graph; - - /** - * The total number of iterations used so far. - */ - protected int total_iterations; - - /** - * The edge weights used by this algorithm. - */ - protected Transformer, ? extends Number> edge_weights; - - /** - * Indicates whether the output and current values are in a 'swapped' state. - * Intended for internal use only. - */ - protected boolean output_reversed; - - /** - * The map in which the output values are stored. - */ - private Map output; - - /** - * The map in which the current values are stored. - */ - private Map current_values; - - /** - * A flag representing whether this instance tolerates disconnected graphs. - * Instances that do not accept disconnected graphs may have unexpected behavior - * on disconnected graphs; they are not guaranteed to do an explicit check. - * Defaults to true. - */ - private boolean accept_disconnected_graph; - - - protected boolean hyperedges_are_self_loops = false; - - /** - * Sets the output value for this vertex. - * @param v the vertex whose output value is to be set - * @param value the value to set - */ - protected void setOutputValue(V v, T value) - { - output.put(v, value); - } - - /** - * Gets the output value for this vertex. 
- * @param v the vertex whose output value is to be retrieved - * @return the output value for this vertex - */ - protected T getOutputValue(V v) - { - return output.get(v); - } - - /** - * Gets the current value for this vertex - * @param v the vertex whose current value is to be retrieved - * @return the current value for this vertex - */ - protected T getCurrentValue(V v) - { - return current_values.get(v); - } - - /** - * Sets the current value for this vertex. - * @param v the vertex whose current value is to be set - * @param value the current value to set - */ - protected void setCurrentValue(V v, T value) - { - current_values.put(v, value); - } - - /** - * The largest change seen so far among all vertex scores. - */ - protected double max_delta; - - /** - * Creates an instance for the specified graph and edge weights. - * @param g the graph for which the instance is to be created - * @param edge_weights the edge weights for this instance - */ - public AbstractIterativeScorer(Hypergraph g, Transformer edge_weights) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - setEdgeWeights(edge_weights); - } - - /** - * Creates an instance for the specified graph g. - * NOTE: This constructor does not set the internal - * edge_weights variable. If this variable is used by - * the subclass which invoked this constructor, it must be initialized - * by that subclass. - * @param g the graph for which the instance is to be created - */ - public AbstractIterativeScorer(Hypergraph g) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - } - - /** - * Initializes the internal state for this instance. - */ - protected void initialize() - { - this.total_iterations = 0; - this.max_delta = Double.MIN_VALUE; - this.output_reversed = true; - this.current_values = new HashMap(); - this.output = new HashMap(); - } - - /** - * Steps through this scoring algorithm until a termination condition is reached. - */ - public void evaluate() - { - do - step(); - while (!done()); - } - - /** - * Returns true if the total number of iterations is greater than or equal to - * max_iterations - * or if the maximum value change observed is less than tolerance. - */ - public boolean done() - { - return total_iterations >= max_iterations || max_delta < tolerance; - } - - /** - * Performs one step of this algorithm; updates the state (value) for each vertex. - */ - public void step() - { - swapOutputForCurrent(); - - for (V v : graph.getVertices()) - { - double diff = update(v); - updateMaxDelta(v, diff); - } - total_iterations++; - afterStep(); - } - - /** - * - */ - protected void swapOutputForCurrent() - { - Map tmp = output; - output = current_values; - current_values = tmp; - output_reversed = !output_reversed; - } - - /** - * Updates the value for v. - * This is the key - * @param v the vertex whose value is to be updated - * @return - */ - protected abstract double update(V v); - - protected void updateMaxDelta(V v, double diff) - { - max_delta = Math.max(max_delta, diff); - } - - protected void afterStep() {} - - public T getVertexScore(V v) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + " not an element of this graph"); - - return output.get(v); - } - - /** - * Returns the maximum number of iterations that this instance will use. 
- * @return the maximum number of iterations that evaluate will use - * prior to terminating - */ - public int getMaxIterations() - { - return max_iterations; - } - - /** - * Returns the number of iterations that this instance has used so far. - * @return the number of iterations that this instance has used so far - */ - public int getIterations() - { - return total_iterations; - } - - /** - * Sets the maximum number of times that evaluate will call step. - * @param max_iterations the maximum - */ - public void setMaxIterations(int max_iterations) - { - this.max_iterations = max_iterations; - } - - /** - * Gets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. Once all changes are less than this value, - * evaluate will terminate. - * @return the size of the largest change that evaluate() will permit - */ - public double getTolerance() - { - return tolerance; - } - - /** - * Sets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. - * @param tolerance the size of the largest change that evaluate() will permit - */ - public void setTolerance(double tolerance) - { - this.tolerance = tolerance; - } - - /** - * Returns the Transformer that this instance uses to associate edge weights with each edge. - * @return the Transformer that associates an edge weight with each edge - */ - public Transformer, ? extends Number> getEdgeWeights() - { - return edge_weights; - } - - /** - * Sets the Transformer that this instance uses to associate edge weights with each edge - * @param edge_weights the Transformer to use to associate an edge weight with each edge - * @see edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight - */ - public void setEdgeWeights(Transformer edge_weights) - { - this.edge_weights = new DelegateToEdgeTransformer(edge_weights); - } - - /** - * Gets the edge weight for e in the context of its (incident) vertex v. - * @param v the vertex incident to e as a context in which the edge weight is to be calculated - * @param e the edge whose weight is to be returned - * @return the edge weight for e in the context of its (incident) vertex v - */ - protected Number getEdgeWeight(V v, E e) - { - return edge_weights.transform(new VEPair(v,e)); - } - - /** - * Collects the 'potential' from v (its current value) if it has no outgoing edges; this - * can then be redistributed among the other vertices as a means of normalization. - * @param v - */ - protected void collectDisappearingPotential(V v) {} - - /** - * Specifies whether this instance should accept vertices with no outgoing edges. - * @param accept true if this instance should accept vertices with no outgoing edges, false otherwise - */ - public void acceptDisconnectedGraph(boolean accept) - { - this.accept_disconnected_graph = accept; - } - - /** - * Returns true if this instance accepts vertices with no outgoing edges, and false otherwise. - * @return true if this instance accepts vertices with no outgoing edges, otherwise false - */ - public boolean isDisconnectedGraphOK() - { - return this.accept_disconnected_graph; - } - - /** - * Specifies whether hyperedges are to be treated as self-loops. If they - * are, then potential will flow along a hyperedge a vertex to itself, - * just as it does to all other vertices incident to that hyperedge. 
- * @param arg if {@code true}, hyperedges are treated as self-loops - */ - public void setHyperedgesAreSelfLoops(boolean arg) - { - this.hyperedges_are_self_loops = arg; - } - - /** - * Returns the effective number of vertices incident to this edge. If - * the graph is a binary relation or if hyperedges are treated as self-loops, - * the value returned is {@code graph.getIncidentCount(e)}; otherwise it is - * {@code graph.getIncidentCount(e) - 1}. - */ - protected int getAdjustedIncidentCount(E e) - { - return graph.getIncidentCount(e) - (hyperedges_are_self_loops ? 0 : 1); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java deleted file mode 100644 index 6883e26384..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Created on Jul 14, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * An abstract class for iterative random-walk-based vertex scoring algorithms - * that have a - * fixed probability, for each vertex, of 'jumping' to that vertex at each - * step in the algorithm (rather than following a link out of that vertex). - * - * @param the vertex type - * @param the edge type - * @param the score type - */ -public abstract class AbstractIterativeScorerWithPriors extends - AbstractIterativeScorer implements VertexScorer -{ - /** - * The prior probability of each vertex being visited on a given - * 'jump' (non-link-following) step. - */ - protected Transformer vertex_priors; - - /** - * The probability of making a 'jump' at each step. - */ - protected double alpha; - - /** - * Creates an instance for the specified graph, edge weights, vertex - * priors, and jump probability. - * @param g the graph whose vertices are to be assigned scores - * @param edge_weights the edge weights to use in the score assignment - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(g, edge_weights); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } - - /** - * Creates an instance for the specified graph, vertex priors, and jump - * probability, with edge weights specified by the subclass. - * @param g the graph whose vertices are to be assigned scores - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Transformer vertex_priors, double alpha) - { - super(g); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } - - /** - * Initializes the state of this instance. 
- */ - @Override - public void initialize() - { - super.initialize(); - // initialize output values to priors - // (output and current are swapped before each step(), so current will - // have priors when update()s start happening) - for (V v : graph.getVertices()) - setOutputValue(v, getVertexPrior(v)); - } - - /** - * Returns the prior probability for v. - * @param v the vertex whose prior probability is being queried - * @return the prior probability for v - */ - protected S getVertexPrior(V v) - { - return vertex_priors.transform(v); - } - - /** - * Returns a Transformer which maps each vertex to its prior probability. - * @return a Transformer which maps each vertex to its prior probability - */ - public Transformer getVertexPriors() - { - return vertex_priors; - } - - /** - * Returns the probability of making a 'jump' (non-link-following step). - * @return the probability of making a 'jump' (non-link-following step) - */ - public double getAlpha() - { - return alpha; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java deleted file mode 100644 index 1c9c178c55..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex according to the sum of its distances to all other vertices. - */ -public class BarycenterScorer extends DistanceCentralityScorer -{ - /** - * Creates an instance with the specified graph and distance metric. - * @param graph the input graph - * @param distance the distance metric to use - */ - public BarycenterScorer(Hypergraph graph, Distance distance) - { - super(graph, distance, false); - } - - /** - * Creates an instance with the specified graph and edge weights. - * Will generate a Distance metric internally based on the edge weights. - * @param graph the input graph - * @param edge_weights the edge weights to use to calculate vertex/vertex distances - */ - public BarycenterScorer(Hypergraph graph, Transformer edge_weights) - { - super(graph, edge_weights, false); - } - - /** - * Creates an instance with the specified graph. - * Will generate a Distance metric internally assuming that the - * graph is unweighted. - * @param graph the input graph - */ - public BarycenterScorer(Hypergraph graph) - { - super(graph, false); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java deleted file mode 100644 index 5cfeb1647b..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java +++ /dev/null @@ -1,351 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Sep 16, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Stack; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Computes betweenness centrality for each vertex and edge in the graph. - * - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." - */ -public class BetweennessCentrality - implements VertexScorer, EdgeScorer -{ - protected Graph graph; - protected Map vertex_scores; - protected Map edge_scores; - protected Map vertex_data; - - /** - * Calculates betweenness scores based on the all-pairs unweighted shortest paths - * in the graph. - * @param graph the graph for which the scores are to be calculated - */ - @SuppressWarnings("unchecked") - public BetweennessCentrality(Graph graph) - { - initialize(graph); - computeBetweenness(new LinkedList(), new ConstantTransformer(1)); - } - - /** - * Calculates betweenness scores based on the all-pairs weighted shortest paths in the - * graph. - * - *

        NOTE: This version of the algorithm may not work correctly on all graphs; we're still - * working out the bugs. Use at your own risk. - * @param graph the graph for which the scores are to be calculated - * @param edge_weights the edge weights to be used in the path length calculations - */ - public BetweennessCentrality(Graph graph, - Transformer edge_weights) - { - // reject negative-weight edges up front - for (E e : graph.getEdges()) - { - double e_weight = edge_weights.transform(e).doubleValue(); - if (e_weight < 0) - throw new IllegalArgumentException(String.format( - "Weight for edge '%s' is < 0: %d", e, e_weight)); - } - - initialize(graph); - computeBetweenness(new MapBinaryHeap(new BetweennessComparator()), - edge_weights); - } - - protected void initialize(Graph graph) - { - this.graph = graph; - this.vertex_scores = new HashMap(); - this.edge_scores = new HashMap(); - this.vertex_data = new HashMap(); - - for (V v : graph.getVertices()) - this.vertex_scores.put(v, 0.0); - - for (E e : graph.getEdges()) - this.edge_scores.put(e, 0.0); - } - - protected void computeBetweenness(Queue queue, - Transformer edge_weights) - { - for (V v : graph.getVertices()) - { - // initialize the betweenness data for this new vertex - for (V s : graph.getVertices()) - this.vertex_data.put(s, new BetweennessData()); - -// if (v.equals(new Integer(0))) -// System.out.println("pause"); - - vertex_data.get(v).numSPs = 1; - vertex_data.get(v).distance = 0; - - Stack stack = new Stack(); -// Buffer queue = new UnboundedFifoBuffer(); -// queue.add(v); - queue.offer(v); - - while (!queue.isEmpty()) - { -// V w = queue.remove(); - V w = queue.poll(); - stack.push(w); - BetweennessData w_data = vertex_data.get(w); - - for (E e : graph.getOutEdges(w)) - { - // TODO (jrtom): change this to getOtherVertices(w, e) - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double wx_weight = edge_weights.transform(e).doubleValue(); - - -// for(V x : graph.getSuccessors(w)) -// { -// if (x.equals(w)) -// continue; - - // FIXME: the other problem is that I need to - // keep putting the neighbors of things we've just - // discovered in the queue, if they're undiscovered or - // at greater distance. - - // FIXME: this is the problem, right here, I think: - // need to update position in queue if distance changes - // (which can only happen with weighted edges). 
- // for each outgoing edge e from w, get other end x - // if x not already visited (dist x < 0) - // set x's distance to w's dist + edge weight - // add x to queue; pri in queue is x's dist - // if w's dist + edge weight < x's dist - // update x's dist - // update x in queue (MapBinaryHeap) - // clear x's incoming edge list - // if w's dist + edge weight = x's dist - // add e to x's incoming edge list - - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + wx_weight; - - if (x_data.distance < 0) - { -// queue.add(x); -// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; - x_data.distance = x_potential_dist; - queue.offer(x); - } - - // note: - // (1) this can only happen with weighted edges - // (2) x's SP count and incoming edges are updated below - if (x_data.distance > x_potential_dist) - { - x_data.distance = x_potential_dist; - // invalidate previously identified incoming edges - // (we have a new shortest path distance to x) - x_data.incomingEdges.clear(); - // update x's position in queue - ((MapBinaryHeap)queue).update(x); - } -// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) - // -// if (x_data.distance == x_potential_dist) -// { -// x_data.numSPs += w_data.numSPs; -//// vertex_data.get(x).predecessors.add(w); -// x_data.incomingEdges.add(e); -// } - } - for (E e: graph.getOutEdges(w)) - { - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double e_weight = edge_weights.transform(e).doubleValue(); - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + e_weight; - if (x_data.distance == x_potential_dist) - { - x_data.numSPs += w_data.numSPs; -// vertex_data.get(x).predecessors.add(w); - x_data.incomingEdges.add(e); - } - } - } - while (!stack.isEmpty()) - { - V x = stack.pop(); - -// for (V w : vertex_data.get(x).predecessors) - for (E e : vertex_data.get(x).incomingEdges) - { - V w = graph.getOpposite(x, e); - double partialDependency = - vertex_data.get(w).numSPs / vertex_data.get(x).numSPs * - (1.0 + vertex_data.get(x).dependency); - vertex_data.get(w).dependency += partialDependency; -// E w_x = graph.findEdge(w, x); -// double w_x_score = edge_scores.get(w_x).doubleValue(); -// w_x_score += partialDependency; -// edge_scores.put(w_x, w_x_score); - double e_score = edge_scores.get(e).doubleValue(); - edge_scores.put(e, e_score + partialDependency); - } - if (!x.equals(v)) - { - double x_score = vertex_scores.get(x).doubleValue(); - x_score += vertex_data.get(x).dependency; - vertex_scores.put(x, x_score); - } - } - } - - if(graph instanceof UndirectedGraph) - { - for (V v : graph.getVertices()) { - double v_score = vertex_scores.get(v).doubleValue(); - v_score /= 2.0; - vertex_scores.put(v, v_score); - } - for (E e : graph.getEdges()) { - double e_score = edge_scores.get(e).doubleValue(); - e_score /= 2.0; - edge_scores.put(e, e_score); - } - } - - vertex_data.clear(); - } - -// protected void computeWeightedBetweenness(Transformer edge_weights) -// { -// for (V v : graph.getVertices()) -// { -// // initialize the betweenness data for this new vertex -// for (V s : graph.getVertices()) -// this.vertex_data.put(s, new BetweennessData()); -// vertex_data.get(v).numSPs = 1; -// vertex_data.get(v).distance = 0; -// -// Stack stack = new Stack(); -//// Buffer queue = new UnboundedFifoBuffer(); -// SortedSet pqueue = new TreeSet(new BetweennessComparator()); -//// queue.add(v); -// pqueue.add(v); -// -//// while (!queue.isEmpty()) -// while 
(!pqueue.isEmpty()) -// { -//// V w = queue.remove(); -// V w = pqueue.first(); -// pqueue.remove(w); -// stack.push(w); -// -//// for(V x : graph.getSuccessors(w)) -// for (E e : graph.getOutEdges(w)) -// { -// // TODO (jrtom): change this to getOtherVertices(w, e) -// V x = graph.getOpposite(w, e); -// if (x.equals(w)) -// continue; -// double e_weight = edge_weights.transform(e).doubleValue(); -// -// if (vertex_data.get(x).distance < 0) -// { -//// queue.add(x); -// pqueue.add(v); -//// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; -// vertex_data.get(x).distance = -// vertex_data.get(w).distance + e_weight; -// } -// -//// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) -// if (vertex_data.get(x).distance == -// vertex_data.get(w).distance + e_weight) -// { -// vertex_data.get(x).numSPs += vertex_data.get(w).numSPs; -// vertex_data.get(x).predecessors.add(w); -// } -// } -// } -// updateScores(v, stack); -// } -// -// if(graph instanceof UndirectedGraph) -// adjustUndirectedScores(); -// -// vertex_data.clear(); -// } - - public Double getVertexScore(V v) - { - return vertex_scores.get(v); - } - - public Double getEdgeScore(E e) - { - return edge_scores.get(e); - } - - private class BetweennessData - { - double distance; - double numSPs; -// List predecessors; - List incomingEdges; - double dependency; - - BetweennessData() - { - distance = -1; - numSPs = 0; -// predecessors = new ArrayList(); - incomingEdges = new ArrayList(); - dependency = 0; - } - - @Override - public String toString() - { - return "[d:" + distance + ", sp:" + numSPs + - ", p:" + incomingEdges + ", d:" + dependency + "]\n"; -// ", p:" + predecessors + ", d:" + dependency + "]\n"; - } - } - - private class BetweennessComparator implements Comparator - { - public int compare(V v1, V v2) - { - return vertex_data.get(v1).distance > vertex_data.get(v2).distance ? 1 : -1; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java deleted file mode 100644 index d64f01ed41..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex based on the mean distance to each other vertex. - * - * @author Joshua O'Madadhain - */ -public class ClosenessCentrality extends DistanceCentralityScorer -{ - - /** - * Creates an instance using the specified vertex/vertex distance metric. - * @param graph the input - * @param distance the vertex/vertex distance metric. - */ - public ClosenessCentrality(Hypergraph graph, Distance distance) - { - super(graph, distance, true); - } - - /** - * Creates an instance which measures distance using the specified edge weights. 
- * @param graph the input graph - * @param edge_weights the edge weights to be used to determine vertex/vertex distances - */ - public ClosenessCentrality(Hypergraph graph, Transformer edge_weights) - { - super(graph, edge_weights, true); - } - - /** - * Creates an instance which measures distance on the graph without edge weights. - * @param graph - */ - public ClosenessCentrality(Hypergraph graph) - { - super(graph, true); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java deleted file mode 100644 index 2ec3148104..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns a score to each vertex equal to its degree. - * - * @param the vertex type - */ -public class DegreeScorer implements VertexScorer -{ - /** - * The graph for which scores are to be generated. - */ - protected Hypergraph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the input graph - */ - public DegreeScorer(Hypergraph graph) - { - this.graph = graph; - } - - /** - * Returns the degree of the vertex. - * @return the degree of the vertex - */ - public Integer getVertexScore(V v) - { - return graph.degree(v); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java deleted file mode 100644 index 16dd86219d..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Created on Jul 10, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance; -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to vertices based on their distances to each other vertex - * in the graph. - * - * This class optionally normalizes its results based on the value of its - * 'averaging' constructor parameter. If it is true, - * then the value returned for vertex v is 1 / (_average_ distance from v to all other vertices); - * this is sometimes called closeness centrality. - * If it is false, then the value returned is 1 / (_total_ distance from - * v to all other vertices); this is sometimes referred to as barycenter centrality. 
- * (If the average/total distance is 0, the value returned is {@code Double.POSITIVE_INFINITY}.) - * - * @see BarycenterScorer - * @see ClosenessCentrality - */ -public class DistanceCentralityScorer implements VertexScorer -{ - /** - * The graph on which the vertex scores are to be calculated. - */ - protected Hypergraph graph; - - /** - * The metric to use for specifying the distance between pairs of vertices. - */ - protected Distance distance; - - /** - * The cache for the output results. Null encodes "not yet calculated", - * < 0 encodes "no such distance exists". - */ - protected Map output; - - /** - * Specifies whether the values returned are the sum of the v-distances - * or the mean v-distance. - */ - protected boolean averaging; - - /** - * Specifies whether, for a vertex v with missing (null) distances, - * v's score should ignore the missing values or be set to 'null'. - * Defaults to 'true'. - */ - protected boolean ignore_missing; - - /** - * Specifies whether the values returned should ignore self-distances - * (distances from v to itself). - * Defaults to 'true'. - */ - protected boolean ignore_self_distances; - - /** - * Creates an instance with the specified graph, distance metric, and - * averaging behavior. - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging, boolean ignore_missing, - boolean ignore_self_distances) - { - this.graph = graph; - this.distance = distance; - this.averaging = averaging; - this.ignore_missing = ignore_missing; - this.ignore_self_distances = ignore_self_distances; - this.output = new HashMap(); - } - - /** - * Equivalent to this(graph, distance, averaging, true, true). - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging) - { - this(graph, distance, averaging, true, true); - } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated based on the specified edge - * weights. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. 
- */ - public DistanceCentralityScorer(Hypergraph graph, - Transformer edge_weights, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - ignore_missing, ignore_self_distances); - } - - /** - * Equivalent to this(graph, edge_weights, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, - Transformer edge_weights, boolean averaging) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - true, true); - } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated on the unweighted graph. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new UnweightedShortestPath(graph), averaging, - ignore_missing, ignore_self_distances); - } - - /** - * Equivalent to this(graph, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging) - { - this(graph, new UnweightedShortestPath(graph), averaging, true, true); - } - - /** - * Calculates the score for the specified vertex. Returns {@code null} if - * there are missing distances and such are not ignored by this instance. - */ - public Double getVertexScore(V v) - { - Double value = output.get(v); - if (value != null) - { - if (value < 0) - return null; - return value; - } - - Map v_distances = new HashMap(distance.getDistanceMap(v)); - if (ignore_self_distances) - v_distances.remove(v); - - // if we don't ignore missing distances and there aren't enough - // distances, output null (shortcut) - if (!ignore_missing) - { - int num_dests = graph.getVertexCount() - - (ignore_self_distances ? 1 : 0); - if (v_distances.size() != num_dests) - { - output.put(v, -1.0); - return null; - } - } - - Double sum = 0.0; - for (V w : graph.getVertices()) - { - if (w.equals(v) && ignore_self_distances) - continue; - Number w_distance = v_distances.get(w); - if (w_distance == null) - if (ignore_missing) - continue; - else - { - output.put(v, -1.0); - return null; - } - else - sum += w_distance.doubleValue(); - } - value = sum; - if (averaging) - value /= v_distances.size(); - - double score = value == 0 ? 
- Double.POSITIVE_INFINITY : - 1.0 / value; - output.put(v, score); - - return score; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java deleted file mode 100644 index 7e648746d0..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - - -/** - * An interface for algorithms that assign scores to edges. - * - * @param the edge type - * @param the score type - */ -public interface EdgeScorer -{ - /** - * Returns the algorithm's score for this edge. - * @return the algorithm's score for this edge - */ - public S getEdgeScore(E e); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java deleted file mode 100644 index 87d7f3ae10..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Calculates eigenvector centrality for each vertex in the graph. - * The 'eigenvector centrality' for a vertex is defined as the fraction of - * time that a random walk(er) will spend at that vertex over an infinite - * time horizon. - * Assumes that the graph is strongly connected. - */ -public class EigenvectorCentrality extends PageRank -{ - /** - * Creates an instance with the specified graph and edge weights. - * The outgoing edge weights for each edge must sum to 1. - * (See UniformDegreeWeight for one way to handle this for - * undirected graphs.) - * @param graph the graph for which the centrality is to be calculated - * @param edge_weights the edge weights - */ - public EigenvectorCentrality(Hypergraph graph, - Transformer edge_weights) - { - super(graph, edge_weights, 0); - acceptDisconnectedGraph(false); - } - - /** - * Creates an instance with the specified graph and default edge weights. - * (Default edge weights: UniformDegreeWeight.) - * @param graph the graph for which the centrality is to be calculated. 
- */ - public EigenvectorCentrality(Hypergraph graph) - { - super(graph, 0); - acceptDisconnectedGraph(false); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java deleted file mode 100644 index b1b4f42899..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Created on Jul 15, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Graph; - -import org.apache.commons.collections15.Transformer; - -/** - * Assigns hub and authority scores to each vertex depending on the topology of - * the network. The essential idea is that a vertex is a hub to the extent - * that it links to authoritative vertices, and is an authority to the extent - * that it links to 'hub' vertices. - * - *

- * <p>The classic HITS algorithm essentially proceeds as follows:
- * <pre>
- * assign equal initial hub and authority values to each vertex
- * repeat
- *   for each vertex w:
- *     w.hub = sum over successors x of x.authority
- *     w.authority = sum over predecessors v of v.hub
- *   normalize hub and authority scores so that the sum of the squares of each = 1
- * until scores converge
- * </pre>
- *
- * HITS is somewhat different from random walk/eigenvector-based algorithms
- * such as PageRank in that:
- * <ul>
- * <li/>there are two mutually recursive scores being calculated, rather than
- * a single value
- * <li/>the edge weights are effectively all 1, i.e., they can't be interpreted
- * as transition probabilities. This means that the more inlinks and outlinks
- * that a vertex has, the better, since adding an inlink (or outlink) does
- * not dilute the influence of the other inlinks (or outlinks) as in
- * random walk-based algorithms.
- * <li/>the scores cannot be interpreted as posterior probabilities (due to the different
- * normalization)
- * </ul>
- *
- * This implementation has the classic behavior by default. However, it has
- * been generalized somewhat so that it can act in a more "PageRank-like" fashion:
- * <ul>
- * <li/>this implementation has an optional 'random jump probability' parameter analogous
- * to the 'alpha' parameter used by PageRank. Varying this value between 0 and 1
- * allows the user to vary between the classic HITS behavior and one in which the
- * scores are smoothed to a uniform distribution.
- * The default value for this parameter is 0 (no random jumps possible).
- * <li/>the edge weights can be set to anything the user likes, and in
- * particular they can be set up (e.g. using UniformDegreeWeight)
- * so that the weights of the relevant edges incident to a vertex sum to 1.
- * <li/>The vertex score normalization has been factored into its own method
- * so that it can be overridden by a subclass. Thus, for example,
- * since the vertices' values are set to sum to 1 initially, if the weights of the
- * relevant edges incident to a vertex sum to 1, then the vertices' values
- * will continue to sum to 1 if the "sum-of-squares" normalization code
- * is overridden to a no-op. (Other normalization methods may also be employed.)
- * </ul>
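For context, a hedged sketch of how the classic algorithm described above is typically driven through this (now-removed) class; JUNG 2.0 on the classpath is assumed, and the sample graph and vertex names are arbitrary:

    import edu.uci.ics.jung.algorithms.scoring.HITS;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class HitsDemo {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "hub", "auth1");
            g.addEdge(2, "hub", "auth2");
            g.addEdge(3, "auth1", "auth2");

            // The no-alpha constructor reproduces the classic behavior (no random jumps).
            HITS<String, Integer> hits = new HITS<String, Integer>(g);
            hits.evaluate();

            for (String v : g.getVertices()) {
                HITS.Scores s = hits.getVertexScore(v);
                System.out.println(v + " hub=" + s.hub + " authority=" + s.authority);
            }
        }
    }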
        - * - * @param the vertex type - * @param the edge type - * - * @see "'Authoritative sources in a hyperlinked environment' by Jon Kleinberg, 1997" - */ -public class HITS extends HITSWithPriors -{ - - /** - * Creates an instance for the specified graph, edge weights, and alpha - * (random jump probability) parameter. - * @param g the input graph - * @param edge_weights the weights to use for each edge - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, Transformer edge_weights, double alpha) - { - super(g, edge_weights, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph and alpha (random jump probability) - * parameter. The edge weights are all set to 1. - * @param g the input graph - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, double alpha) - { - super(g, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph. The edge weights are all set to 1 - * and alpha is set to 0. - * @param g the input graph - */ - public HITS(Graph g) - { - this(g, 0.0); - } - - - /** - * Maintains hub and authority score information for a vertex. - */ - public static class Scores - { - /** - * The hub score for a vertex. - */ - public double hub; - - /** - * The authority score for a vertex. - */ - public double authority; - - /** - * Creates an instance with the specified hub and authority score. - */ - public Scores(double hub, double authority) - { - this.hub = hub; - this.authority = authority; - } - - @Override - public String toString() - { - return String.format("[h:%.4f,a:%.4f]", this.hub, this.authority); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java deleted file mode 100644 index 51ba71903e..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Created on Jul 14, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A generalization of HITS that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' (hub or authority score) - * that is assigned to that vertex out of the portion that is assigned according - * to random jumps. 
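A similar sketch for the priors-based generalization described above, biasing the random-jump portion of the hub/authority potential toward a single root vertex; the graph, the alpha value, and the prior distribution are illustrative assumptions:

    import org.apache.commons.collections15.Transformer;
    import edu.uci.ics.jung.algorithms.scoring.HITS;
    import edu.uci.ics.jung.algorithms.scoring.HITSWithPriors;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class HitsWithPriorsDemo {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "root", "a");
            g.addEdge(2, "a", "b");
            g.addEdge(3, "b", "root");

            // Send the entire random-jump portion of the potential back to "root".
            Transformer<String, HITS.Scores> priors = new Transformer<String, HITS.Scores>() {
                public HITS.Scores transform(String v) {
                    return "root".equals(v) ? new HITS.Scores(1.0, 1.0) : new HITS.Scores(0.0, 0.0);
                }
            };
            HITSWithPriors<String, Integer> ranker =
                new HITSWithPriors<String, Integer>(g, priors, 0.3); // alpha = 0.3 random-jump weight
            ranker.evaluate();
            System.out.println("root authority = " + ranker.getVertexScore("root").authority);
        }
    }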
- * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class HITSWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * The sum of the potential, at each step, associated with vertices with no outedges (authority) - * or no inedges (hub). - */ - protected HITS.Scores disappearing_potential; - - /** - * Creates an instance for the specified graph, edge weights, vertex prior probabilities, - * and random jump probability (alpha). - * @param g the input graph - * @param edge_weights the edge weights - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - public HITSWithPriors(Hypergraph g, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(g, edge_weights, vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); - } - - /** - * Creates an instance for the specified graph, vertex priors, and random - * jump probability (alpha). The edge weights default to 1.0. - * @param g the input graph - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - @SuppressWarnings("unchecked") - public HITSWithPriors(Hypergraph g, - Transformer vertex_priors, double alpha) - { - super(g, new ConstantTransformer(1.0), vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); - } - - /** - * Updates the value for this vertex. - */ - @Override - protected double update(V v) - { - collectDisappearingPotential(v); - - double v_auth = 0; - for (E e : graph.getInEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_auth += (getCurrentValue(w).hub * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V w = graph.getOpposite(v, e); -// auth += (getCurrentValue(w).hub * getEdgeWeight(w, e).doubleValue()); - } - - double v_hub = 0; - for (E e : graph.getOutEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_hub += (getCurrentValue(w).authority * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V x = graph.getOpposite(v,e); -// hub += (getCurrentValue(x).authority * getEdgeWeight(x, e).doubleValue()); - } - - // modify total_input according to alpha - if (alpha > 0) - { - v_auth = v_auth * (1 - alpha) + getVertexPrior(v).authority * alpha; - v_hub = v_hub * (1 - alpha) + getVertexPrior(v).hub * alpha; - } - setOutputValue(v, new HITS.Scores(v_hub, v_auth)); - - return Math.max(Math.abs(getCurrentValue(v).hub - v_hub), - Math.abs(getCurrentValue(v).authority - v_auth)); - } - - /** - * Code which is executed after each step. In this case, deals with the - * 'disappearing potential', normalizes the scores, and then calls - * super.afterStep(). 
- * @see #collectDisappearingPotential(Object) - */ - @Override - protected void afterStep() - { - if (disappearing_potential.hub > 0 || disappearing_potential.authority > 0) - { - for (V v : graph.getVertices()) - { - double new_hub = getOutputValue(v).hub + - (1 - alpha) * (disappearing_potential.hub * getVertexPrior(v).hub); - double new_auth = getOutputValue(v).authority + - (1 - alpha) * (disappearing_potential.authority * getVertexPrior(v).authority); - setOutputValue(v, new HITS.Scores(new_hub, new_auth)); - } - disappearing_potential.hub = 0; - disappearing_potential.authority = 0; - } - - normalizeScores(); - - super.afterStep(); - } - - /** - * Normalizes scores so that sum of their squares = 1. - * This method may be overridden so as to yield different - * normalizations. - */ - protected void normalizeScores() { - double hub_ssum = 0; - double auth_ssum = 0; - for (V v : graph.getVertices()) - { - double hub_val = getOutputValue(v).hub; - double auth_val = getOutputValue(v).authority; - hub_ssum += (hub_val * hub_val); - auth_ssum += (auth_val * auth_val); - } - - hub_ssum = Math.sqrt(hub_ssum); - auth_ssum = Math.sqrt(auth_ssum); - - for (V v : graph.getVertices()) - { - HITS.Scores values = getOutputValue(v); - setOutputValue(v, new HITS.Scores( - values.hub / hub_ssum, - values.authority / auth_ssum)); - } - } - - /** - * Collects the "disappearing potential" associated with vertices that have either - * no incoming edges, no outgoing edges, or both. Vertices that have no incoming edges - * do not directly contribute to the hub scores of other vertices; similarly, vertices - * that have no outgoing edges do not directly contribute to the authority scores of - * other vertices. These values are collected at each step and then distributed across all vertices - * as a part of the normalization process. (This process is not required for, and does - * not affect, the 'sum-of-squares'-style normalization.) - */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.hub += getCurrentValue(v).authority; - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } - if (graph.inDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.authority += getCurrentValue(v).hub; - else - throw new IllegalArgumentException("Indegree of " + v + " must be > 0"); - } - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java deleted file mode 100644 index e640b1b3f3..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java +++ /dev/null @@ -1,156 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- * Created on Aug 22, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A special case of {@code PageRankWithPriors} in which the final scores - * represent a probability distribution over position assuming a random (Markovian) - * walk of exactly k steps, based on the initial distribution specified by the priors. - * - *

- * <p>NOTE: The version of {@code KStepMarkov} in {@code algorithms.importance}
- * (and in JUNG 1.x) is believed to be incorrect: rather than returning
- * a score which represents a probability distribution over position assuming
- * a k-step random walk, it returns a score which represents the sum over all steps
- * of the probability for each step. If you want that behavior, set the
- * 'cumulative' flag as follows before calling {@code evaluate()}:
- * <pre>
- *     KStepMarkov ksm = new KStepMarkov(...);
- *     ksm.setCumulative(true);
- *     ksm.evaluate();
- * </pre>
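To round out the setCumulative() snippet above, a hedged end-to-end sketch (note the class's own warning that it is not yet complete); the graph, step count, and vertex names are arbitrary:

    import edu.uci.ics.jung.algorithms.scoring.KStepMarkov;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class KStepMarkovDemo {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "c");
            g.addEdge(3, "c", "a");

            // Distribution over position after exactly 4 steps, with uniform priors
            // and uniform transition probabilities.
            KStepMarkov<String, Integer> ksm = new KStepMarkov<String, Integer>(g, 4);
            ksm.evaluate();
            for (String v : g.getVertices()) {
                System.out.println(v + " -> " + ksm.getVertexScore(v));
            }
        }
    }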
        - * - * By default, the 'cumulative' flag is set to false. - * - * NOTE: THIS CLASS IS NOT YET COMPLETE. USE AT YOUR OWN RISK. (The original behavior - * is captured by the version still available in {@code algorithms.importance}.) - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - * @see PageRank - * @see PageRankWithPriors - */ -public class KStepMarkov extends PageRankWithPriors -{ - private boolean cumulative; - - /** - * Creates an instance based on the specified graph, edge weights, vertex - * priors (initial scores), and number of steps to take. - * @param graph the input graph - * @param edge_weights the edge weights (transition probabilities) - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Transformer edge_weights, - Transformer vertex_priors, int steps) - { - super(graph, edge_weights, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph, vertex - * priors (initial scores), and number of steps to take. The edge - * weights (transition probabilities) are set to default values (a uniform - * distribution over all outgoing edges). - * @param graph the input graph - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Transformer vertex_priors, int steps) - { - super(graph, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph and number of steps to - * take. The edge weights (transition probabilities) and vertex initial scores - * (prior probabilities) are set to default values (a uniform - * distribution over all outgoing edges, and a uniform distribution over - * all vertices, respectively). - * @param graph the input graph - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, int steps) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), 0); - initialize(steps); - } - - private void initialize(int steps) - { - this.acceptDisconnectedGraph(false); - - if (steps <= 0) - throw new IllegalArgumentException("Number of steps must be > 0"); - - this.max_iterations = steps; - this.tolerance = -1.0; - - this.cumulative = false; - } - - /** - * Specifies whether this instance should assign a score to each vertex - * based on the - * @param cumulative - */ - public void setCumulative(boolean cumulative) - { - this.cumulative = cumulative; - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - if (!cumulative) - return super.update(v); - - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. 
- int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value + getCurrentValue(v)); - - // FIXME: DO WE NEED TO CHANGE HOW DISAPPEARING IS COUNTED? NORMALIZE? - - return Math.abs(getCurrentValue(v) - new_value); - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java deleted file mode 100644 index ca7266d58f..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex according to the PageRank algorithm. - * - *

- * <p>PageRank is an eigenvector-based algorithm. The score for a given vertex may be thought of
- * as the fraction of time spent 'visiting' that vertex (measured over all time)
- * in a random walk over the vertices (following outgoing edges from each vertex).
- * PageRank modifies this random walk by adding to the model a probability (specified as 'alpha'
- * in the constructor) of jumping to any vertex. If alpha is 0, this is equivalent to the
- * eigenvector centrality algorithm; if alpha is 1, all vertices will receive the same score
- * (1/|V|). Thus, alpha acts as a sort of score smoothing parameter.
- *
- * <p>The original algorithm assumed that, for a given vertex, the probability of following any
- * outgoing edge was the same; this is the default if edge weights are not specified.
- * This implementation generalizes the original by permitting
- * the user to specify edge weights; in order to maintain the original semantics, however,
- * the weights on the outgoing edges for a given vertex must represent transition probabilities;
- * that is, they must sum to 1.
- *
- * <p>If a vertex has no outgoing edges, then the probability of taking a random jump from that
- * vertex is (by default) effectively 1. If the user wishes to instead throw an exception when this happens,
- * call acceptDisconnectedGraph(false) on this instance.
- *
        Typical values for alpha (according to the original paper) are in the range [0.1, 0.2] - * but may be any value between 0 and 1 inclusive. - * - * @see "The Anatomy of a Large-Scale Hypertextual Web Search Engine by L. Page and S. Brin, 1999" - */ -public class PageRank extends PageRankWithPriors -{ - - /** - * Creates an instance for the specified graph, edge weights, and random jump probability. - * @param graph the input graph - * @param edge_weight the edge weights (transition probabilities) - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, Transformer edge_weight, double alpha) - { - super(graph, edge_weight, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph and random jump probability; the probability - * of following any outgoing edge from a given vertex is the same. - * @param graph the input graph - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, double alpha) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java deleted file mode 100644 index 717d5eafe1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A generalization of PageRank that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' that is assigned to that - * vertex at each step out of the portion that is assigned according - * to random jumps (this portion is specified by 'alpha'). - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - * @see PageRank - */ -public class PageRankWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * Maintains the amount of potential associated with vertices with no out-edges. - */ - protected double disappearing_potential = 0.0; - - /** - * Creates an instance with the specified graph, edge weights, vertex priors, and - * 'random jump' probability (alpha). 
- * @param graph the input graph - * @param edge_weights the edge weights, denoting transition probabilities from source to destination - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(graph, edge_weights, vertex_priors, alpha); - } - - /** - * Creates an instance with the specified graph, vertex priors, and - * 'random jump' probability (alpha). The outgoing edge weights for each - * vertex will be equal and sum to 1. - * @param graph the input graph - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Transformer vertex_priors, double alpha) - { - super(graph, vertex_priors, alpha); - this.edge_weights = new UniformDegreeWeight(graph); - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value); - - return Math.abs(getCurrentValue(v) - new_value); - } - - /** - * Cleans up after each step. In this case that involves allocating the disappearing - * potential (thus maintaining normalization of the scores) according to the vertex - * probability priors, and then calling - * super.afterStep. - */ - @Override - protected void afterStep() - { - // distribute disappearing potential according to priors - if (disappearing_potential > 0) - { - for (V v : graph.getVertices()) - { - setOutputValue(v, getOutputValue(v) + - (1 - alpha) * (disappearing_potential * getVertexPrior(v))); - } - disappearing_potential = 0; - } - - super.afterStep(); - } - - /** - * Collects the "disappearing potential" associated with vertices that have - * no outgoing edges. Vertices that have no outgoing edges do not directly - * contribute to the scores of other vertices. These values are collected - * at each step and then distributed across all vertices - * as a part of the normalization process. 
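For context, a minimal sketch contrasting uniform random jumps (PageRank) with the non-uniform priors described above; JUNG 2.0 is assumed, and the sample graph, alpha value, and prior distribution are illustrative only:

    import org.apache.commons.collections15.Transformer;
    import edu.uci.ics.jung.algorithms.scoring.PageRank;
    import edu.uci.ics.jung.algorithms.scoring.PageRankWithPriors;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class PageRankDemo {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "c");
            g.addEdge(3, "c", "a");
            g.addEdge(4, "a", "c");

            // Plain PageRank: uniform random jumps with probability alpha = 0.15.
            PageRank<String, Integer> pr = new PageRank<String, Integer>(g, 0.15);
            pr.evaluate();

            // "Personalized" variant: all random-jump mass returns to vertex "a".
            Transformer<String, Double> priors = new Transformer<String, Double>() {
                public Double transform(String v) {
                    return "a".equals(v) ? 1.0 : 0.0;
                }
            };
            PageRankWithPriors<String, Integer> ppr =
                new PageRankWithPriors<String, Integer>(g, priors, 0.15);
            ppr.evaluate();

            for (String v : g.getVertices()) {
                System.out.println(v + " pagerank=" + pr.getVertexScore(v)
                    + " personalized=" + ppr.getVertexScore(v));
            }
        }
    }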
- */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential += getCurrentValue(v); - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java deleted file mode 100644 index 610de6b5a6..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - - -/** - * An interface for algorithms that assign scores to vertices. - * - * @param the vertex type - * @param the score type - */ -public interface VertexScorer -{ - /** - * Returns the algorithm's score for this vertex. - * @return the algorithm's score for this vertex - */ - public S getVertexScore(V v); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java deleted file mode 100644 index f05b911091..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Created on Jul 15, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to vertices according to their 'voltage' in an approximate - * solution to the Kirchoff equations. This is accomplished by tying "source" - * vertices to specified positive voltages, "sink" vertices to 0 V, and - * iteratively updating the voltage of each other vertex to the (weighted) - * average of the voltages of its neighbors. - * - *

- * <p>The resultant voltages will all be in the range [0, max]
- * where max is the largest voltage of any source vertex (in the
- * absence of negative source voltages; see below).
- *
- * <p>A few notes about this algorithm's interpretation of the graph data:
- * <ul>
- * <li/>Higher edge weights are interpreted as indicative of greater
- * influence/effect than lower edge weights.
- * <li/>Negative edge weights (and negative "source" voltages) invalidate
- * the interpretation of the resultant values as voltages. However, this
- * algorithm will not reject graphs with negative edge weights or source voltages.
- * <li/>Parallel edges are equivalent to a single edge whose weight is the
- * sum of the weights on the parallel edges.
- * <li/>Current flows along undirected edges in both directions,
- * but only flows along directed edges in the direction of the edge.
- * </ul>
        - */ -public class VoltageScorer extends AbstractIterativeScorer - implements VertexScorer -{ - protected Map source_voltages; - protected Collection sinks; - - /** - * Creates an instance with the specified graph, edge weights, source voltages, - * and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - Map source_voltages, Collection sinks) - { - super(g, edge_weights); - this.source_voltages = source_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - Collection sources, Collection sinks) - { - super(g, edge_weights); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Collection sources, Collection sinks) - { - super(g); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, source voltages, - * and sinks. The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Map source_voltages, - Collection sinks) - { - super(g); - this.source_voltages = source_voltages; - this.sinks = sinks; - this.edge_weights = new UniformDegreeWeight(g); - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - V source, V sink) - { - this(g, edge_weights, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. 
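A hedged usage sketch of the voltage model described above, tying one source to 1 V and one sink to 0 V on a small path graph; the vertex names and the quoted expected values are illustrative:

    import edu.uci.ics.jung.algorithms.scoring.VoltageScorer;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class VoltageDemo {
        public static void main(String[] args) {
            UndirectedSparseGraph<String, String> g = new UndirectedSparseGraph<String, String>();
            g.addEdge("s-a", "s", "a");
            g.addEdge("a-b", "a", "b");
            g.addEdge("b-t", "b", "t");

            // "s" is tied to 1 V and "t" to 0 V; interior vertices settle in between
            // (roughly 2/3 V at "a" and 1/3 V at "b" on this unweighted path).
            VoltageScorer<String, String> vs = new VoltageScorer<String, String>(g, "s", "t");
            vs.evaluate();
            for (String v : g.getVertices()) {
                System.out.println(v + " = " + vs.getVertexScore(v) + " V");
            }
        }
    }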
- * @param g the input graph - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, V source, V sink) - { - this(g, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); - } - - - /** - * Initializes the state of this instance. - */ - @Override - public void initialize() - { - super.initialize(); - - // sanity check - if (source_voltages.isEmpty() || sinks.isEmpty()) - throw new IllegalArgumentException("Both sources and sinks (grounds) must be defined"); - - if (source_voltages.size() + sinks.size() > graph.getVertexCount()) - throw new IllegalArgumentException("Source/sink sets overlap, or contain vertices not in graph"); - - for (Map.Entry entry : source_voltages.entrySet()) - { - V v = entry.getKey(); - if (sinks.contains(v)) - throw new IllegalArgumentException("Vertex " + v + " is incorrectly specified as both source and sink"); - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Source vertex " + v + " has negative voltage"); - } - - // set up initial voltages - for (V v : graph.getVertices()) - { - if (source_voltages.containsKey(v)) - setOutputValue(v, source_voltages.get(v).doubleValue()); - else - setOutputValue(v, 0.0); - } - } - - /** - * @see edu.uci.ics.jung.algorithms.scoring.AbstractIterativeScorer#update(Object) - */ - @Override - public double update(V v) - { - // if it's a voltage source or sink, we're done - Number source_volts = source_voltages.get(v); - if (source_volts != null) - { - setOutputValue(v, source_volts.doubleValue()); - return 0.0; - } - if (sinks.contains(v)) - { - setOutputValue(v, 0.0); - return 0.0; - } - - Collection edges = graph.getInEdges(v); - double voltage_sum = 0; - double weight_sum = 0; - for (E e: edges) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - { - double weight = getEdgeWeight(w,e).doubleValue() / incident_count; - voltage_sum += getCurrentValue(w).doubleValue() * weight; - weight_sum += weight; - } - } -// V w = graph.getOpposite(v, e); -// double weight = getEdgeWeight(w,e).doubleValue(); -// voltage_sum += getCurrentValue(w).doubleValue() * weight; -// weight_sum += weight; - } - - // if either is 0, new value is 0 - if (voltage_sum == 0 || weight_sum == 0) - { - setOutputValue(v, 0.0); - return getCurrentValue(v).doubleValue(); - } - - setOutputValue(v, voltage_sum / weight_sum); - return Math.abs(getCurrentValue(v).doubleValue() - voltage_sum / weight_sum); - } - -} - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/package.html deleted file mode 100644 index a1f8196ebf..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/package.html +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - -Mechanisms for assigning values (denoting significance, influence, centrality, etc.) -to graph elements based on topological properties. These include: - -
-<ul>
-<li>BarycenterScorer: assigns a score to each vertex according to
-the sum of the distances to all other vertices
-<li>ClosenessCentrality: assigns a score to each vertex based on
-the mean distance to each other vertex
-<li>DegreeScorer: assigns a score to each vertex based on its degree
-<li>EigenvectorCentrality: assigns vertex scores based on
-long-term probabilities of random walks passing through the vertex at time t
-<li>PageRank: like EigenvectorCentrality, but with
-a constant probability of the
-random walk restarting at a uniform-randomly chosen vertex
-<li>PageRankWithPriors: like PageRank, but with a
-constant probability of the random
-walk restarting at a vertex drawn from an arbitrary distribution
-<li>HITS: assigns hubs-and-authorities scores to vertices based on
-complementary random walk processes
-<li>HITSWithPriors: analogous to HITS
-(see PageRankWithPriors)
-<li>VoltageScorer: assigns scores to vertices based on simulated
-current flow along edges
-</ul>
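Since the scorers listed above share the VertexScorer interface, a small sketch of the common consumption pattern (iterative scorers need evaluate() first); the class names come from the removed package, everything else here is illustrative:

    import edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality;
    import edu.uci.ics.jung.algorithms.scoring.PageRank;
    import edu.uci.ics.jung.algorithms.scoring.VertexScorer;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Hypergraph;

    public class ScoringOverviewDemo {
        // Print any vertex score, regardless of which scorer produced it.
        static <V> void print(Hypergraph<V, ?> g, VertexScorer<V, ? extends Number> scorer) {
            for (V v : g.getVertices()) {
                System.out.println(v + " -> " + scorer.getVertexScore(v));
            }
        }

        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "a");
            g.addEdge(3, "b", "c");
            g.addEdge(4, "c", "a");

            PageRank<String, Integer> pr = new PageRank<String, Integer>(g, 0.15);
            pr.evaluate(); // iterative scorers must be evaluated before reading scores
            print(g, pr);

            // Non-iterative scorers compute lazily inside getVertexScore().
            print(g, new ClosenessCentrality<String, Integer>(g));
        }
    }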
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java deleted file mode 100644 index f836a9b428..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Created on Jul 11, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -/** - * A Transformer that delegates its operation to a - * Transformer. Mainly useful for technical reasons inside - * AbstractIterativeScorer; in essence it allows the edge weight instance - * variable to be of type VEPair,W even if the edge weight - * Transformer only operates on edges. - */ -public class DelegateToEdgeTransformer implements - Transformer,Number> -{ - /** - * The transformer to which this instance delegates its function. - */ - protected Transformer delegate; - - /** - * Creates an instance with the specified delegate transformer. - * @param delegate the Transformer to which this instance will delegate - */ - public DelegateToEdgeTransformer(Transformer delegate) - { - this.delegate = delegate; - } - - /** - * @see Transformer#transform(Object) - */ - public Number transform(VEPair arg0) - { - return delegate.transform(arg0.getE()); - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java deleted file mode 100644 index 793944b295..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import java.util.Collection; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.HITS; - -/** - * Methods for assigning values (to be interpreted as prior probabilities) to vertices in the context - * of random-walk-based scoring algorithms. - */ -public class ScoringUtils -{ - /** - * Assigns a probability of 1/roots.size() to each of the elements of roots. - * @param the vertex type - * @param roots the vertices to be assigned nonzero prior probabilities - * @return - */ - public static Transformer getUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Transformer distribution = new Transformer() - { - public Double transform(V input) - { - if (inner_roots.contains(input)) - return new Double(1.0 / inner_roots.size()); - else - return 0.0; - } - }; - - return distribution; - } - - /** - * Returns a Transformer that hub and authority values of 1/roots.size() to each - * element of roots. 
- * @param the vertex type - * @param roots the vertices to be assigned nonzero scores - * @return a Transformer that assigns uniform prior hub/authority probabilities to each root - */ - public static Transformer getHITSUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Transformer distribution = - new Transformer() - { - public HITS.Scores transform(V input) - { - if (inner_roots.contains(input)) - return new HITS.Scores(1.0 / inner_roots.size(), 1.0 / inner_roots.size()); - else - return new HITS.Scores(0.0, 0.0); - } - }; - return distribution; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java deleted file mode 100644 index f22bfcc891..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jul 14, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * An edge weight function that assigns weights as uniform - * transition probabilities. - * For undirected edges, returns 1/degree(v) (where 'v' is the - * vertex in the VEPair. - * For directed edges, returns 1/outdegree(source(e)) (where 'e' - * is the edge in the VEPair). - * Throws an IllegalArgumentException if the input - * edge is neither EdgeType.UNDIRECTED nor EdgeType.DIRECTED. - * - */ -public class UniformDegreeWeight implements - Transformer, Double> -{ - private Hypergraph graph; - - /** - * Creates an instance for the specified graph. - */ - public UniformDegreeWeight(Hypergraph graph) - { - this.graph = graph; - } - - /** - * @see org.apache.commons.collections15.Transformer#transform(java.lang.Object) - */ - public Double transform(VEPair ve_pair) - { - E e = ve_pair.getE(); - V v = ve_pair.getV(); - EdgeType edge_type = graph.getEdgeType(e); - if (edge_type == EdgeType.UNDIRECTED) - return 1.0 / graph.degree(v); - if (edge_type == EdgeType.DIRECTED) - return 1.0 / graph.outDegree(graph.getSource(e)); - throw new IllegalArgumentException("can't handle edge type: " + edge_type); - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java deleted file mode 100644 index 7853f002fd..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Created on Jul 11, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * Assigns weights to directed edges (the edge of the vertex/edge pair) depending on - * whether the vertex is the edge's source or its destination. - * If the vertex v is the edge's source, assigns 1/outdegree(v). - * Otherwise, assigns 1/indegree(w). - * Throws IllegalArgumentException if the edge is not directed. - */ -public class UniformInOut implements Transformer, Double> -{ - /** - * The graph for which the edge weights are defined. - */ - protected Graph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the graph for which the edge weights will be defined - */ - public UniformInOut(Graph graph) - { - this.graph = graph; - } - - /** - * @see org.apache.commons.collections15.Transformer#transform(Object) - * @throws IllegalArgumentException - */ - public Double transform(VEPair ve_pair) - { - V v = ve_pair.getV(); - E e = ve_pair.getE(); - if (graph.getEdgeType(e) != EdgeType.DIRECTED) - throw new IllegalArgumentException("This transformer only" + - " operates on directed edges"); - return 1.0 / (graph.isSource(v, e) ? - graph.outDegree(v) : - graph.inDegree(v)); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java deleted file mode 100644 index ad9029359b..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Created on Jul 8, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -/** - * Convenience class for associating a vertex and an edge. Used, for example, - * in contexts in which it is necessary to know the origin for an edge traversal - * (that is, the direction in which an (undirected) edge is being traversed). - * - * @param the vertex type - * @param the edge type - */ -public class VEPair -{ - private V v; - private E e; - - /** - * Creates an instance with the specified vertex and edge - * @param v the vertex to add - * @param e the edge to add - */ - public VEPair(V v, E e) - { - if (v == null || e == null) - throw new IllegalArgumentException("elements must be non-null"); - - this.v = v; - this.e = e; - } - - /** - * Returns the vertex of this pair. - */ - public V getV() - { - return v; - } - - /** - * Returns the edge of this pair. - */ - public E getE() - { - return e; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java deleted file mode 100644 index 851c08eef8..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Created on Jul 18, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.VertexScorer; - -/** - * A Transformer convenience wrapper around VertexScorer. - */ -public class VertexScoreTransformer implements Transformer -{ - /** - * The VertexScorer instance that provides the values returned by transform. - */ - protected VertexScorer vs; - - /** - * Creates an instance based on the specified VertexScorer. - */ - public VertexScoreTransformer(VertexScorer vs) - { - this.vs = vs; - } - - /** - * Returns the score for this vertex. - */ - public S transform(V v) - { - return vs.getVertexScore(v); - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/package.html deleted file mode 100644 index 3bf18f3584..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/package.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - -Utility functions for assigning scores to graph elements. These include: -
-<ul>
-<li>EdgeWeight: interface for classes that associate numeric values
-with edges
-<li>ScoringUtils: methods for calculating transition probabilities
-for random-walk-based algorithms.
-<li>UniformOut: an edge weight function that assigns weights as uniform
-transition probabilities to all outgoing edges of a vertex.
-<li>UniformIncident: an edge weight function that assigns
-weights as uniform transition probabilities to all incident edges of a
-vertex (useful for undirected graphs).
-<li>VEPair: analogous to Pair but specifically
-containing an associated vertex and edge.
-<li>VertexEdgeWeight: a subtype of EdgeWeight that
-assigns edge weights with respect to a specified 'source' vertex.
-</ul>
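A brief sketch of how these utilities are typically combined with the scorers above, using uniform root priors from ScoringUtils and wrapping a scorer as a Transformer; the graph, root set, and alpha value are illustrative assumptions:

    import java.util.Arrays;
    import org.apache.commons.collections15.Transformer;
    import edu.uci.ics.jung.algorithms.scoring.PageRankWithPriors;
    import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils;
    import edu.uci.ics.jung.algorithms.scoring.util.VertexScoreTransformer;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class ScoringUtilDemo {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "c");
            g.addEdge(3, "c", "a");

            // Uniform prior over a chosen root set {a, b}: 0.5 each, 0 elsewhere.
            Transformer<String, Double> priors =
                ScoringUtils.getUniformRootPrior(Arrays.asList("a", "b"));
            PageRankWithPriors<String, Integer> pr =
                new PageRankWithPriors<String, Integer>(g, priors, 0.2);
            pr.evaluate();

            // Wrap the scorer as a Transformer, e.g. for use by rendering code.
            Transformer<String, Double> scoreOf = new VertexScoreTransformer<String, Double>(pr);
            System.out.println("b -> " + scoreOf.transform("b"));
        }
    }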
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java deleted file mode 100644 index 38d3b001d6..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java +++ /dev/null @@ -1,169 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Labels each node in the graph according to the BFS distance from the start node(s). If nodes are unreachable, then - * they are assigned a distance of -1. - * All nodes traversed at step k are marked as predecessors of their successors traversed at step k+1. - *

        - * Running time is: O(m) - * @author Scott White - */ -public class BFSDistanceLabeler { - - private Map distanceDecorator = new HashMap(); - private List mCurrentList; - private Set mUnvisitedVertices; - private List mVerticesInOrderVisited; - private Map> mPredecessorMap; - - /** - * Creates a new BFS labeler for the specified graph and root set - * The distances are stored in the corresponding Vertex objects and are of type MutableInteger - */ - public BFSDistanceLabeler() { - mPredecessorMap = new HashMap>(); - } - - /** - * Returns the list of vertices visited in order of traversal - * @return the list of vertices - */ - public List getVerticesInOrderVisited() { - return mVerticesInOrderVisited; - } - - /** - * Returns the set of all vertices that were not visited - * @return the list of unvisited vertices - */ - public Set getUnvisitedVertices() { - return mUnvisitedVertices; - } - - /** - * Given a vertex, returns the shortest distance from any node in the root set to v - * @param v the vertex whose distance is to be retrieved - * @return the shortest distance from any node in the root set to v - */ - public int getDistance(Hypergraph g, V v) { - if (!g.getVertices().contains(v)) { - throw new IllegalArgumentException("Vertex is not contained in the graph."); - } - - return distanceDecorator.get(v).intValue(); - } - - /** - * Returns set of predecessors of the given vertex - * @param v the vertex whose predecessors are to be retrieved - * @return the set of predecessors - */ - public Set getPredecessors(V v) { - return mPredecessorMap.get(v); - } - - protected void initialize(Hypergraph g, Set rootSet) { - mVerticesInOrderVisited = new ArrayList(); - mUnvisitedVertices = new HashSet(); - for(V currentVertex : g.getVertices()) { - mUnvisitedVertices.add(currentVertex); - mPredecessorMap.put(currentVertex,new HashSet()); - } - - mCurrentList = new ArrayList(); - for(V v : rootSet) { - distanceDecorator.put(v, new Integer(0)); - mCurrentList.add(v); - mUnvisitedVertices.remove(v); - mVerticesInOrderVisited.add(v); - } - } - - private void addPredecessor(V predecessor,V sucessor) { - HashSet predecessors = mPredecessorMap.get(sucessor); - predecessors.add(predecessor); - } - - /** - * Computes the distances of all the node from the starting root nodes. If there is more than one root node - * the minimum distance from each root node is used as the designated distance to a given node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. - * @param graph the graph to label - * @param rootSet the set of starting vertices to traverse from - */ - public void labelDistances(Hypergraph graph, Set rootSet) { - - initialize(graph,rootSet); - - int distance = 1; - while (true) { - List newList = new ArrayList(); - for(V currentVertex : mCurrentList) { - if(graph.containsVertex(currentVertex)) { - for(V next : graph.getSuccessors(currentVertex)) { - visitNewVertex(currentVertex,next, distance, newList); - } - } - } - if (newList.size() == 0) break; - mCurrentList = newList; - distance++; - } - - for(V v : mUnvisitedVertices) { - distanceDecorator.put(v,new Integer(-1)); - } - } - - /** - * Computes the distances of all the node from the specified root node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. 
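For orientation, a minimal usage sketch of the BFS labeler removed above. It assumes the stock JUNG 2 generic signature BFSDistanceLabeler<V, E> (the type parameters have been stripped in this rendering) and uses UndirectedSparseGraph, a JUNG 2 graph implementation that is not part of this patch:

    import java.util.HashSet;
    import java.util.Set;

    import edu.uci.ics.jung.algorithms.shortestpath.BFSDistanceLabeler;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class BfsLabelerSketch {
        public static void main(String[] args) {
            // Small undirected graph: a - b - c, plus an isolated vertex d.
            Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();  // graph class not part of this patch
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c"); g.addVertex("d");
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "c");

            BFSDistanceLabeler<String, Integer> labeler = new BFSDistanceLabeler<String, Integer>();

            // Label every vertex with its BFS hop count from the root set;
            // unreachable vertices are labeled -1.
            Set<String> roots = new HashSet<String>();
            roots.add("a");
            labeler.labelDistances(g, roots);

            System.out.println(labeler.getDistance(g, "c"));         // 2
            System.out.println(labeler.getDistance(g, "d"));         // -1 (unreachable)
            System.out.println(labeler.getPredecessors("c"));        // [b]
            System.out.println(labeler.getVerticesInOrderVisited()); // a, then b, then c
        }
    }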
- * @param graph the graph to label - * @param root the single starting vertex to traverse from - */ - public void labelDistances(Hypergraph graph, V root) { - labelDistances(graph, Collections.singleton(root)); - } - - private void visitNewVertex(V predecessor, V neighbor, int distance, List newList) { - if (mUnvisitedVertices.contains(neighbor)) { - distanceDecorator.put(neighbor, new Integer(distance)); - newList.add(neighbor); - mVerticesInOrderVisited.add(neighbor); - mUnvisitedVertices.remove(neighbor); - } - int predecessorDistance = distanceDecorator.get(predecessor).intValue(); - int successorDistance = distanceDecorator.get(neighbor).intValue(); - if (predecessorDistance < successorDistance) { - addPredecessor(predecessor,neighbor); - } - } - - /** - * Returns a map from vertices to minimum distances from the original source(s). - * Must be called after {@code labelDistances} in order to contain valid data. - */ - public Map getDistanceDecorator() { - return distanceDecorator; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java deleted file mode 100644 index 3e99e16096..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java +++ /dev/null @@ -1,582 +0,0 @@ -/* - * Created on Jul 9, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.util.BasicMapEntry; -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - *

        Calculates distances in a specified graph, using - * Dijkstra's single-source-shortest-path algorithm. All edge weights - * in the graph must be nonnegative; if any edge with negative weight is - * found in the course of calculating distances, an - * IllegalArgumentException will be thrown. - * (Note: this exception will only be thrown when such an edge would be - * used to update a given tentative distance; - * the algorithm does not check for negative-weight edges "up front".) - * - *

        Distances and partial results are optionally cached (by this instance) - * for later reference. Thus, if the 10 closest vertices to a specified source - * vertex are known, calculating the 20 closest vertices does not require - * starting Dijkstra's algorithm over from scratch.

        - * - *

        Distances are stored as double-precision values. - * If a vertex is not reachable from the specified source vertex, no - * distance is stored. This is new behavior with version 1.4; - * the previous behavior was to store a value of - * Double.POSITIVE_INFINITY. This change gives the algorithm - * an approximate complexity of O(kD log k), where k is either the number of - * requested targets or the number of reachable vertices (whichever is smaller), - * and D is the average degree of a vertex.

        - * - *

        The elements in the maps returned by getDistanceMap - * are ordered (that is, returned - * by the iterator) by nondecreasing distance from source.

        - * - *

        Users are cautioned that distances calculated should be assumed to - * be invalidated by changes to the graph, and should invoke reset() - * when appropriate so that the distances can be recalculated.
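A minimal sketch of how the class described above is typically driven, assuming the stock JUNG 2 type parameters DijkstraDistance<V, E> and Transformer<E, Number> (dropped in this rendering) and the DirectedSparseGraph implementation, which is not part of this patch:

    import java.util.Map;

    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Graph;

    public class DijkstraDistanceSketch {
        public static void main(String[] args) {
            // Directed graph whose Double edge objects double as their weights.
            Graph<String, Double> g = new DirectedSparseGraph<String, Double>();  // graph class not part of this patch
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c"); g.addVertex("d");
            g.addEdge(2.0, "a", "b");
            g.addEdge(5.0, "a", "c");
            g.addEdge(1.0, "b", "c");
            // "d" has no incoming edge, so it is unreachable from "a".

            Transformer<Double, Number> weights = new Transformer<Double, Number>() {
                public Number transform(Double e) { return e; }
            };

            DijkstraDistance<String, Double> dd =
                    new DijkstraDistance<String, Double>(g, weights, true);  // cache results

            System.out.println(dd.getDistance("a", "c"));  // 3.0 (a -> b -> c beats the direct 5.0 edge)
            System.out.println(dd.getDistance("a", "d"));  // null: unreachable, so no distance is stored

            // Iterates in nondecreasing distance from the source: {a=0.0, b=2.0, c=3.0}
            Map<String, Number> fromA = dd.getDistanceMap("a");
            System.out.println(fromA);

            // After the graph (or its weights) changes, cached results must be discarded.
            g.addEdge(0.5, "a", "d");
            dd.reset();                                    // or dd.reset("a") for a single source
            System.out.println(dd.getDistance("a", "d"));  // 0.5
        }
    }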

        - * - * @author Joshua O'Madadhain - * @author Tom Nelson converted to jung2 - */ -public class DijkstraDistance implements Distance -{ - protected Hypergraph g; - protected Transformer nev; - protected Map sourceMap; // a map of source vertices to an instance of SourceData - protected boolean cached; - protected double max_distance; - protected int max_targets; - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraDistance(Hypergraph g, Transformer nev, boolean cached) { - this.g = g; - this.nev = nev; - this.sourceMap = new HashMap(); - this.cached = cached; - this.max_distance = Double.POSITIVE_INFINITY; - this.max_targets = Integer.MAX_VALUE; - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraDistance(Hypergraph g, Transformer nev) { - this(g, nev, true); - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - @SuppressWarnings("unchecked") - public DijkstraDistance(Hypergraph g) { - this(g, new ConstantTransformer(1), true); - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - @SuppressWarnings("unchecked") - public DijkstraDistance(Hypergraph g, boolean cached) { - this(g, new ConstantTransformer(1), cached); - } - - /** - * Implements Dijkstra's single-source shortest-path algorithm for - * weighted graphs. Uses a MapBinaryHeap as the priority queue, - * which gives this algorithm a time complexity of O(m lg n) (m = # of edges, n = - * # of vertices). - * This algorithm will terminate when any of the following have occurred (in order - * of priority): - *

          - *
        • the distance to the specified target (if any) has been found - *
        • no more vertices are reachable - *
        • the specified # of distances have been found, or the maximum distance - * desired has been exceeded - *
        • all distances have been found - *
        - * - * @param source the vertex from which distances are to be measured - * @param numDests the number of distances to measure - * @param targets the set of vertices to which distances are to be measured - * @param regular boolean is true if we want regular SP dijkstra. False for MT. - */ - private LinkedHashMap singleSourceShortestPath(V source, Collection targets, int numDests, boolean regular) - { - SourceData sd = getSourceData(source); - - Set to_get = new HashSet(); - if (targets != null) { - to_get.addAll(targets); - Set existing_dists = sd.distances.keySet(); - for(V o : targets) { - if (existing_dists.contains(o)) - to_get.remove(o); - } - } - - // if we've exceeded the max distance or max # of distances we're willing to calculate, or - // if we already have all the distances we need, - // terminate - if (sd.reached_max || - (targets != null && to_get.isEmpty()) || - (sd.distances.size() >= numDests)) - { - return sd.distances; - } - - while (!sd.unknownVertices.isEmpty() && (sd.distances.size() < numDests || !to_get.isEmpty())) - { - Map.Entry p = sd.getNextVertex(); - V v = p.getKey(); - double v_dist = p.getValue().doubleValue(); - to_get.remove(v); - if (v_dist > this.max_distance) - { - // we're done; put this vertex back in so that we're not including - // a distance beyond what we specified - sd.restoreVertex(v, v_dist); - sd.reached_max = true; - break; - } - sd.dist_reached = v_dist; - - if (sd.distances.size() >= this.max_targets) - { - sd.reached_max = true; - break; - } - - for (E e : getEdgesToCheck(v) ) - { - for (V w : g.getIncidentVertices(e)) - { - if (!sd.distances.containsKey(w)) - { - double edge_weight = nev.transform(e).doubleValue(); - if (edge_weight < 0) - throw new IllegalArgumentException("Edges weights must be non-negative"); - double new_dist; - if (regular == true) { - new_dist = v_dist + edge_weight; - } else { - if (v_dist <= edge_weight) { - new_dist = edge_weight; - } else { - new_dist = v_dist; - } - } - if (!sd.estimatedDistances.containsKey(w)) - { - sd.createRecord(w, e, new_dist); - } - else - { - double w_dist = ((Double)sd.estimatedDistances.get(w)).doubleValue(); - if (new_dist < w_dist) // update tentative distance & path for w - sd.update(w, e, new_dist); - } - } - } - } - } - return sd.distances; - } - - /** - * Implements Dijkstra's single-source shortest-path algorithm for - * weighted graphs. Uses a MapBinaryHeap as the priority queue, - * which gives this algorithm a time complexity of O(m lg n) (m = # of edges, n = - * # of vertices). - * This algorithm will terminate when any of the following have occurred (in order - * of priority): - *
          - *
        • the distance to the specified target (if any) has been found - *
        • no more vertices are reachable - *
        • the specified # of distances have been found, or the maximum distance - * desired has been exceeded - *
        • all distances have been found - *
        - * - * @param source the vertex from which distances are to be measured - * @param numDests the number of distances to measure - * @param targets the set of vertices to which distances are to be measured - */ - protected LinkedHashMap singleSourceShortestPath(V source, Collection targets, int numDests) - { - return singleSourceShortestPath(source, targets, numDests, true); - } - - /** - * Implements Dijkstra's single-source shortest-path algorithm for - * weighted graphs. Uses a MapBinaryHeap as the priority queue, - * which gives this algorithm a time complexity of O(m lg n) (m = # of edges, n = - * # of vertices). - * This algorithm will terminate when any of the following have occurred (in order - * of priority): - *
          - *
        • the distance to the specified target (if any) has been found - *
        • no more vertices are reachable - *
        • the specified # of distances have been found, or the maximum distance - * desired has been exceeded - *
        • all distances have been found - *
        - * - * @param source the vertex from which distances are to be measured - * @param numDests the number of distances to measure - * @param targets the set of vertices to which distances are to be measured - */ - protected LinkedHashMap singleSourceMaxThroughputPath(V source, Collection targets, int numDests) - { - return singleSourceShortestPath(source, targets, numDests, false); - } - - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourceData(source); - return sd; - } - - /** - * Returns the set of edges incident to v that should be tested. - * By default, this is the set of outgoing edges for instances of Graph, - * the set of incident edges for instances of Hypergraph, - * and is otherwise undefined. - */ - protected Collection getEdgesToCheck(V v) - { - if (g instanceof Graph) - return ((Graph)g).getOutEdges(v); - else - return g.getIncidentEdges(v); - - } - - - /** - * Returns the length of a shortest path from the source to the target vertex, - * or null if the target is not reachable from the source. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - * - * @see #getDistanceMap(Object) - * @see #getDistanceMap(Object,int) - */ - public Number getDistance(V source, V target) - { - if (g.containsVertex(target) == false) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - Set targets = new HashSet(); - targets.add(target); - Map distanceMap = getDistanceMap(source, targets); - return distanceMap.get(target); - } - - - /** - * Returns a {@code Map} from each element {@code t} of {@code targets} to the - * shortest-path distance from {@code source} to {@code t}. - */ - public Map getDistanceMap(V source, Collection targets) - { - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - if (targets.size() > max_targets) - throw new IllegalArgumentException("size of target set exceeds maximum " + - "number of targets allowed: " + this.max_targets); - - Map distanceMap = - singleSourceShortestPath(source, targets, - Math.min(g.getVertexCount(), max_targets)); - if (!cached) - reset(source); - - return distanceMap; - } - - /** - *

        Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source.

        - * - *

        The size of the map returned will be the number of - * vertices reachable from source.

        - * - * @see #getDistanceMap(Object,int) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - */ - public Map getDistanceMap(V source) - { - return getDistanceMap(source, Math.min(g.getVertexCount(), max_targets)); - } - - - - /** - *

        Returns a LinkedHashMap which maps each of the closest - * numDist vertices to the source vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph.

        - * - *

        The size of the map returned will be the smaller of - * numDests and the number of vertices reachable from - * source. - * - * @see #getDistanceMap(Object) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertices for which to measure distances - */ - public LinkedHashMap getDistanceMap(V source, int numDests) - { - - if(g.getVertices().contains(source) == false) { - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - } - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); - - if (numDests > max_targets) - throw new IllegalArgumentException("numDests must be <= the maximum " + - "number of targets allowed: " + this.max_targets); - - LinkedHashMap distanceMap = - singleSourceShortestPath(source, null, numDests); - - if (!cached) - reset(source); - - return distanceMap; - } - - /** - * Allows the user to specify the maximum distance that this instance will calculate. - * Any vertices past this distance will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any vertices which - * are farther away than this distance. A negative value for max_dist - * will ensure that no further distances are calculated. - * - *

        This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large.
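A short sketch of these bounds in combination with the numDests overload of getDistanceMap, again assuming the stock JUNG 2 type parameters, the unweighted constructor shown earlier, and UndirectedSparseGraph (not part of this patch):

    import java.util.Map;

    import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class BoundedDijkstraSketch {
        public static void main(String[] args) {
            // Chain a - b - c - d - e, treated as unweighted (every edge weighs 1).
            Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();  // graph class not part of this patch
            String[] vs = {"a", "b", "c", "d", "e"};
            for (String v : vs) g.addVertex(v);
            for (int i = 0; i < vs.length - 1; i++) g.addEdge(i, vs[i], vs[i + 1]);

            DijkstraDistance<String, Integer> dd = new DijkstraDistance<String, Integer>(g);

            // Only the 3 closest vertices to "a" (including "a"), in nondecreasing order.
            Map<String, Number> threeNearest = dd.getDistanceMap("a", 3);
            System.out.println(threeNearest);              // {a=0.0, b=1.0, c=2.0}

            // Cap further searches: anything beyond distance 2 is treated as unreachable.
            dd.setMaxDistance(2.0);
            System.out.println(dd.getDistance("a", "e"));  // null under the cap
        }
    }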

        - * - *

        Note: if this instance has already calculated distances greater than max_dist, - * and the results are cached, those results will still be valid and available; this limit - * applies only to subsequent distance calculations.

        - * @see #setMaxTargets(int) - */ - public void setMaxDistance(double max_dist) - { - this.max_distance = max_dist; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } - } - - /** - * Allows the user to specify the maximum number of target vertices per source vertex - * for which this instance will calculate distances. Once this threshold is reached, - * any further vertices will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any more vertices. - * A negative value for max_targets will ensure that no further distances are calculated. - * - *

        This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large.

        - * - *

        Note: if this instance has already calculated distances to a greater number of - * targets than max_targets, and the results are cached, those results - * will still be valid and available; this limit applies only to subsequent distance - * calculations.

        - * @see #setMaxDistance(double) - */ - public void setMaxTargets(int max_targets) - { - this.max_targets = max_targets; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } - } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - sourceMap = new HashMap(); - } - - /** - * Specifies whether or not this instance of DijkstraShortestPath - * should cache its results (final and partial) for future reference. - * - * @param enable true if the results are to be cached, and - * false otherwise - */ - public void enableCaching(boolean enable) - { - this.cached = enable; - } - - /** - * Clears all stored distances for the specified source vertex - * source. Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. - * - * @see #reset() - */ - public void reset(V source) - { - sourceMap.put(source, null); - } - - /** - * Compares according to distances, so that the BinaryHeap knows how to - * order the tree. - */ - protected static class VertexComparator implements Comparator - { - private Map distances; - - protected VertexComparator(Map distances) - { - this.distances = distances; - } - - public int compare(V o1, V o2) - { - return ((Double) distances.get(o1)).compareTo((Double) distances.get(o2)); - } - } - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimated distance) - * of the vertices for which distances are unknown. 
- * - * @author Joshua O'Madadhain - */ - protected class SourceData - { - protected LinkedHashMap distances; - protected Map estimatedDistances; - protected MapBinaryHeap unknownVertices; - protected boolean reached_max = false; - protected double dist_reached = 0; - - protected SourceData(V source) - { - distances = new LinkedHashMap(); - estimatedDistances = new HashMap(); - unknownVertices = new MapBinaryHeap(new VertexComparator(estimatedDistances)); - - sourceMap.put(source, this); - - // initialize priority queue - estimatedDistances.put(source, new Double(0)); // distance from source to itself is 0 - unknownVertices.add(source); - reached_max = false; - dist_reached = 0; - } - - protected Map.Entry getNextVertex() - { - V v = unknownVertices.remove(); - Double dist = (Double)estimatedDistances.remove(v); - distances.put(v, dist); - return new BasicMapEntry(v, dist); - } - - protected void update(V dest, E tentative_edge, double new_dist) - { - estimatedDistances.put(dest, new_dist); - unknownVertices.update(dest); - } - - protected void createRecord(V w, E e, double new_dist) - { - estimatedDistances.put(w, new_dist); - unknownVertices.add(w); - } - - protected void restoreVertex(V v, double dist) - { - estimatedDistances.put(v, dist); - unknownVertices.add(v); - distances.remove(v); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java deleted file mode 100644 index 05c008d11f..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java +++ /dev/null @@ -1,314 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - *

        Calculates distances and shortest paths using Dijkstra's - * single-source-shortest-path algorithm. This is a lightweight - * extension of DijkstraDistance that also stores - * path information, so that the shortest paths can be reconstructed.
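A minimal path-reconstruction sketch for the subclass described above, under the same assumptions as before (stock JUNG 2 type parameters; DirectedSparseGraph is not part of this patch):

    import java.util.List;

    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.algorithms.shortestpath.DijkstraShortestPath;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Graph;

    public class DijkstraPathSketch {
        public static void main(String[] args) {
            Graph<String, String> g = new DirectedSparseGraph<String, String>();  // graph class not part of this patch
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c");
            g.addEdge("ab", "a", "b");
            g.addEdge("bc", "b", "c");
            g.addEdge("ac", "a", "c");

            // Weight each edge by the length of its name: a->b->c costs 4, the direct a->c edge costs 2.
            Transformer<String, Number> weights = new Transformer<String, Number>() {
                public Number transform(String e) { return e.length(); }
            };

            DijkstraShortestPath<String, String> dsp =
                    new DijkstraShortestPath<String, String>(g, weights);

            // Edges of the shortest path from "a" to "c", in traversal order.
            List<String> path = dsp.getPath("a", "c");
            System.out.println(path);                           // [ac]
            System.out.println(dsp.getDistance("a", "c"));      // 2.0
            System.out.println(dsp.getIncomingEdge("a", "c"));  // ac
        }
    }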

        - * - *

        The elements in the maps returned by - * getIncomingEdgeMap are ordered (that is, returned - * by the iterator) by nondecreasing distance from source.

        - * - * @author Joshua O'Madadhain - * @author Tom Nelson converted to jung2 - * @see DijkstraDistance - */ -public class DijkstraShortestPath extends DijkstraDistance implements ShortestPath -{ - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, Transformer nev, boolean cached) - { - super(g, nev, cached); - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraShortestPath(Graph g, Transformer nev) - { - super(g, nev); - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - public DijkstraShortestPath(Graph g) - { - super(g); - } - - /** - *

        Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, boolean cached) - { - super(g, cached); - } - - @Override - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourcePathData(source); - return sd; - } - - /** - *

        Returns the last edge on a shortest path from source - * to target, or null if target is not - * reachable from source.

        - * - *

        If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException.

        - */ - public E getIncomingEdge(V source, V target) - { - if (!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if (!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - - Set targets = new HashSet(); - targets.add(target); - singleSourceShortestPath(source, targets, g.getVertexCount()); - Map incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - E incomingEdge = incomingEdgeMap.get(target); - - if (!cached) - reset(source); - - return incomingEdge; - } - - /** - *

        Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to the last edge on the shortest path from the - * source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source.

        - * - * @see DijkstraDistance#getDistanceMap(Object,int) - * @see DijkstraDistance#getDistance(Object,Object) - * @param source the vertex from which distances are measured - */ - public Map getIncomingEdgeMap(V source) - { - return getIncomingEdgeMap(source, g.getVertexCount()); - } - - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - */ - private List getPath(V source, V target, boolean spath) - { - if(!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if(!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - - LinkedList path = new LinkedList(); - - // collect path data; must use internal method rather than - // calling getIncomingEdge() because getIncomingEdge() may - // wipe out results if results are not cached - Set targets = new HashSet(); - targets.add(target); - if (spath == true) { - singleSourceShortestPath(source, targets, g.getVertexCount()); - } else { - singleSourceMaxThroughputPath(source, targets, g.getVertexCount()); - } - Map incomingEdges = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - current = ((Graph)g).getOpposite(current, incoming); - } - return path; - } - - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - */ - public List getPath(V source, V target) - { - - return getPath(source,target, true); - } - - /** - * Returns a List of the edges on the Max Througput Shortest - * path from source to target, in order of their - * their occurrence on this path. - * Important - Transformer fn should return the appropriate edge weight - * for this API to return the Path Correctly. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - */ - public List getMaxThroughputPath(V source, V target) - { - - return getPath(source,target, false); - } - - - /** - *

        Returns a LinkedHashMap which maps each of the closest - * numDist vertices to the source vertex - * in the graph (including the source vertex) - * to the incoming edge along the path from that vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph. - * - * @see #getIncomingEdgeMap(Object) - * @see #getPath(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertices for which to measure distances - */ - public LinkedHashMap getIncomingEdgeMap(V source, int numDests) - { - if (g.getVertices().contains(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); - - singleSourceShortestPath(source, null, numDests); - - LinkedHashMap incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (!cached) - reset(source); - - return incomingEdgeMap; - } - - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimaed distance) - * of the vertices for which distances are unknown. - * - * @author Joshua O'Madadhain - */ - protected class SourcePathData extends SourceData - { - protected Map tentativeIncomingEdges; - protected LinkedHashMap incomingEdges; - - protected SourcePathData(V source) - { - super(source); - incomingEdges = new LinkedHashMap(); - tentativeIncomingEdges = new HashMap(); - } - - @Override - public void update(V dest, E tentative_edge, double new_dist) - { - super.update(dest, tentative_edge, new_dist); - tentativeIncomingEdges.put(dest, tentative_edge); - } - - @Override - public Map.Entry getNextVertex() - { - Map.Entry p = super.getNextVertex(); - V v = p.getKey(); - E incoming = tentativeIncomingEdges.remove(v); - incomingEdges.put(v, incoming); - return p; - } - - @Override - public void restoreVertex(V v, double dist) - { - super.restoreVertex(v, dist); - E incoming = incomingEdges.get(v); - tentativeIncomingEdges.put(v, incoming); - } - - @Override - public void createRecord(V w, E e, double new_dist) - { - super.createRecord(w, e, new_dist); - tentativeIncomingEdges.put(w, e); - } - - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java deleted file mode 100644 index 85820d14b8..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Created on Apr 2, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Map; - - -/** - * An interface for classes which calculate the distance between - * one vertex and another. 
- * - * @author Joshua O'Madadhain - */ -public interface Distance -{ - /** - * Returns the distance from the source vertex - * to the target vertex. If target - * is not reachable from source, returns null. - */ - Number getDistance(V source, V target); - - /** - *

        Returns a Map which maps each vertex - * in the graph (including the source vertex) - * to its distance (represented as a Number) - * from source. If any vertex - * is not reachable from source, no - * distance is stored for that vertex. - */ - Map getDistanceMap(V source); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java deleted file mode 100644 index f0f20a3241..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java +++ /dev/null @@ -1,136 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality; -import edu.uci.ics.jung.algorithms.scoring.util.VertexScoreTransformer; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Statistics relating to vertex-vertex distances in a graph. - * - *

        Formerly known as GraphStatistics in JUNG 1.x.

        - * - * @author Scott White - * @author Joshua O'Madadhain - */ -public class DistanceStatistics -{ - /** - * For each vertex v in graph, - * calculates the average shortest path length from v - * to all other vertices in graph using the metric - * specified by d, and returns the results in a - * Map from vertices to Double values. - * If there exists an ordered pair <u,v> - * for which d.getDistance(u,v) returns null, - * then the average distance value for u will be stored - * as Double.POSITIVE_INFINITY). - * - *

        Does not include self-distances (path lengths from v - * to v). - * - *

        To calculate the average distances, ignoring edge weights if any: - *

        -     * Map distances = DistanceStatistics.averageDistances(g, new UnweightedShortestPath(g));
        -     * 
        - * To calculate the average distances respecting edge weights: - *
        -     * DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev);
        -     * Map distances = DistanceStatistics.averageDistances(g, dsp);
        -     * 
        - * where nev is an instance of Transformer that - * is used to fetch the weight for each edge. - * - * @see edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath - * @see edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance - */ - public static Transformer averageDistances(Hypergraph graph, Distance d) - { - final ClosenessCentrality cc = new ClosenessCentrality(graph, d); - return new VertexScoreTransformer(cc); - } - - /** - * For each vertex v in g, - * calculates the average shortest path length from v - * to all other vertices in g, ignoring edge weights. - * @see #diameter(Hypergraph) - * @see edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality - */ - public static Transformer averageDistances(Hypergraph g) - { - final ClosenessCentrality cc = new ClosenessCentrality(g, - new UnweightedShortestPath(g)); - return new VertexScoreTransformer(cc); - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v. If the graph is disconnected (that is, not - * all pairs of vertices are reachable from one another), the - * value returned will depend on use_max: - * if use_max == true, the value returned - * will be the the maximum shortest path length over all pairs of connected - * vertices; otherwise it will be Double.POSITIVE_INFINITY. - */ - public static double diameter(Hypergraph g, Distance d, boolean use_max) - { - double diameter = 0; - Collection vertices = g.getVertices(); - for(V v : vertices) { - for(V w : vertices) { - - if (v.equals(w) == false) // don't include self-distances - { - Number dist = d.getDistance(v, w); - if (dist == null) - { - if (!use_max) - return Double.POSITIVE_INFINITY; - } - else - diameter = Math.max(diameter, dist.doubleValue()); - } - } - } - return diameter; - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v, or Double.POSITIVE_INFINITY - * if any of these distances do not exist. - * @see #diameter(Hypergraph, Distance, boolean) - */ - public static double diameter(Hypergraph g, Distance d) - { - return diameter(g, d, false); - } - - /** - * Returns the diameter of g, ignoring edge weights. 
- * @see #diameter(Hypergraph, Distance, boolean) - */ - public static double diameter(Hypergraph g) - { - return diameter(g, new UnweightedShortestPath(g)); - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java deleted file mode 100644 index 18cb0fe026..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java +++ /dev/null @@ -1,165 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.functors.ConstantTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class MinimumSpanningForest { - - protected Graph graph; - protected Forest forest; - protected Map weights; - - /** - * Creates a Forest from the supplied Graph and supplied Factory, which - * is used to create a new, empty Forest. If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitrary Graph vertex - * will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created. - * @param graph the input graph - * @param factory the factory to use to create the new forest - * @param root the vertex of the graph to be used as the root of the forest - * @param weights edge weights - */ - public MinimumSpanningForest(Graph graph, Factory> factory, - V root, Map weights) { - this(graph, factory.create(), root, weights); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - * @param weights edge weights, may be null - */ - public MinimumSpanningForest(Graph graph, Forest forest, - V root, Map weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = weights; - } - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); - } - updateForest(forest.getVertices(), unfinishedEdges); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. 
- * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - */ - @SuppressWarnings("unchecked") - public MinimumSpanningForest(Graph graph, Forest forest, - V root) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - this.weights = LazyMap.decorate(new HashMap(), - new ConstantTransformer(1.0)); - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); - } - updateForest(forest.getVertices(), unfinishedEdges); - } - - /** - * Returns the generated forest. - */ - public Forest getForest() { - return forest; - } - - protected void updateForest(Collection tv, Collection unfinishedEdges) { - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(forest.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if(tv.contains(first) == true && tv.contains(second) == false) { - if(weights.get(e) < minCost) { - minCost = weights.get(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } - if(graph.getEdgeType(e) == EdgeType.UNDIRECTED && - tv.contains(second) == true && tv.contains(first) == false) { - if(weights.get(e) < minCost) { - minCost = weights.get(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - forest.addEdge(nextEdge, currentVertex, nextVertex); - updateForest(forest.getVertices(), unfinishedEdges); - } - Collection leftovers = new HashSet(graph.getVertices()); - leftovers.removeAll(forest.getVertices()); - if(leftovers.size() > 0) { - V anotherRoot = leftovers.iterator().next(); - forest.addVertex(anotherRoot); - updateForest(forest.getVertices(), unfinishedEdges); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java deleted file mode 100644 index 13e800c7d5..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java +++ /dev/null @@ -1,104 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.cluster.WeakComponentClusterer; -import edu.uci.ics.jung.algorithms.filters.FilterUtils; -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Tree; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * For the input Graph, creates a MinimumSpanningTree - * 
using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -@SuppressWarnings("unchecked") -public class MinimumSpanningForest2 { - - protected Graph graph; - protected Forest forest; - protected Transformer weights = - (Transformer)new ConstantTransformer(1.0); - - /** - * create a Forest from the supplied Graph and supplied Factory, which - * is used to create a new, empty Forest. If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitary Graph vertex - * will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph - * @param factory - * @param weights - */ - public MinimumSpanningForest2(Graph graph, - Factory> factory, - Factory> treeFactory, - Transformer weights) { - this(graph, factory.create(), - treeFactory, - weights); - } - - /** - * create a forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param weights edge weights, may be null - */ - public MinimumSpanningForest2(Graph graph, - Forest forest, - Factory> treeFactory, - Transformer weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = weights; - } - - WeakComponentClusterer wcc = - new WeakComponentClusterer(); - Set> component_vertices = wcc.transform(graph); - Collection> components = - FilterUtils.createAllInducedSubgraphs(component_vertices, graph); - - for(Graph component : components) { - PrimMinimumSpanningTree mst = - new PrimMinimumSpanningTree(treeFactory, this.weights); - Graph subTree = mst.transform(component); - if(subTree instanceof Tree) { - TreeUtils.addSubTree(forest, (Tree)subTree, null, null); - } - } - } - - /** - * Returns the generated forest. - */ - public Forest getForest() { - return forest; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java deleted file mode 100644 index b029dda747..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java +++ /dev/null @@ -1,116 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. 
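A small sketch of the transformer-style API described above, assuming the stock JUNG 2 signatures PrimMinimumSpanningTree<V, E> and Transformer<E, Double> (stripped in this rendering); UndirectedSparseGraph and the anonymous Factory are illustration choices, not part of this patch:

    import org.apache.commons.collections15.Factory;
    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.algorithms.shortestpath.PrimMinimumSpanningTree;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class PrimSketch {
        public static void main(String[] args) {
            // Weighted triangle a-b-c; the spanning tree should drop the heaviest edge (5.0).
            Graph<String, Double> g = new UndirectedSparseGraph<String, Double>();  // graph class not part of this patch
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c");
            g.addEdge(1.0, "a", "b");
            g.addEdge(2.0, "b", "c");
            g.addEdge(5.0, "a", "c");

            // Factory for the graph that will hold the resulting tree.
            Factory<Graph<String, Double>> treeFactory = new Factory<Graph<String, Double>>() {
                public Graph<String, Double> create() {
                    return new UndirectedSparseGraph<String, Double>();
                }
            };

            // The Double edge objects double as their own weights.
            Transformer<Double, Double> weights = new Transformer<Double, Double>() {
                public Double transform(Double e) { return e; }
            };

            PrimMinimumSpanningTree<String, Double> prim =
                    new PrimMinimumSpanningTree<String, Double>(treeFactory, weights);
            Graph<String, Double> mst = prim.transform(g);

            System.out.println(mst.getEdges());  // the two lightest edges: 1.0 and 2.0
        }
    }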
- * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param the vertex type - * @param the edge type - */ -@SuppressWarnings("unchecked") -public class PrimMinimumSpanningTree implements Transformer,Graph> { - - protected Factory> treeFactory; - protected Transformer weights; - - /** - * Creates an instance which generates a minimum spanning tree assuming constant edge weights. - */ - public PrimMinimumSpanningTree(Factory> factory) { - this(factory, new ConstantTransformer(1.0)); - } - - /** - * Creates an instance which generates a minimum spanning tree using the input edge weights. - */ - public PrimMinimumSpanningTree(Factory> factory, - Transformer weights) { - this.treeFactory = factory; - if(weights != null) { - this.weights = weights; - } - } - - /** - * @param graph the Graph to find MST in - */ - public Graph transform(Graph graph) { - Set unfinishedEdges = new HashSet(graph.getEdges()); - Graph tree = treeFactory.create(); - V root = findRoot(graph); - if(graph.getVertices().contains(root)) { - tree.addVertex(root); - } else if(graph.getVertexCount() > 0) { - // pick an arbitrary vertex to make root - tree.addVertex(graph.getVertices().iterator().next()); - } - updateTree(tree, graph, unfinishedEdges); - - return tree; - } - - protected V findRoot(Graph graph) { - for(V v : graph.getVertices()) { - if(graph.getInEdges(v).size() == 0) { - return v; - } - } - // if there is no obvious root, pick any vertex - if(graph.getVertexCount() > 0) { - return graph.getVertices().iterator().next(); - } - // this graph has no vertices - return null; - } - - protected void updateTree(Graph tree, Graph graph, Collection unfinishedEdges) { - Collection tv = tree.getVertices(); - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(tree.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if((tv.contains(first) == true && tv.contains(second) == false)) { - if(weights.transform(e) < minCost) { - minCost = weights.transform(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } else if((tv.contains(second) == true && tv.contains(first) == false)) { - if(weights.transform(e) < minCost) { - minCost = weights.transform(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - tree.addEdge(nextEdge, currentVertex, nextVertex); - updateTree(tree, graph, unfinishedEdges); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java deleted file mode 100644 index a922cdd329..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java +++ /dev/null @@ -1,29 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. 
-* -* Created on Feb 12, 2004 -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Map; - - -/** - * An interface for algorithms that calculate shortest paths. - */ -public interface ShortestPath -{ - /** - *

        Returns a Map which maps each vertex - * in the graph (including the source vertex) - * to the last edge on the shortest path from the - * source vertex. - */ - Map getIncomingEdgeMap(V source); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java deleted file mode 100644 index d3e59ebec9..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Created on Jul 10, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Utilities relating to the shortest paths in a graph. - */ -public class ShortestPathUtils -{ - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - */ - public static List getPath(Graph graph, ShortestPath sp, V source, V target) - { - LinkedList path = new LinkedList(); - - Map incomingEdges = sp.getIncomingEdgeMap(source); - - if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - Pair endpoints = graph.getEndpoints(incoming); - if(endpoints.getFirst().equals(current)) { - current = endpoints.getSecond(); - } else { - current = endpoints.getFirst(); - } - //incoming.getOpposite(current); - } - return path; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java deleted file mode 100644 index 1d3390c058..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java +++ /dev/null @@ -1,151 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Computes the shortest path distances for graphs whose edges are not weighted (using BFS). 
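A minimal sketch of UnweightedShortestPath together with the ShortestPathUtils.getPath helper above, assuming the stock JUNG 2 type parameters and the UndirectedSparseGraph implementation (not part of this patch):

    import java.util.List;

    import edu.uci.ics.jung.algorithms.shortestpath.ShortestPathUtils;
    import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class UnweightedSketch {
        public static void main(String[] args) {
            // Cycle a - b - c - d - a; every hop counts as 1.
            Graph<String, String> g = new UndirectedSparseGraph<String, String>();  // graph class not part of this patch
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c"); g.addVertex("d");
            g.addEdge("ab", "a", "b");
            g.addEdge("bc", "b", "c");
            g.addEdge("cd", "c", "d");
            g.addEdge("da", "d", "a");

            UnweightedShortestPath<String, String> usp =
                    new UnweightedShortestPath<String, String>(g);

            System.out.println(usp.getDistance("a", "c"));  // 2
            System.out.println(usp.getDistanceMap("a"));    // hop counts from "a"

            // Rebuild the edge sequence from the incoming-edge map.
            List<String> path = ShortestPathUtils.getPath(g, usp, "a", "c");
            System.out.println(path);  // two edges, e.g. [ab, bc] or [da, cd]
        }
    }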
- * - * @author Scott White - */ -public class UnweightedShortestPath - implements ShortestPath, Distance -{ - private Map> mDistanceMap; - private Map> mIncomingEdgeMap; - private Hypergraph mGraph; - private Map distances = new HashMap(); - - /** - * Constructs and initializes algorithm - * @param g the graph - */ - public UnweightedShortestPath(Hypergraph g) - { - mDistanceMap = new HashMap>(); - mIncomingEdgeMap = new HashMap>(); - mGraph = g; - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistance(Object, Object) - */ - public Number getDistance(V source, V target) - { - Map sourceSPMap = getDistanceMap(source); - return sourceSPMap.get(target); - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistanceMap(Object) - */ - public Map getDistanceMap(V source) - { - Map sourceSPMap = mDistanceMap.get(source); - if (sourceSPMap == null) - { - computeShortestPathsFromSource(source); - sourceSPMap = mDistanceMap.get(source); - } - return sourceSPMap; - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.ShortestPath#getIncomingEdgeMap(Object) - */ - public Map getIncomingEdgeMap(V source) - { - Map sourceIEMap = mIncomingEdgeMap.get(source); - if (sourceIEMap == null) - { - computeShortestPathsFromSource(source); - sourceIEMap = mIncomingEdgeMap.get(source); - } - return sourceIEMap; - } - - - /** - * Computes the shortest path distances from a given node to all other nodes. - * @param source the source node - */ - private void computeShortestPathsFromSource(V source) - { - BFSDistanceLabeler labeler = new BFSDistanceLabeler(); - labeler.labelDistances(mGraph, source); - distances = labeler.getDistanceDecorator(); - Map currentSourceSPMap = new HashMap(); - Map currentSourceEdgeMap = new HashMap(); - - for(V vertex : mGraph.getVertices()) { - - Number distanceVal = distances.get(vertex); - // BFSDistanceLabeler uses -1 to indicate unreachable vertices; - // don't bother to store unreachable vertices - if (distanceVal != null && distanceVal.intValue() >= 0) - { - currentSourceSPMap.put(vertex, distanceVal); - int minDistance = distanceVal.intValue(); - for(E incomingEdge : mGraph.getInEdges(vertex)) - { - for (V neighbor : mGraph.getIncidentVertices(incomingEdge)) - { - if (neighbor.equals(vertex)) - continue; -// V neighbor = mGraph.getOpposite(vertex, incomingEdge); - - Number predDistanceVal = distances.get(neighbor); - - int pred_distance = predDistanceVal.intValue(); - if (pred_distance < minDistance && pred_distance >= 0) - { - minDistance = predDistanceVal.intValue(); - currentSourceEdgeMap.put(vertex, incomingEdge); - } - } - } - } - } - mDistanceMap.put(source, currentSourceSPMap); - mIncomingEdgeMap.put(source, currentSourceEdgeMap); - } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - mDistanceMap.clear(); - mIncomingEdgeMap.clear(); - } - - /** - * Clears all stored distances for the specified source vertex - * source. Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. 
- * - * @see #reset() - */ - public void reset(V v) - { - mDistanceMap.remove(v); - mIncomingEdgeMap.remove(v); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/package.html deleted file mode 100644 index 01f27b5f36..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/package.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - -Provides interfaces and classes for calculating (geodesic) distances and shortest paths. Currently includes: -

-  • DijkstraDistance: finds the distances from a specified source vertex to other vertices in a weighted graph with no negative cycles
-  • DijkstraShortestPath: extends DijkstraDistance, also finds shortest paths
-  • Distance: an interface for defining vertex-vertex distances
-  • PrimMinimumSpanningTree: identifies the spanning tree for a graph of least total edge weight
-  • ShortestPath: an interface for shortest-path algorithms
-  • ShortestPathUtils: utility functions for manipulating shortest paths
-  • UnweightedShortestPath: finds the distances from a specified source vertex to other vertices in an unweighted graph
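For orientation, the sketch below shows how the shortest-path classes removed in this change were typically wired together. Only the UnweightedShortestPath and ShortestPathUtils calls are taken from the deleted sources above; the SparseMultigraph graph type and the sample vertices and edges are illustrative assumptions, not part of this patch.

    import java.util.List;

    import edu.uci.ics.jung.algorithms.shortestpath.ShortestPathUtils;
    import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseMultigraph;

    public class ShortestPathSketch {
        public static void main(String[] args) {
            // Hypothetical line graph a - b - c with String vertices and Integer edge ids.
            Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
            g.addVertex("a");
            g.addVertex("b");
            g.addVertex("c");
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "c");

            // BFS-based distances from source "a" (UnweightedShortestPath implements ShortestPath).
            UnweightedShortestPath<String, Integer> usp = new UnweightedShortestPath<String, Integer>(g);
            Number hops = usp.getDistance("a", "c"); // 2 hops in this sample graph

            // Rebuild the edge sequence of the shortest path a -> c from the incoming-edge map.
            List<Integer> path = ShortestPathUtils.getPath(g, usp, "a", "c"); // [1, 2] here
            System.out.println(hops + " " + path);
        }
    }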
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java deleted file mode 100644 index 8fa33b882d..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Apr 21, 2004 - */ -package edu.uci.ics.jung.algorithms.transformation; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - *

        Functions for transforming graphs into directed or undirected graphs.

        - * - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class DirectionTransformer -{ - - /** - * Transforms graph (which may be of any directionality) - * into an undirected graph. (This may be useful for - * visualization tasks). - * Specifically: - *
          - *
- *   • Vertices are copied from graph.
- *   • Directed edges are 'converted' into a single new undirected edge in the new graph.
- *   • Each undirected edge (if any) in graph is 'recreated' with a new undirected edge in the new graph if create_new is true, or copied from graph otherwise.
        - * - * @param graph the graph to be transformed - * @param create_new specifies whether existing undirected edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @return the transformed Graph - */ - public static UndirectedGraph toUndirected(Graph graph, - Factory> graph_factory, - Factory edge_factory, boolean create_new) - { - UndirectedGraph out = graph_factory.create(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - E to_add; - if (graph.getEdgeType(e) == EdgeType.DIRECTED || create_new) - to_add = edge_factory.create(); - else - to_add = e; - out.addEdge(to_add, v1, v2, EdgeType.UNDIRECTED); - } - return out; - } - - /** - * Transforms graph (which may be of any directionality) - * into a directed graph. - * Specifically: - *
          - *
- *   • Vertices are copied from graph.
- *   • Undirected edges are 'converted' into two new antiparallel directed edges in the new graph.
- *   • Each directed edge (if any) in graph is 'recreated' with a new edge in the new graph if create_new is true, or copied from graph otherwise.
        - * - * @param graph the graph to be transformed - * @param create_new specifies whether existing directed edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @return the transformed Graph - */ - public static Graph toDirected(Graph graph, Factory> graph_factory, - Factory edge_factory, boolean create_new) - { - DirectedGraph out = graph_factory.create(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) - { - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - out.addEdge(edge_factory.create(), v1, v2, EdgeType.DIRECTED); - out.addEdge(edge_factory.create(), v2, v1, EdgeType.DIRECTED); - } - else // if the edge is directed, just add it - { - V source = graph.getSource(e); - V dest = graph.getDest(e); - E to_add = create_new ? edge_factory.create() : e; - out.addEdge(to_add, source, dest, EdgeType.DIRECTED); - } - - } - return out; - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java deleted file mode 100644 index 2193319484..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Apr 21, 2004 - */ -package edu.uci.ics.jung.algorithms.transformation; - -import java.util.ArrayList; -import java.util.Collection; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Hypergraph; -import edu.uci.ics.jung.graph.KPartiteGraph; - -/** - * Methods for creating a "folded" graph based on a k-partite graph or a - * hypergraph. - * - *

        A "folded" graph is derived from a k-partite graph by identifying - * a partition of vertices which will become the vertices of the new graph, copying - * these vertices into the new graph, and then connecting those vertices whose - * original analogues were connected indirectly through elements - * of other partitions.

        - * - *

        A "folded" graph is derived from a hypergraph by creating vertices based on - * either the vertices or the hyperedges of the original graph, and connecting - * vertices in the new graph if their corresponding vertices/hyperedges share a - * connection with a common hyperedge/vertex.

        - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class FoldingTransformer -{ - - /** - * Converts g into a unipartite graph whose vertex set is the - * vertices of g's partition p. For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

        The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * created by the input edge Factory.

        - * - *

        If there is more than 1 such vertex c for a given pair - * (a,b), the type of the output graph will determine whether - * it will contain parallel edges or not.

        - * - *

        This function will not create self-loops.

        - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory factory used to create the output graph - * @param edge_factory factory used to create the edges in the new graph - * @return a copy of the input graph folded with respect to the input partition - */ - public static Graph foldKPartiteGraph(KPartiteGraph g, Predicate p, - Factory> graph_factory, Factory edge_factory) - { - Graph newGraph = graph_factory.create(); - - // get vertices for the specified partition - Collection vertices = g.getVertices(p); - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - newGraph.addEdge(edge_factory.create(), v, t); - } - } - } - return newGraph; - } - - /** - * Converts g into a unipartite graph whose vertices are the - * vertices of g's partition p, and whose edges - * consist of collections of the intermediate vertices from other partitions. - * For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

        The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * collections of the intermediate vertices c.

        - * - *

        This function will not create self-loops.

        - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory factory used to create the output graph - * @return the result of folding g into unipartite graph whose vertices - * are those of the p partition of g - */ - public static Graph> foldKPartiteGraph(KPartiteGraph g, Predicate p, - Factory>> graph_factory) - { - Graph> newGraph = graph_factory.create(); - - // get vertices for the specified partition, copy into new graph - Collection vertices = g.getVertices(p); - - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - Collection v_coll = newGraph.findEdge(v, t); - if (v_coll == null) - { - v_coll = new ArrayList(); - newGraph.addEdge(v_coll, v, t); - } - v_coll.add(s); - } - } - } - return newGraph; - } - - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

        The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph.

        - * - *

        The edges of the new graph consist of collections of each hyperedge that connected - * the corresponding vertex pair in the original graph.

        - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph> foldHypergraphEdges(Hypergraph h, - Factory>> graph_factory) - { - Graph> target = graph_factory.create(); - - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - populateTarget(target, e, incident); - } - return target; - } - - - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

        The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph.

        - * - *

        The edges of the new graph are generated by the specified edge factory.

        - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @param edge_factory factory used to create the new edges - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph foldHypergraphEdges(Hypergraph h, - Factory> graph_factory, Factory edge_factory) - { - Graph target = graph_factory.create(); - - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.create(), incident.get(i), incident.get(j)); - } - return target; - } - - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

        The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph.

        - * - *

        The edges of the new graph are created by the specified factory.

        - * - * @param vertex type - * @param input edge type - * @param output edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @param edge_factory factory used to generate the output edges - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public static Graph foldHypergraphVertices(Hypergraph h, - Factory> graph_factory, Factory edge_factory) - { - Graph target = graph_factory.create(); - - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.create(), incident.get(i), incident.get(j)); - } - - return target; - } - - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

        The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph.

        - * - *

        The edges of the new graph consist of collections of each vertex incident to - * the corresponding hyperedge pair in the original graph.

        - * - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public Graph> foldHypergraphVertices(Hypergraph h, - Factory>> graph_factory) - { - Graph> target = graph_factory.create(); - - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - populateTarget(target, v, incident); - } - return target; - } - - /** - * @param target - * @param e - * @param incident - */ - private static void populateTarget(Graph> target, T e, - ArrayList incident) - { - for (int i = 0; i < incident.size(); i++) - { - S v1 = incident.get(i); - for (int j = i+1; j < incident.size(); j++) - { - S v2 = incident.get(j); - Collection e_coll = target.findEdge(v1, v2); - if (e_coll == null) - { - e_coll = new ArrayList(); - target.addEdge(e_coll, v1, v2); - } - e_coll.add(e); - } - } - } - -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java deleted file mode 100644 index e44d05d446..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java +++ /dev/null @@ -1,103 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.transformation; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.MapTransformer; - -import edu.uci.ics.jung.algorithms.blockmodel.VertexPartition; -import edu.uci.ics.jung.graph.Graph; - -/** - * This class transforms a graph with a known vertex partitioning into a graph whose - * vertices correspond to the input graph's partitions. Two vertices in the output graph - * are connected if and only if there exists at least one edge between vertices in the - * corresponding partitions of the input graph. If the output graph permits parallel edges, - * there will be an edge connecting two vertices in the new graph for each such - * edge connecting constituent vertices in the input graph. - * - *

        Concept based on Danyel Fisher's GraphCollapser in JUNG 1.x. - * - */ -public class VertexPartitionCollapser -{ - protected Factory> graph_factory; - protected Factory vertex_factory; - protected Factory edge_factory; - protected Map, CV> set_collapsedv; - - /** - * Creates an instance with the specified graph and element factories. - * @param vertex_factory used to construct the vertices of the new graph - * @param edge_factory used to construct the edges of the new graph - * @param graph_factory used to construct the new graph - */ - public VertexPartitionCollapser(Factory> graph_factory, - Factory vertex_factory, Factory edge_factory) - { - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.set_collapsedv = new HashMap, CV>(); - } - - /** - * Creates a new graph whose vertices correspond to the partitions of the supplied graph. - * @param partitioning - * @return a new graph whose vertices correspond to the partitions of the supplied graph - */ - public Graph collapseVertexPartitions(VertexPartition partitioning) - { - Graph original = partitioning.getGraph(); - Graph collapsed = graph_factory.create(); - - // create vertices in new graph corresponding to equivalence sets in the original graph - for (Set set : partitioning.getVertexPartitions()) - { - CV cv = vertex_factory.create(); - collapsed.addVertex(vertex_factory.create()); - set_collapsedv.put(set, cv); - } - - // create edges in new graph corresponding to edges in original graph - for (E e : original.getEdges()) - { - Collection incident = original.getIncidentVertices(e); - Collection collapsed_vertices = new HashSet(); - Map> vertex_partitions = partitioning.getVertexToPartitionMap(); - // collect the collapsed vertices corresponding to the original incident vertices - for (V v : incident) - collapsed_vertices.add(set_collapsedv.get(vertex_partitions.get(v))); - // if there's only one collapsed vertex, continue (no edges to create) - if (collapsed_vertices.size() > 1) - { - CE ce = edge_factory.create(); - collapsed.addEdge(ce, collapsed_vertices); - } - } - return collapsed; - } - - /** - * Returns a transformer from vertex sets in the original graph to collapsed vertices - * in the transformed graph. - */ - public Transformer, CV> getSetToCollapsedVertexTransformer() - { - return MapTransformer.getInstance(set_collapsedv); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/package.html deleted file mode 100644 index 6680095fb1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/transformation/package.html +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - -Mechanisms for graph transformation. These currently include: -

-  • DirectionTransformer: generates graphs where input undirected edges have been converted to directed edges, or vice versa
-  • FoldingTransformer: transforms k-partite graphs or hypergraphs into unipartite graphs
-  • VertexPartitionCollapser: transforms a graph, given a partition of its vertices into disjoint sets, into a graph in which each of these disjoint sets has been 'collapsed' into a single new vertex
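As a rough usage sketch for the first of these utilities: DirectionTransformer.toUndirected builds its output entirely through the supplied factories, as in the deleted signature above. The DirectedSparseGraph and UndirectedSparseGraph types come from the JUNG graph package and, like the sample data and factory implementations, are assumptions for illustration rather than part of this patch.

    import org.apache.commons.collections15.Factory;

    import edu.uci.ics.jung.algorithms.transformation.DirectionTransformer;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedGraph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class ToUndirectedSketch {
        public static void main(String[] args) {
            // Hypothetical directed input graph with String vertices and String edge labels.
            Graph<String, String> directed = new DirectedSparseGraph<String, String>();
            directed.addVertex("a");
            directed.addVertex("b");
            directed.addEdge("a->b", "a", "b");

            // The transformer asks this factory for the output graph instance...
            Factory<UndirectedGraph<String, String>> graphFactory =
                    new Factory<UndirectedGraph<String, String>>() {
                        public UndirectedGraph<String, String> create() {
                            return new UndirectedSparseGraph<String, String>();
                        }
                    };
            // ...and this one for each replacement edge (directed edges are always recreated).
            Factory<String> edgeFactory = new Factory<String>() {
                private int count = 0;
                public String create() {
                    return "e" + count++;
                }
            };

            UndirectedGraph<String, String> undirected =
                    DirectionTransformer.toUndirected(directed, graphFactory, edgeFactory, true);
            System.out.println(undirected.getEdgeCount() + " undirected edge(s)"); // 1 here
        }
    }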
        - - - diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java deleted file mode 100644 index a82aea6b98..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java +++ /dev/null @@ -1,80 +0,0 @@ -package edu.uci.ics.jung.algorithms.util; - -import java.util.HashMap; -import java.util.Map; - -/** - * An simple minimal implementation of Map.Entry. - * - * @param the key type - * @param the value type - */ -public class BasicMapEntry implements Map.Entry { - final K key; - V value; - - /** - * Create new entry. - */ - public BasicMapEntry(K k, V v) { - value = v; - key = k; - } - - public K getKey() { - return key; - } - - public V getValue() { - return value; - } - - public V setValue(V newValue) { - V oldValue = value; - value = newValue; - return oldValue; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object o) { - if (!(o instanceof Map.Entry)) - return false; - Map.Entry e = (Map.Entry)o; - Object k1 = getKey(); - Object k2 = e.getKey(); - if (k1 == k2 || (k1 != null && k1.equals(k2))) { - Object v1 = getValue(); - Object v2 = e.getValue(); - if (v1 == v2 || (v1 != null && v1.equals(v2))) - return true; - } - return false; - } - - @Override - public int hashCode() { - return (key==null ? 0 : key.hashCode()) ^ - (value==null ? 0 : value.hashCode()); - } - - @Override - public String toString() { - return getKey() + "=" + getValue(); - } - - /** - * This method is invoked whenever the value in an entry is - * overwritten by an invocation of put(k,v) for a key k that's already - * in the HashMap. - */ - void recordAccess(HashMap m) { - } - - /** - * This method is invoked whenever the entry is - * removed from the table. - */ - void recordRemoval(HashMap m) { - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/ConstantMap.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/ConstantMap.java deleted file mode 100644 index 53054d71d9..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/ConstantMap.java +++ /dev/null @@ -1,93 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ - -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.Set; - -/** - * An implementation of Map that returns the constructor-supplied - * value for any input. - * - * @param the key type - * @param the value type - */ -public class ConstantMap implements Map { - - private Map delegate; - - /** - * Creates an instance whose {@code get} method always returns {@code value}. 
- */ - public ConstantMap(V value) { - delegate = Collections.unmodifiableMap(Collections.singletonMap(null, value)); - } - - public V get(Object key) { - return delegate.get(null); - } - - public void clear() { - delegate.clear(); - } - - public boolean containsKey(Object key) { - return true; - } - - public boolean containsValue(Object value) { - return delegate.containsValue(value); - } - - public Set> entrySet() { - return delegate.entrySet(); - } - - @Override - public boolean equals(Object o) { - return delegate.equals(o); - } - - @Override - public int hashCode() { - return delegate.hashCode(); - } - - public boolean isEmpty() { - return delegate.isEmpty(); - } - - public Set keySet() { - return delegate.keySet(); - } - - public V put(K key, V value) { - return delegate.put(key, value); - } - - public void putAll(Map t) { - delegate.putAll(t); - } - - public V remove(Object key) { - return delegate.remove(key); - } - - public int size() { - return delegate.size(); - } - - public Collection values() { - return delegate.values(); - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java deleted file mode 100644 index 84eadc0735..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Feb 18, 2004 - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; -import java.util.Iterator; - -/** - * A utility class for calculating properties of discrete distributions. - * Generally, these distributions are represented as arrays of - * double values, which are assumed to be normalized - * such that the entries in a single array sum to 1. - * - * @author Joshua O'Madadhain - */ -public class DiscreteDistribution -{ - - /** - * Returns the Kullback-Leibler divergence between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of - * dist[i] * Math.log(dist[i] / reference[i]). - * Note that this value is not symmetric; see - * symmetricKL for a symmetric variant. - * @see #symmetricKL(double[], double[]) - */ - public static double KullbackLeibler(double[] dist, double[] reference) - { - double distance = 0; - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0 && reference[i] > 0) - distance += dist[i] * Math.log(dist[i] / reference[i]); - } - return distance; - } - - /** - * Returns KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist). - * @see #KullbackLeibler(double[], double[]) - */ - public static double symmetricKL(double[] dist, double[] reference) - { - return KullbackLeibler(dist, reference) - + KullbackLeibler(reference, dist); - } - - /** - * Returns the squared difference between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of the square of - * (dist[i] - reference[i]). 
- */ - public static double squaredError(double[] dist, double[] reference) - { - double error = 0; - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - double difference = dist[i] - reference[i]; - error += difference * difference; - } - return error; - } - - /** - * Returns the cosine distance between the two - * specified distributions, which must have the same number - * of elements. The distributions are treated as vectors - * in dist.length-dimensional space. - * Given the following definitions - *
          - *
- *   • v = the sum over all i of dist[i] * dist[i]
- *   • w = the sum over all i of reference[i] * reference[i]
- *   • vw = the sum over all i of dist[i] * reference[i]
        - * the value returned is defined as vw / (Math.sqrt(v) * Math.sqrt(w)). - */ - public static double cosine(double[] dist, double[] reference) - { - double v_prod = 0; // dot product x*x - double w_prod = 0; // dot product y*y - double vw_prod = 0; // dot product x*y - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - vw_prod += dist[i] * reference[i]; - v_prod += dist[i] * dist[i]; - w_prod += reference[i] * reference[i]; - } - // cosine distance between v and w - return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod)); - } - - /** - * Returns the entropy of this distribution. - * High entropy indicates that the distribution is - * close to uniform; low entropy indicates that the - * distribution is close to a Dirac delta (i.e., if - * the probability mass is concentrated at a single - * point, this method returns 0). Entropy is defined as - * the sum over all i of - * -(dist[i] * Math.log(dist[i])) - */ - public static double entropy(double[] dist) - { - double total = 0; - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0) - total += dist[i] * Math.log(dist[i]); - } - return -total; - } - - /** - * Throws an IllegalArgumentException if the two arrays are not of the same length. - */ - protected static void checkLengths(double[] dist, double[] reference) - { - if (dist.length != reference.length) - throw new IllegalArgumentException("Arrays must be of the same length"); - } - - /** - * Normalizes, with Lagrangian smoothing, the specified double - * array, so that the values sum to 1 (i.e., can be treated as probabilities). - * The effect of the Lagrangian smoothing is to ensure that all entries - * are nonzero; effectively, a value of alpha is added to each - * entry in the original array prior to normalization. - * @param counts - * @param alpha - */ - public static void normalize(double[] counts, double alpha) - { - double total_count = 0; - - for (int i = 0; i < counts.length; i++) - total_count += counts[i]; - - for (int i = 0; i < counts.length; i++) - counts[i] = (counts[i] + alpha) - / (total_count + counts.length * alpha); - } - - /** - * Returns the mean of the specified Collection of - * distributions, which are assumed to be normalized arrays of - * double values. - * @see #mean(double[][]) - */ - public static double[] mean(Collection distributions) - { - if (distributions.isEmpty()) - throw new IllegalArgumentException("Distribution collection must be non-empty"); - Iterator iter = distributions.iterator(); - double[] first = iter.next(); - double[][] d_array = new double[distributions.size()][first.length]; - d_array[0] = first; - for (int i = 1; i < d_array.length; i++) - d_array[i] = iter.next(); - - return mean(d_array); - } - - /** - * Returns the mean of the specified array of distributions, - * represented as normalized arrays of double values. - * Will throw an "index out of bounds" exception if the - * distribution arrays are not all of the same length. 
- */ - public static double[] mean(double[][] distributions) - { - double[] d_mean = new double[distributions[0].length]; - for (int j = 0; j < d_mean.length; j++) - d_mean[j] = 0; - - for (int i = 0; i < distributions.length; i++) - for (int j = 0; j < d_mean.length; j++) - d_mean[j] += distributions[i][j] / distributions.length; - - return d_mean; - } - -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java deleted file mode 100644 index b8a215e3d5..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java +++ /dev/null @@ -1,56 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; - -import org.apache.commons.collections15.BidiMap; -import org.apache.commons.collections15.bidimap.DualHashBidiMap; - -/** - * A class providing static methods useful for improving the - * performance of graph algorithms. - * - * @author Tom Nelson - * - */ -public class Indexer { - - /** - * Returns a BidiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param - * @param collection - * @return a bidirectional map from collection elements to 0-based indices - */ - public static BidiMap create(Collection collection) { - return create(collection, 0); - } - /** - * Returns a BidiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param - * @param collection - * @param start start index - * @return a bidirectional map from collection elements to start-based indices - */ - public static BidiMap create(Collection collection, int start) { - BidiMap map = new DualHashBidiMap(); - int i=start; - for(T t : collection) { - map.put(t,i++); - } - return map; - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java deleted file mode 100644 index 92bd45d3f4..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java +++ /dev/null @@ -1,28 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - - -/** - * An interface for algorithms that proceed iteratively. - * - */ -public interface IterativeContext -{ - /** - * Advances one step. - */ - void step(); - - /** - * Returns true if this iterative process is finished, and false otherwise. 
- */ - boolean done(); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java deleted file mode 100644 index fbe07f4e86..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java +++ /dev/null @@ -1,174 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - - - -/** - * Provides basic infrastructure for iterative algorithms. Services provided include: - *
          - *
- *   • storage of current and max iteration count
- *   • framework for initialization, iterative evaluation, and finalization
- *   • test for convergence
- *   • etc.
- *
- * Algorithms that subclass this class are typically used in the following way:
- *
- *   FooAlgorithm foo = new FooAlgorithm(...)
- *   foo.setMaximumIterations(100); // set up conditions
- *   ...
- *   foo.evaluate(); // key method which initiates iterative process
- *   foo.getSomeResult();
        - * - * @author Scott White (originally written by Didier Besset) - */ -public abstract class IterativeProcess implements IterativeContext { - /** - * Number of iterations performed. - */ - private int iterations; - /** - * Maximum allowed number of iterations. - */ - private int maximumIterations = 50; - /** - * Desired precision. - */ - private double desiredPrecision = Double.MIN_VALUE; - /** - * Achieved precision. - */ - private double precision; - - - /** - * Generic constructor. - */ - public IterativeProcess() { - } - - /** - * Performs the iterative process. - * Note: this method does not return anything because Java does not - * allow mixing double, int, or objects - */ - public void evaluate() { - iterations = 0; - initializeIterations(); - while (iterations++ < maximumIterations) { - step(); - precision = getPrecision(); - if (hasConverged()) - break; - } - finalizeIterations(); - } - - /** - * Evaluate the result of the current iteration. - */ - abstract public void step(); - - /** - * Perform eventual clean-up operations - * (must be implement by subclass when needed). - */ - protected void finalizeIterations() { - } - - /** - * Returns the desired precision. - */ - public double getDesiredPrecision() { - return desiredPrecision; - } - - /** - * Returns the number of iterations performed. - */ - public int getIterations() { - return iterations; - } - - /** - * Returns the maximum allowed number of iterations. - */ - public int getMaximumIterations() { - return maximumIterations; - } - - /** - * Returns the attained precision. - */ - public double getPrecision() { - return precision; - } - - /** - * @param precision the precision to set - */ - public void setPrecision(double precision) { - this.precision = precision; - } - - /** - * - * Check to see if the result has been attained. - * @return boolean - */ - public boolean hasConverged() { - return precision < desiredPrecision; - } - - public boolean done() { - return hasConverged(); - } - - /** - * Initializes internal parameters to start the iterative process. - */ - protected void initializeIterations() { - } - - /** - * - */ - public void reset() { - } - - /** - * @return double - * @param epsilon double - * @param x double - */ - public double relativePrecision(double epsilon, double x) { - return x > desiredPrecision ? epsilon / x: epsilon; - } - - /** - * Defines the desired precision. - */ - public void setDesiredPrecision(double prec) throws IllegalArgumentException { - if (prec <= 0) - throw new IllegalArgumentException("Non-positive precision: " + prec); - desiredPrecision = prec; - } - - /** - * Defines the maximum allowed number of iterations. - */ - public void setMaximumIterations(int maxIter) throws IllegalArgumentException { - if (maxIter < 1) - throw new IllegalArgumentException("Non-positive maximum iteration: " + maxIter); - maximumIterations = maxIter; - } -} \ No newline at end of file diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java deleted file mode 100644 index dce550f477..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Aug 9, 2004 - * - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Random; -import java.util.Set; - - - -/** - * Groups items into a specified number of clusters, based on their proximity in - * d-dimensional space, using the k-means algorithm. Calls to - * cluster will terminate when either of the two following - * conditions is true: - *
          - *
- *   • the number of iterations is > max_iterations
- *   • none of the centroids has moved as much as convergence_threshold since the previous iteration
        - * - * @author Joshua O'Madadhain - */ -public class KMeansClusterer -{ - protected int max_iterations; - protected double convergence_threshold; - protected Random rand; - - /** - * Creates an instance whose termination conditions are set according - * to the parameters. - */ - public KMeansClusterer(int max_iterations, double convergence_threshold) - { - this.max_iterations = max_iterations; - this.convergence_threshold = convergence_threshold; - this.rand = new Random(); - } - - /** - * Creates an instance with max iterations of 100 and convergence threshold - * of 0.001. - */ - public KMeansClusterer() - { - this(100, 0.001); - } - - /** - * Returns the maximum number of iterations. - */ - public int getMaxIterations() - { - return max_iterations; - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int max_iterations) - { - if (max_iterations < 0) - throw new IllegalArgumentException("max iterations must be >= 0"); - - this.max_iterations = max_iterations; - } - - /** - * Returns the convergence threshold. - */ - public double getConvergenceThreshold() - { - return convergence_threshold; - } - - /** - * Sets the convergence threshold. - * @param convergence_threshold - */ - public void setConvergenceThreshold(double convergence_threshold) - { - if (convergence_threshold <= 0) - throw new IllegalArgumentException("convergence threshold " + - "must be > 0"); - - this.convergence_threshold = convergence_threshold; - } - - /** - * Returns a Collection of clusters, where each cluster is - * represented as a Map of Objects to locations - * in d-dimensional space. - * @param object_locations a map of the Objects to cluster, to - * double arrays that specify their locations in d-dimensional space. - * @param num_clusters the number of clusters to create - * @throws NotEnoughClustersException - */ - @SuppressWarnings("unchecked") - public Collection> cluster(Map object_locations, int num_clusters) - { - if (object_locations == null || object_locations.isEmpty()) - throw new IllegalArgumentException("'objects' must be non-empty"); - - if (num_clusters < 2 || num_clusters > object_locations.size()) - throw new IllegalArgumentException("number of clusters " + - "must be >= 2 and <= number of objects (" + - object_locations.size() + ")"); - - - Set centroids = new HashSet(); - - Object[] obj_array = object_locations.keySet().toArray(); - Set tried = new HashSet(); - - // create the specified number of clusters - while (centroids.size() < num_clusters && tried.size() < object_locations.size()) - { - T o = (T)obj_array[(int)(rand.nextDouble() * obj_array.length)]; - tried.add(o); - double[] mean_value = object_locations.get(o); - boolean duplicate = false; - for (double[] cur : centroids) - { - if (Arrays.equals(mean_value, cur)) - duplicate = true; - } - if (!duplicate) - centroids.add(mean_value); - } - - if (tried.size() >= object_locations.size()) - throw new NotEnoughClustersException(); - - // put items in their initial clusters - Map> clusterMap = assignToClusters(object_locations, centroids); - - // keep reconstituting clusters until either - // (a) membership is stable, or - // (b) number of iterations passes max_iterations, or - // (c) max movement of any centroid is <= convergence_threshold - int iterations = 0; - double max_movement = Double.POSITIVE_INFINITY; - while (iterations++ < max_iterations && max_movement > convergence_threshold) - { - max_movement = 0; - Set new_centroids = new HashSet(); - // calculate new mean for each cluster - 
for (Map.Entry> entry : clusterMap.entrySet()) - { - double[] centroid = entry.getKey(); - Map elements = entry.getValue(); - ArrayList locations = new ArrayList(elements.values()); - - double[] mean = DiscreteDistribution.mean(locations); - max_movement = Math.max(max_movement, - Math.sqrt(DiscreteDistribution.squaredError(centroid, mean))); - new_centroids.add(mean); - } - - // TODO: check membership of clusters: have they changed? - - // regenerate cluster membership based on means - clusterMap = assignToClusters(object_locations, new_centroids); - } - return clusterMap.values(); - } - - /** - * Assigns each object to the cluster whose centroid is closest to the - * object. - * @param object_locations a map of objects to locations - * @param centroids the centroids of the clusters to be formed - * @return a map of objects to assigned clusters - */ - protected Map> assignToClusters(Map object_locations, Set centroids) - { - Map> clusterMap = new HashMap>(); - for (double[] centroid : centroids) - clusterMap.put(centroid, new HashMap()); - - for (Map.Entry object_location : object_locations.entrySet()) - { - T object = object_location.getKey(); - double[] location = object_location.getValue(); - - // find the cluster with the closest centroid - Iterator c_iter = centroids.iterator(); - double[] closest = c_iter.next(); - double distance = DiscreteDistribution.squaredError(location, closest); - - while (c_iter.hasNext()) - { - double[] centroid = c_iter.next(); - double dist_cur = DiscreteDistribution.squaredError(location, centroid); - if (dist_cur < distance) - { - distance = dist_cur; - closest = centroid; - } - } - clusterMap.get(closest).put(object, location); - } - - return clusterMap; - } - - /** - * Sets the seed used by the internal random number generator. - * Enables consistent outputs. - */ - public void setSeed(int random_seed) - { - this.rand = new Random(random_seed); - } - - /** - * An exception that indicates that the specified data points cannot be - * clustered into the number of clusters requested by the user. - * This will happen if and only if there are fewer distinct points than - * requested clusters. (If there are fewer total data points than - * requested clusters, IllegalArgumentException will be thrown.) - * - * @author Joshua O'Madadhain - */ - @SuppressWarnings("serial") - public static class NotEnoughClustersException extends RuntimeException - { - @Override - public String getMessage() - { - return "Not enough distinct points in the input data set to form " + - "the requested number of clusters"; - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java deleted file mode 100644 index bd00a82810..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -/* - * - * Created on Oct 29, 2003 - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.AbstractCollection; -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Queue; -import java.util.Vector; - -import org.apache.commons.collections15.IteratorUtils; - -/** - * An array-based binary heap implementation of a priority queue, - * which also provides - * efficient update() and contains operations. - * It contains extra infrastructure (a hash table) to keep track of the - * position of each element in the array; thus, if the key value of an element - * changes, it may be "resubmitted" to the heap via update - * so that the heap can reposition it efficiently, as necessary. - * - * @author Joshua O'Madadhain - */ -public class MapBinaryHeap - extends AbstractCollection - implements Queue -{ - private Vector heap = new Vector(); // holds the heap as an implicit binary tree - private Map object_indices = new HashMap(); // maps each object in the heap to its index in the heap - private Comparator comp; - private final static int TOP = 0; // the index of the top of the heap - - /** - * Creates a MapBinaryHeap whose heap ordering - * is based on the ordering of the elements specified by c. - */ - public MapBinaryHeap(Comparator comp) - { - initialize(comp); - } - - /** - * Creates a MapBinaryHeap whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. - */ - public MapBinaryHeap() - { - initialize(new ComparableComparator()); - } - - /** - * Creates a MapBinaryHeap based on the specified - * collection whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. - */ - public MapBinaryHeap(Collection c) - { - this(); - addAll(c); - } - - /** - * Creates a MapBinaryHeap based on the specified collection - * whose heap ordering - * is based on the ordering of the elements specified by c. - */ - public MapBinaryHeap(Collection c, Comparator comp) - { - this(comp); - addAll(c); - } - - private void initialize(Comparator comp) - { - this.comp = comp; - clear(); - } - - /** - * @see Collection#clear() - */ - @Override - public void clear() - { - object_indices.clear(); - heap.clear(); - } - - /** - * Inserts o into this collection. - */ - @Override - public boolean add(T o) - { - int i = heap.size(); // index 1 past the end of the heap - heap.setSize(i+1); - percolateUp(i, o); - return true; - } - - /** - * Returns true if this collection contains no elements, and - * false otherwise. - */ - @Override - public boolean isEmpty() - { - return heap.isEmpty(); - } - - /** - * Returns the element at the top of the heap; does not - * alter the heap. - */ - public T peek() - { - if (heap.size() > 0) - return heap.elementAt(TOP); - else - return null; - } - - /** - * Removes the element at the top of this heap, and returns it. - * @deprecated Use {@link MapBinaryHeap#poll()} - * or {@link MapBinaryHeap#remove()} instead. - */ - @Deprecated - public T pop() throws NoSuchElementException - { - return this.remove(); - } - - /** - * Returns the size of this heap. - */ - @Override - public int size() - { - return heap.size(); - } - - /** - * Informs the heap that this object's internal key value has been - * updated, and that its place in the heap may need to be shifted - * (up or down). 
- * @param o - */ - public void update(T o) - { - // Since we don't know whether the key value increased or - // decreased, we just percolate up followed by percolating down; - // one of the two will have no effect. - - int cur = object_indices.get(o).intValue(); // current index - int new_idx = percolateUp(cur, o); - percolateDown(new_idx); - } - - /** - * @see Collection#contains(java.lang.Object) - */ - @Override - public boolean contains(Object o) - { - return object_indices.containsKey(o); - } - - /** - * Moves the element at position cur closer to - * the bottom of the heap, or returns if no further motion is - * necessary. Calls itself recursively if further motion is - * possible. - */ - private void percolateDown(int cur) - { - int left = lChild(cur); - int right = rChild(cur); - int smallest; - - if ((left < heap.size()) && - (comp.compare(heap.elementAt(left), heap.elementAt(cur)) < 0)) { - smallest = left; - } else { - smallest = cur; - } - - if ((right < heap.size()) && - (comp.compare(heap.elementAt(right), heap.elementAt(smallest)) < 0)) { - smallest = right; - } - - if (cur != smallest) - { - swap(cur, smallest); - percolateDown(smallest); - } - } - - /** - * Moves the element o at position cur - * as high as it can go in the heap. Returns the new position of the - * element in the heap. - */ - private int percolateUp(int cur, T o) - { - int i = cur; - - while ((i > TOP) && (comp.compare(heap.elementAt(parent(i)), o) > 0)) - { - T parentElt = heap.elementAt(parent(i)); - heap.setElementAt(parentElt, i); - object_indices.put(parentElt, new Integer(i)); // reset index to i (new location) - i = parent(i); - } - - // place object in heap at appropriate place - object_indices.put(o, new Integer(i)); - heap.setElementAt(o, i); - - return i; - } - - /** - * Returns the index of the left child of the element at - * index i of the heap. - * @param i - * @return the index of the left child of the element at - * index i of the heap - */ - private int lChild(int i) - { - return (i<<1) + 1; - } - - /** - * Returns the index of the right child of the element at - * index i of the heap. - * @param i - * @return the index of the right child of the element at - * index i of the heap - */ - private int rChild(int i) - { - return (i<<1) + 2; - } - - /** - * Returns the index of the parent of the element at - * index i of the heap. - * @param i - * @return the index of the parent of the element at index i of the heap - */ - private int parent(int i) - { - return (i-1)>>1; - } - - /** - * Swaps the positions of the elements at indices i - * and j of the heap. - * @param i - * @param j - */ - private void swap(int i, int j) - { - T iElt = heap.elementAt(i); - T jElt = heap.elementAt(j); - - heap.setElementAt(jElt, i); - object_indices.put(jElt, new Integer(i)); - - heap.setElementAt(iElt, j); - object_indices.put(iElt, new Integer(j)); - } - - /** - * Comparator used if none is specified in the constructor. - * @author Joshua O'Madadhain - */ - private class ComparableComparator implements Comparator - { - /** - * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) - */ - @SuppressWarnings("unchecked") - public int compare(T arg0, T arg1) - { - if (!(arg0 instanceof Comparable) || !(arg1 instanceof Comparable)) - throw new IllegalArgumentException("Arguments must be Comparable"); - - return ((Comparable)arg0).compareTo(arg1); - } - } - - /** - * Returns an Iterator that does not support modification - * of the heap. 
- */ - @Override - public Iterator iterator() - { - return IteratorUtils.unmodifiableIterator(heap.iterator()); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean remove(Object o) - { - throw new UnsupportedOperationException(); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean removeAll(Collection c) - { - throw new UnsupportedOperationException(); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean retainAll(Collection c) - { - throw new UnsupportedOperationException(); - } - - public T element() throws NoSuchElementException - { - T top = this.peek(); - if (top == null) - throw new NoSuchElementException(); - return top; - } - - public boolean offer(T o) - { - return add(o); - } - - public T poll() - { - T top = this.peek(); - if (top != null) - { - T bottom_elt = heap.lastElement(); - heap.setElementAt(bottom_elt, TOP); - object_indices.put(bottom_elt, new Integer(TOP)); - - heap.setSize(heap.size() - 1); // remove the last element - if (heap.size() > 1) - percolateDown(TOP); - - object_indices.remove(top); - } - return top; - } - - public T remove() - { - T top = this.poll(); - if (top == null) - throw new NoSuchElementException(); - return top; - } - -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java deleted file mode 100644 index 1aa7d50709..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Created on Aug 5, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Map; - - -/** - * A SettableTransformer that operates on an underlying Map instance. - * Similar to MapTransformer. - * - * @author Joshua O'Madadhain - */ -public class MapSettableTransformer implements SettableTransformer -{ - protected Map map; - - /** - * Creates an instance based on m. - */ - public MapSettableTransformer(Map m) - { - this.map = m; - } - - public O transform(I input) - { - return map.get(input); - } - - public void set(I input, O output) - { - map.put(input, output); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java deleted file mode 100644 index a92c3b8d43..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java +++ /dev/null @@ -1,23 +0,0 @@ -package edu.uci.ics.jung.algorithms.util; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * A Predicate that returns true if the input edge's - * endpoints in the input graph are identical. (Thus, an edge which connects - * its sole incident vertex to itself). 
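A quick check of this predicate could look like the sketch below; the graph contents and edge ids are invented, and SparseMultigraph plus Context.getInstance are assumed to come from the same JUNG 2 distribution as the deleted class:

import edu.uci.ics.jung.algorithms.util.SelfLoopEdgePredicate;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.Context;

public class SelfLoopDemo {
    public static void main(String[] args) {
        Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
        g.addVertex("a");
        g.addVertex("b");
        g.addEdge(1, "a", "a"); // self-loop: both endpoints are "a"
        g.addEdge(2, "a", "b"); // ordinary edge

        SelfLoopEdgePredicate<String, Integer> isLoop = new SelfLoopEdgePredicate<String, Integer>();
        System.out.println(isLoop.evaluate(Context.getInstance(g, 1))); // true
        System.out.println(isLoop.evaluate(Context.getInstance(g, 2))); // false
    }
}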
- * - * @param - * @param - */ -public class SelfLoopEdgePredicate implements Predicate,E>> { - - public boolean evaluate(Context,E> context) { - Pair endpoints = context.graph.getEndpoints(context.element); - return endpoints.getFirst().equals(endpoints.getSecond()); - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java deleted file mode 100644 index 5e5168abf1..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Created on Aug 5, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.util; - -import org.apache.commons.collections15.Transformer; - -/** - * An interface for classes that can set the value to be returned (from transform()) - * when invoked on a given input. - * - * @author Joshua O'Madadhain - */ -public interface SettableTransformer extends Transformer -{ - /** - * Sets the value (output) to be returned by a call to - * transform(input)). - * @param input - * @param output - */ - public void set(I input, O output); -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java deleted file mode 100644 index d9590b26a6..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java +++ /dev/null @@ -1,193 +0,0 @@ -/** - * Copyright (c) 2009, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jan 8, 2009 - * - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Random; - -/** - * Selects items according to their probability in an arbitrary probability - * distribution. The distribution is specified by a {@code Map} from - * items (of type {@code T}) to weights of type {@code Number}, supplied - * to the constructor; these weights are normalized internally to act as - * probabilities. - * - *
- * <p>
        This implementation selects items in O(1) time, and requires O(n) space. - * - * @author Joshua O'Madadhain - */ -public class WeightedChoice -{ - private List item_pairs; - private Random random; - - /** - * The default minimum value that is treated as a valid probability - * (as opposed to rounding error from floating-point operations). - */ - public static final double DEFAULT_THRESHOLD = 0.00000000001; - - /** - * Equivalent to {@code this(item_weights, new Random(), DEFAULT_THRESHOLD)}. - * @param item_weights - */ - public WeightedChoice(Map item_weights) - { - this(item_weights, new Random(), DEFAULT_THRESHOLD); - } - - /** - * Equivalent to {@code this(item_weights, new Random(), threshold)}. - */ - public WeightedChoice(Map item_weights, double threshold) - { - this(item_weights, new Random(), threshold); - } - - /** - * Equivalent to {@code this(item_weights, random, DEFAULT_THRESHOLD)}. - */ - public WeightedChoice(Map item_weights, Random random) - { - this(item_weights, random, DEFAULT_THRESHOLD); - } - - /** - * Creates an instance with the specified mapping from items to weights, - * random number generator, and threshold value. - * - *

- * <p>The mapping defines the weight for each item to be selected; this
- * will be proportional to the probability of its selection.
- *

- * <p>The random number generator specifies the mechanism which will be
- * used to provide uniform integer and double values.
- *
- * <p>
        The threshold indicates default minimum value that is treated as a valid - * probability (as opposed to rounding error from floating-point operations). - */ - public WeightedChoice(Map item_weights, Random random, - double threshold) - { - if (item_weights.isEmpty()) - throw new IllegalArgumentException("Item weights must be non-empty"); - - int item_count = item_weights.size(); - item_pairs = new ArrayList(item_count); - - double sum = 0; - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Weights must be > 0"); - sum += value; - } - double bucket_weight = 1.0 / item_weights.size(); - - Queue light_weights = new LinkedList(); - Queue heavy_weights = new LinkedList(); - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue() / sum; - enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); - } - - // repeat until both queues empty - while (!heavy_weights.isEmpty() || !light_weights.isEmpty()) - { - ItemPair heavy_item = heavy_weights.poll(); - ItemPair light_item = light_weights.poll(); - double light_weight = 0; - T light = null; - T heavy = null; - if (light_item != null) - { - light_weight = light_item.weight; - light = light_item.light; - } - if (heavy_item != null) - { - heavy = heavy_item.heavy; - // put the 'left over' weight from the heavy item--what wasn't - // needed to make up the difference between the light weight and - // 1/n--back in the appropriate queue - double new_weight = heavy_item.weight - (bucket_weight - light_weight); - if (new_weight > threshold) - enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights); - } - light_weight *= item_count; - - item_pairs.add(new ItemPair(light, heavy, light_weight)); - } - - this.random = random; - } - - /** - * Adds key/value to the appropriate queue. Keys with values less than - * the threshold get added to {@code light_weights}, all others get added - * to {@code heavy_weights}. - */ - private void enqueueItem(T key, double value, double threshold, - Queue light_weights, Queue heavy_weights) - { - if (value < threshold) - light_weights.offer(new ItemPair(key, null, value)); - else - heavy_weights.offer(new ItemPair(null, key, value)); - } - - /** - * Sets the seed used by the internal random number generator. - */ - public void setRandomSeed(long seed) - { - this.random.setSeed(seed); - } - - /** - * Retrieves an item with probability proportional to its weight in the - * {@code Map} provided in the input. - */ - public T nextItem() - { - ItemPair item_pair = item_pairs.get(random.nextInt(item_pairs.size())); - if (random.nextDouble() < item_pair.weight) - return item_pair.light; - return item_pair.heavy; - } - - /** - * Manages light object/heavy object/light conditional probability tuples. 
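The light/heavy pairs built above are what make nextItem() O(1), as promised in the class comment: a uniform pick of one bucket, then a biased coin flip between that bucket's light and heavy item. A usage sketch with invented weights and a fixed seed, using only the (Map, Random) constructor and nextItem() shown in this file:

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

import edu.uci.ics.jung.algorithms.util.WeightedChoice;

public class WeightedChoiceDemo {
    public static void main(String[] args) {
        Map<String, Number> weights = new HashMap<String, Number>();
        weights.put("a", 0.7);
        weights.put("b", 0.2);
        weights.put("c", 0.1);

        // Weights are normalized internally; the fixed seed only makes the run repeatable.
        WeightedChoice<String> sampler = new WeightedChoice<String>(weights, new Random(42L));

        int hits = 0;
        for (int i = 0; i < 10000; i++) {
            if ("a".equals(sampler.nextItem())) {
                hits++;
            }
        }
        System.out.println("\"a\" drawn " + hits + " of 10000 times"); // roughly 7000 expected
    }
}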
- */ - private class ItemPair - { - T light; - T heavy; - double weight; - - private ItemPair(T light, T heavy, double weight) - { - this.light = light; - this.heavy = heavy; - this.weight = weight; - } - - @Override - public String toString() - { - return String.format("[L:%s, H:%s, %.3f]", light, heavy, weight); - } - } -} diff --git a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/package.html b/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/package.html deleted file mode 100644 index 58c5f591f0..0000000000 --- a/third-party/net.sf.jung2/src/main/java/edu/uci/ics/jung/algorithms/util/package.html +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - -Provides general algorithmic utilities. These include: -

-<ul>
-<li>DiscreteDistribution: calculates statistical measures on
-discrete probability distributions represented as double arrays
-<li>KMeansClusterer: uses the k-means algorithm to cluster
-points in d-dimensional space into k clusters
-<li>MapBinaryHeap: a binary heap implementation that permits
-efficient element access and update operations
-<li>RandomLocationTransformer: a class that randomly assigns
-2D coordinates to items (default initializer for iterative Layouts)
-<li>SettableTransformer: an extension of Transformer
-that allows mutation of the transformation
-</ul>
        - - - diff --git a/third-party/openflowj/LICENSE b/third-party/openflowj/LICENSE deleted file mode 100644 index ee6da46abe..0000000000 --- a/third-party/openflowj/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior -University - -We are making the OpenFlow specification and associated documentation -(Software) available for public use and benefit with the expectation that -others will use, modify and enhance the Software and contribute those -enhancements back to the community. However, since we would like to make the -Software available for broadest use, with as few restrictions as possible -permission is hereby granted, free of charge, to any person obtaining a copy of -this Software to deal in the Software under the copyrights without restriction, -including without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to permit -persons to whom the Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -The name and trademarks of copyright holder(s) may NOT be used in advertising -or publicity pertaining to the Software or any derivatives without specific, -written prior permission. diff --git a/third-party/openflowj/Makefile b/third-party/openflowj/Makefile deleted file mode 100644 index b803071a3e..0000000000 --- a/third-party/openflowj/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -# Because I am old and crotchety and my fingers can't stop from running -# `make` commands -all: - ant - -run: - ant run - -doc: - ant javadoc - -tests: - ant tests - -count: - @find . -name \*.java | xargs wc -l | sort -n - -clean: - ant clean diff --git a/third-party/openflowj/README b/third-party/openflowj/README deleted file mode 100644 index 6fffebef0b..0000000000 --- a/third-party/openflowj/README +++ /dev/null @@ -1,16 +0,0 @@ -OpenFlow Java - v1.0.0 - -A Java implementation of low-level OpenFlow packet marshalling/unmarshalling -and IO operations. Implements v1.0 of the OpenFlow specification at -http://www.openflow.org. - - - David Erickson (daviderickson@cs.stanford.edu) - - Rob Sherwood (rob.sherwood@stanford.edu) - -Building requires Maven 2.x+ (http://maven.apache.org/). 
- -To build: - mvn package - -To build javadocs: - mvn javadoc:javadoc diff --git a/third-party/openflowj/eclipse_codestyle.xml b/third-party/openflowj/eclipse_codestyle.xml deleted file mode 100644 index 6b661c671d..0000000000 --- a/third-party/openflowj/eclipse_codestyle.xml +++ /dev/null @@ -1,269 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/third-party/openflowj/lib/commons-cli-1.2.jar b/third-party/openflowj/lib/commons-cli-1.2.jar deleted file mode 100644 index ce4b9fffe4..0000000000 Binary files a/third-party/openflowj/lib/commons-cli-1.2.jar and /dev/null differ diff --git a/third-party/openflowj/lib/junit-4.8.1.jar b/third-party/openflowj/lib/junit-4.8.1.jar deleted file mode 100644 index 524cd65ce5..0000000000 Binary files a/third-party/openflowj/lib/junit-4.8.1.jar and /dev/null differ diff --git a/third-party/openflowj/pom.xml b/third-party/openflowj/pom.xml deleted file mode 100644 index 9e04a70b83..0000000000 --- a/third-party/openflowj/pom.xml +++ /dev/null @@ -1,140 +0,0 @@ - - 4.0.0 - org.opendaylight.controller.thirdparty - org.openflow.openflowj - 1.1.0-SNAPSHOT - OpenFlow Java - A Java implemention of the OpenFlow v1.0 protocol - - - - org.opendaylight.controller - commons.thirdparty - 1.2.0-SNAPSHOT - ../commons/thirdparty - - - - - David Erickson - daviderickson@cs.stanford.edu - - - Rob Sherwood - rob.sherwood@stanford.edu - - - bundle - http://www.openflow.org - - - The OpenFlow License - http://www.openflowswitch.org/wp/legal/ - repo - - - - scm:git:ssh://git.opendaylight.org:29418/controller.git - scm:git:ssh://git.opendaylight.org:29418/controller.git - https://wiki.opendaylight.org/view/OpenDaylight_Controller:Main - HEAD - - - UTF-8 - - - - - release-sign-artifacts - - - - - - - - - - - org.apache.maven.plugins - maven-gpg-plugin - 1.1 - - - sign-artifacts - verify - - sign - - - - - forked-path - - - - - - - - - - - org.apache.felix - maven-bundle-plugin - 2.3.6 - true - - - - org.openflow.example;version="1.0.1"; - uses:="org.openflow.example.cli, - org.openflow.protocol, - org.openflow.io, - org.openflow.protocol.factory", - org.openflow.io;version="1.0.1"; - uses:="org.openflow.protocol, - org.openflow.protocol.factory", - org.openflow.protocol;version="1.0.1"; - uses:="org.openflow.protocol.statistics, - org.openflow.protocol, - org.openflow.protocol.factory", - org.openflow.protocol.action;version="1.0.1"; - uses:="org.openflow.protocol", - org.openflow.protocol.factory;version="1.0.1"; - uses:="org.openflow.protocol.statistics, - org.openflow.protocol, - org.openflow.protocol.action, - org.openflow.protocol.queue", - org.openflow.protocol.queue;version="1.0.2"; - uses:="org.openflow.protocol, - org.openflow.protocol.factory", - org.openflow.protocol.statistics;version="1.0.1"; - uses:="org.openflow.protocol, - org.openflow.protocol.factory", - org.openflow.util;version="1.0.1" - - - ${project.basedir}/META-INF - - - - org.apache.maven.plugins - maven-compiler-plugin - 2.5.1 - - 1.7 - 1.7 - - - - - - - junit - junit - 4.8.1 - test - - - diff --git 
a/third-party/openflowj/src/main/java/org/openflow/example/SelectListener.java b/third-party/openflowj/src/main/java/org/openflow/example/SelectListener.java deleted file mode 100644 index 16fa109c82..0000000000 --- a/third-party/openflowj/src/main/java/org/openflow/example/SelectListener.java +++ /dev/null @@ -1,21 +0,0 @@ -/** - * - */ -package org.openflow.example; - -import java.io.IOException; -import java.nio.channels.SelectionKey; - -/** - * @author Rob Sherwood (rob.sherwood@stanford.edu) - * - */ -public interface SelectListener { - /** - * Tell the select listener that an event took place on the passed object - * @param key the key used on the select - * @param arg some parameter passed by the caller when registering - * @throws IOException - */ - void handleEvent(SelectionKey key, Object arg) throws IOException; -} diff --git a/third-party/openflowj/src/main/java/org/openflow/example/SelectLoop.java b/third-party/openflowj/src/main/java/org/openflow/example/SelectLoop.java deleted file mode 100644 index b7927d876e..0000000000 --- a/third-party/openflowj/src/main/java/org/openflow/example/SelectLoop.java +++ /dev/null @@ -1,156 +0,0 @@ -package org.openflow.example; - -import java.io.IOException; -import java.nio.channels.CancelledKeyException; -import java.nio.channels.ClosedChannelException; -import java.nio.channels.SelectableChannel; -import java.nio.channels.SelectionKey; -import java.nio.channels.Selector; -import java.nio.channels.spi.SelectorProvider; -import java.util.Iterator; -import java.util.Queue; -import java.util.concurrent.ConcurrentLinkedQueue; - -/*** - * Dirt simple SelectLoop for simple java controller - */ - - -public class SelectLoop { - protected SelectListener callback; - protected boolean dontStop; - protected Object registrationLock; - protected int registrationRequests = 0; - protected Queue registrationQueue; - protected Selector selector; - protected long timeout; - - public SelectLoop(SelectListener cb) throws IOException { - callback = cb; - dontStop = true; - selector = SelectorProvider.provider().openSelector(); - registrationLock = new Object(); - registrationQueue = new ConcurrentLinkedQueue(); - timeout = 0; - } - - /** - * Initializes this SelectLoop - * @param cb the callback to call when select returns - * @param timeout the timeout value in milliseconds that select will be - * called with - * @throws IOException - */ - public SelectLoop(SelectListener cb, long timeout) throws IOException { - callback = cb; - dontStop = true; - selector = SelectorProvider.provider().openSelector(); - registrationLock = new Object(); - registrationQueue = new ConcurrentLinkedQueue(); - this.timeout = timeout; - } - - public void register(SelectableChannel ch, int ops, Object arg) - throws ClosedChannelException { - registrationQueue.add(new Object[] {ch, ops, arg}); - } - - /** - * Registers the supplied SelectableChannel with this SelectLoop. Note this - * method blocks until registration proceeds. It is advised that - * SelectLoop is intialized with a timeout value when using this method. 
- * @param ch the channel - * @param ops interest ops - * @param arg argument that will be returned with the SelectListener - * @return - * @throws ClosedChannelException - */ - public synchronized SelectionKey registerBlocking(SelectableChannel ch, int ops, Object arg) - throws ClosedChannelException { - synchronized (registrationLock) { - registrationRequests++; - } - selector.wakeup(); - SelectionKey key = ch.register(selector, ops, arg); - synchronized (registrationLock) { - registrationRequests--; - registrationLock.notifyAll(); - } - return key; - } - - /**** - * Main top-level IO loop this dispatches all IO events and timer events - * together I believe this is fairly efficient - */ - public void doLoop() throws IOException { - int nEvents; - processRegistrationQueue(); - - while (dontStop) { - nEvents = selector.select(timeout); - if (nEvents > 0) { - for (Iterator i = selector.selectedKeys() - .iterator(); i.hasNext();) { - SelectionKey sk = i.next(); - i.remove(); - - if (!sk.isValid()) - continue; - - Object arg = sk.attachment(); - callback.handleEvent(sk, arg); - } - } - - if (this.registrationQueue.size() > 0) - processRegistrationQueue(); - - if (registrationRequests > 0) { - synchronized (registrationLock) { - while (registrationRequests > 0) { - try { - registrationLock.wait(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - } - } - } - - protected void processRegistrationQueue() { - // add any elements in queue - for (Iterator it = registrationQueue.iterator(); it.hasNext();) { - Object[] args = it.next(); - SelectableChannel ch = (SelectableChannel) args[0]; - try { - ch.register(selector, (Integer) args[1], args[2]); - } catch (CancelledKeyException cke) { - continue; - } catch (ClosedChannelException e) { - } - it.remove(); - } - } - - /** - * Force this select loop to return immediately and re-enter select, useful - * for example if a new item has been added to the select loop while it - * was already blocked. 
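A stripped-down caller of this SelectLoop/SelectListener pair could look like the sketch below; the port number and demo class are invented, and a real controller would run doLoop() on a worker thread the way SimpleController further down does:

import java.io.IOException;
import java.nio.channels.SelectionKey;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;

import org.openflow.example.SelectListener;
import org.openflow.example.SelectLoop;

public class AcceptLoopDemo {
    public static void main(String[] args) throws IOException {
        final ServerSocketChannel server = ServerSocketChannel.open();
        server.configureBlocking(false);
        server.socket().bind(new java.net.InetSocketAddress(16633));

        SelectListener listener = new SelectListener() {
            @Override
            public void handleEvent(SelectionKey key, Object arg) throws IOException {
                if (key.isAcceptable()) {
                    SocketChannel client = server.accept();
                    if (client != null) {
                        System.err.println("accepted " + client.socket().getRemoteSocketAddress());
                        client.close();
                    }
                }
            }
        };

        // A one-second select timeout so queued registrations and shutdown() are noticed promptly.
        SelectLoop loop = new SelectLoop(listener, 1000);
        loop.register(server, SelectionKey.OP_ACCEPT, server);
        loop.doLoop(); // blocks; call loop.shutdown() from another thread to exit
    }
}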
- */ - public void wakeup() { - if (selector != null) { - selector.wakeup(); - } - } - - /** - * Shuts down this select loop, may return before it has fully shutdown - */ - public void shutdown() { - this.dontStop = false; - wakeup(); - } -} diff --git a/third-party/openflowj/src/main/java/org/openflow/example/SimpleController.java b/third-party/openflowj/src/main/java/org/openflow/example/SimpleController.java deleted file mode 100644 index e18e2f5fca..0000000000 --- a/third-party/openflowj/src/main/java/org/openflow/example/SimpleController.java +++ /dev/null @@ -1,321 +0,0 @@ -/** - * - */ -package org.openflow.example; - -import java.io.IOException; -import java.net.InetAddress; -import java.nio.channels.SelectionKey; -import java.nio.channels.ServerSocketChannel; -import java.nio.channels.SocketChannel; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.openflow.example.cli.Options; -import org.openflow.example.cli.ParseException; -import org.openflow.example.cli.SimpleCLI; -import org.openflow.io.OFMessageAsyncStream; -import org.openflow.protocol.OFEchoReply; -import org.openflow.protocol.OFFlowMod; -import org.openflow.protocol.OFMatch; -import org.openflow.protocol.OFMessage; -import org.openflow.protocol.OFPacketIn; -import org.openflow.protocol.OFPacketOut; -import org.openflow.protocol.OFPort; -import org.openflow.protocol.OFType; -import org.openflow.protocol.action.OFAction; -import org.openflow.protocol.action.OFActionOutput; -import org.openflow.protocol.factory.BasicFactory; -import org.openflow.util.LRULinkedHashMap; -import org.openflow.util.U16; - -/** - * @author Rob Sherwood (rob.sherwood@stanford.edu), David Erickson (daviderickson@cs.stanford.edu) - * - */ -public class SimpleController implements SelectListener { - protected ExecutorService es; - protected BasicFactory factory; - protected SelectLoop listenSelectLoop; - protected ServerSocketChannel listenSock; - protected List switchSelectLoops; - protected Map switchSockets; - protected Integer threadCount; - protected int port; - - protected class OFSwitch { - protected SocketChannel sock; - protected OFMessageAsyncStream stream; - protected Map macTable = - new LRULinkedHashMap(64001, 64000); - - public OFSwitch(SocketChannel sock, OFMessageAsyncStream stream) { - this.sock = sock; - this.stream = stream; - } - - public void handlePacketIn(OFPacketIn pi) { - // Build the Match - OFMatch match = new OFMatch(); - match.loadFromPacket(pi.getPacketData(), pi.getInPort()); - byte[] dlDst = match.getDataLayerDestination(); - Integer dlDstKey = Arrays.hashCode(dlDst); - byte[] dlSrc = match.getDataLayerSource(); - Integer dlSrcKey = Arrays.hashCode(dlSrc); - int bufferId = pi.getBufferId(); - - // if the src is not multicast, learn it - if ((dlSrc[0] & 0x1) == 0) { - if (!macTable.containsKey(dlSrcKey) || - !macTable.get(dlSrcKey).equals(pi.getInPort())) { - macTable.put(dlSrcKey, pi.getInPort()); - } - } - - Short outPort = null; - // if the destination is not multicast, look it up - if ((dlDst[0] & 0x1) == 0) { - outPort = macTable.get(dlDstKey); - } - - // push a flow mod if we know where the packet should be going - if (outPort != null) { - OFFlowMod fm = (OFFlowMod) factory.getMessage(OFType.FLOW_MOD); - fm.setBufferId(bufferId); - fm.setCommand((short) 0); - fm.setCookie(0); - fm.setFlags((short) 0); - 
fm.setHardTimeout((short) 0); - fm.setIdleTimeout((short) 5); - match.setInputPort(pi.getInPort()); - match.setWildcards(0); - fm.setMatch(match); - fm.setOutPort((short) OFPort.OFPP_NONE.getValue()); - fm.setPriority((short) 0); - OFActionOutput action = new OFActionOutput(); - action.setMaxLength((short) 0); - action.setPort(outPort); - List actions = new ArrayList(); - actions.add(action); - fm.setActions(actions); - fm.setLength(U16.t(OFFlowMod.MINIMUM_LENGTH+OFActionOutput.MINIMUM_LENGTH)); - try { - stream.write(fm); - } catch (IOException e) { - e.printStackTrace(); - } - } - - // Send a packet out - if (outPort == null || pi.getBufferId() == 0xffffffff) { - OFPacketOut po = new OFPacketOut(); - po.setBufferId(bufferId); - po.setInPort(pi.getInPort()); - - // set actions - OFActionOutput action = new OFActionOutput(); - action.setMaxLength((short) 0); - action.setPort((short) ((outPort == null) ? OFPort.OFPP_FLOOD - .getValue() : outPort)); - List actions = new ArrayList(); - actions.add(action); - po.setActions(actions); - po.setActionsLength((short) OFActionOutput.MINIMUM_LENGTH); - - // set data if needed - if (bufferId == 0xffffffff) { - byte[] packetData = pi.getPacketData(); - po.setLength(U16.t(OFPacketOut.MINIMUM_LENGTH - + po.getActionsLength() + packetData.length)); - po.setPacketData(packetData); - } else { - po.setLength(U16.t(OFPacketOut.MINIMUM_LENGTH - + po.getActionsLength())); - } - try { - stream.write(po); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - - public String toString() { - InetAddress remote = sock.socket().getInetAddress(); - return remote.getHostAddress() + ":" + sock.socket().getPort(); - } - - public OFMessageAsyncStream getStream() { - return stream; - } - } - - public SimpleController(int port) throws IOException{ - listenSock = ServerSocketChannel.open(); - listenSock.configureBlocking(false); - listenSock.socket().bind(new java.net.InetSocketAddress(port)); - listenSock.socket().setReuseAddress(true); - this.port = port; - switchSelectLoops = new ArrayList(); - switchSockets = new ConcurrentHashMap(); - threadCount = 1; - listenSelectLoop = new SelectLoop(this); - // register this connection for accepting - listenSelectLoop.register(listenSock, SelectionKey.OP_ACCEPT, listenSock); - - this.factory = new BasicFactory(); - } - - @Override - public void handleEvent(SelectionKey key, Object arg) throws IOException { - if (arg instanceof ServerSocketChannel) - handleListenEvent(key, (ServerSocketChannel)arg); - else - handleSwitchEvent(key, (SocketChannel) arg); - } - - protected void handleListenEvent(SelectionKey key, ServerSocketChannel ssc) - throws IOException { - SocketChannel sock = listenSock.accept(); - OFMessageAsyncStream stream = new OFMessageAsyncStream(sock, factory); - switchSockets.put(sock, new OFSwitch(sock, stream)); - System.err - .println("Got new connection from " + switchSockets.get(sock)); - List l = new ArrayList(); - l.add(factory.getMessage(OFType.HELLO)); - l.add(factory.getMessage(OFType.FEATURES_REQUEST)); - stream.write(l); - - int ops = SelectionKey.OP_READ; - if (stream.needsFlush()) - ops |= SelectionKey.OP_WRITE; - - // hash this switch into a thread - SelectLoop sl = switchSelectLoops.get(sock.hashCode() - % switchSelectLoops.size()); - sl.register(sock, ops, sock); - // force select to return and re-enter using the new set of keys - sl.wakeup(); - } - - protected void handleSwitchEvent(SelectionKey key, SocketChannel sock) { - OFSwitch sw = switchSockets.get(sock); - OFMessageAsyncStream stream = 
sw.getStream(); - try { - if (key.isReadable()) { - List msgs = stream.read(); - if (msgs == null) { - key.cancel(); - switchSockets.remove(sock); - return; - } - - for (OFMessage m : msgs) { - switch (m.getType()) { - case PACKET_IN: - sw.handlePacketIn((OFPacketIn) m); - break; - case HELLO: - System.err.println("GOT HELLO from " + sw); - break; - case ECHO_REQUEST: - OFEchoReply reply = (OFEchoReply) stream - .getMessageFactory().getMessage( - OFType.ECHO_REPLY); - reply.setXid(m.getXid()); - stream.write(reply); - break; - default: - System.err.println("Unhandled OF message: " - + m.getType() + " from " - + sock.socket().getInetAddress()); - } - } - } - if (key.isWritable()) { - stream.flush(); - } - - /** - * Only register for interest in R OR W, not both, causes stream - * deadlock after some period of time - */ - if (stream.needsFlush()) - key.interestOps(SelectionKey.OP_WRITE); - else - key.interestOps(SelectionKey.OP_READ); - } catch (IOException e) { - // if we have an exception, disconnect the switch - key.cancel(); - switchSockets.remove(sock); - } - } - - public void run() throws IOException{ - System.err.println("Starting " + this.getClass().getCanonicalName() + - " on port " + this.port + " with " + this.threadCount + " threads"); - // Static number of threads equal to processor cores - es = Executors.newFixedThreadPool(threadCount); - - // Launch one select loop per threadCount and start running - for (int i = 0; i < threadCount; ++i) { - final SelectLoop sl = new SelectLoop(this); - switchSelectLoops.add(sl); - es.execute(new Runnable() { - @Override - public void run() { - try { - sl.doLoop(); - } catch (IOException e) { - e.printStackTrace(); - } - }} - ); - } - - // Start the listen loop - listenSelectLoop.doLoop(); - } - - public static void main(String [] args) throws IOException { - SimpleCLI cmd = parseArgs(args); - int port = Integer.valueOf(cmd.getOptionValue("p")); - SimpleController sc = new SimpleController(port); - sc.threadCount = Integer.valueOf(cmd.getOptionValue("t")); - sc.run(); - } - - public static SimpleCLI parseArgs(String[] args) { - Options options = new Options(); - options.addOption("h", "help", "print help"); - // unused? - // options.addOption("n", true, "the number of packets to send"); - options.addOption("p", "port", 6633, "the port to listen on"); - options.addOption("t", "threads", 1, "the number of threads to run"); - try { - SimpleCLI cmd = SimpleCLI.parse(options, args); - if (cmd.hasOption("h")) { - printUsage(options); - System.exit(0); - } - return cmd; - } catch (ParseException e) { - System.err.println(e); - printUsage(options); - } - - System.exit(-1); - return null; - } - - public static void printUsage(Options options) { - SimpleCLI.printHelp("Usage: " - + SimpleController.class.getCanonicalName() + " [options]", - options); - } -} diff --git a/third-party/openflowj/src/main/java/org/openflow/example/cli/Option.java b/third-party/openflowj/src/main/java/org/openflow/example/cli/Option.java deleted file mode 100644 index acf8446d4b..0000000000 --- a/third-party/openflowj/src/main/java/org/openflow/example/cli/Option.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.openflow.example.cli; - -public class Option { - String shortOpt; - String longOpt; - Object defaultVal; - String val; // current value of this option, string form - boolean specified; // was this option found in the cmdline? 
- String comment; - - /** - * Option information storrage - * - * @param shortOpt - * Short name for the option, e.g., "-p" - * @param longOpt - * Long name for option, e.g., "--port" - * @param defaultVal - * default value: "6633" or null if no default value - * @param comment - * String to print to explain this option, e.g., a help message - */ - public Option(String shortOpt, String longOpt, Object defaultVal, - String comment) { - super(); - this.shortOpt = shortOpt; - this.longOpt = longOpt; - this.defaultVal = defaultVal; - this.comment = comment; - this.specified = false; - } - - public Option(String shortOpt, String longOpt, String comment) { - this(shortOpt, longOpt, null, comment); - } - - public boolean needsArg() { - return this.defaultVal != null; - } -} diff --git a/third-party/openflowj/src/main/java/org/openflow/example/cli/Options.java b/third-party/openflowj/src/main/java/org/openflow/example/cli/Options.java deleted file mode 100644 index 7f55b50bc6..0000000000 --- a/third-party/openflowj/src/main/java/org/openflow/example/cli/Options.java +++ /dev/null @@ -1,69 +0,0 @@ -package org.openflow.example.cli; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -/** - * Very basic CLI options listing - * - * @author Rob Sherwood (rob.sherwood@stanford.edu) - * - */ - -public class Options { - Map shortOptionsMap; - Map longOptionsMap; - - public Options() { - this.shortOptionsMap = new HashMap(); - this.longOptionsMap = new HashMap(); - } - - public static Options make(Option opts[]) { - Options options = new Options(); - for (int i = 0; i < opts.length; i++) - options.addOption(opts[i]); - return options; - } - - private void addOption(Option option) { - if (option.shortOpt != null) - this.shortOptionsMap.put(option.shortOpt, option); - if (option.longOpt != null) - this.longOptionsMap.put(option.longOpt, option); - } - - protected void addOption(String shortName, String longName, Object o, - String comment) { - Option option = new Option(shortName, longName, o, comment); - addOption(option); - } - - public void addOption(String shortName, String longName, boolean b, - String comment) { - this.addOption(shortName, longName, Boolean.valueOf(b), comment); - } - - public void addOption(String shortName, String longName, int i, - String comment) { - this.addOption(shortName, longName, Integer.valueOf(i), comment); - } - - public Option getOption(String shortName) { - return this.shortOptionsMap.get(shortName); - } - - public Option getOptionByLongName(String longName) { - return this.longOptionsMap.get(longName); - } - - public Collection